├── .gitignore
├── .travis.yml
├── LICENSE
├── README.md
├── benchmark
│   └── sync-vs-async.js
├── examples
│   ├── blocking.js
│   └── non-blocking.js
├── package.json
├── src
│   └── index.js
└── test
    ├── b64.decode.js
    ├── b64.encode.js
    ├── b64.js
    └── fixtures
        └── values.js

/.gitignore:
--------------------------------------------------------------------------------
## Node

# Logs
logs
*.log
npm-debug.log*

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage

# nyc test coverage
.nyc_output

# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# node-waf configuration
.lock-wscript

# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules
jspm_packages

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

## OS X

*.DS_Store
.AppleDouble
.LSOverride

# Icon must end with two \r
Icon


# Thumbnails
._*

# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent

# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
language: node_js
node_js:
  - '7'
  - '6'
  - '4'
script: npm test
after_success: npm run coverage; npm run bench
notifications:
  email:
    on_success: never

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2017 Luke Childs

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# base64-async

> Non-blocking chunked Base64 encoding

[![Build Status](https://travis-ci.org/lukechilds/base64-async.svg?branch=master)](https://travis-ci.org/lukechilds/base64-async)
[![Coverage Status](https://coveralls.io/repos/github/lukechilds/base64-async/badge.svg?branch=master)](https://coveralls.io/github/lukechilds/base64-async?branch=master)
[![npm](https://img.shields.io/npm/v/base64-async.svg)](https://www.npmjs.com/package/base64-async)

Process large Base64 documents without blocking the event loop.

A configurable chunk size option lets you optimise for your use case.

> **Note:**
>
> Base64 in Node.js is already crazy fast. Breaking the work up into chunks and adding async logic adds [overhead](#performance). If you aren't dealing with large files, it will probably be more efficient to just block the event loop for the small amount of time it takes Node.js to process Base64 synchronously.

## Install

```shell
npm install --save base64-async
```

## Usage

```js
const b64 = require('base64-async');
const fs = require('fs');
const buffer = fs.readFileSync('somehugefile.jpg');

b64.encode(buffer).then(b64String => console.log(b64String));
// aGkgbXVt...

b64.decode(b64String).then(buffer => console.log(buffer));
// <Buffer 68 69 20 6d 75 6d ...>

// or, for the cool kids
const b64String = await b64.encode(buffer);
const buffer = await b64.decode(b64String);

// which is equivalent to this
const b64String = await b64(buffer);
const buffer = await b64(b64String);
// If no method is specified, buffers are encoded, strings are decoded
```

## Example

```
$ npm run example

Registering 4 asynchronous jobs...
Encoding 100 MB with default Node.js Buffer API...
Base64 encode complete
Hi, I'm an asynchronous job, and I'm late by 231ms
Hi, I'm an asynchronous job, and I'm late by 238ms
Hi, I'm an asynchronous job, and I'm late by 239ms
Hi, I'm an asynchronous job, and I'm late by 245ms

Registering 4 asynchronous jobs...
Encoding 100 MB with base64-async in chunks of 250 kB...
Hi, I'm an asynchronous job, and I'm on time
Hi, I'm an asynchronous job, and I'm on time
Hi, I'm an asynchronous job, and I'm on time
Hi, I'm an asynchronous job, and I'm on time
Base64 encode complete
```

([example source code](/examples))

Notice how none of the async jobs can start until the Buffer API has finished encoding and stops blocking the event loop. With `base64-async`, the async jobs can execute in between each chunk of data.
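
The same pattern matters most inside a server, where one big synchronous encode would stall every other request. The handler below is an illustrative sketch only (the route, port and file name are hypothetical and not part of this package's examples):

```js
const http = require('http');
const fs = require('fs');
const b64 = require('base64-async');

http.createServer((req, res) => {
	if (req.url === '/encode') {
		// Hypothetical endpoint: Base64-encode a large file and return it.
		// base64-async yields to the event loop between chunks, so other
		// requests keep getting served while this one is processed.
		return fs.readFile('somehugefile.jpg', (err, buffer) => {
			if (err) {
				res.statusCode = 500;
				return res.end('Could not read file');
			}

			b64.encode(buffer, { chunkSize: 250000 })
				.then(b64String => res.end(b64String))
				.catch(() => {
					res.statusCode = 500;
					res.end('Encoding failed');
				});
		});
	}

	res.end('ok');
}).listen(3000);
```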

## Performance

```
$ npm run bench

Benchmark completed with a chunk size of 250 kB
┌────────┬──────────────┬──────────────┬──────────────┬──────────────┐
│ Bytes  │ Encode Sync  │ Decode Sync  │ Encode Async │ Decode Async │
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤
│ 10 kB  │ 0.097225ms   │ 0.383031ms   │ 1.276201ms   │ 0.537687ms   │
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤
│ 100 kB │ 0.198161ms   │ 0.271577ms   │ 0.99799ms    │ 0.356765ms   │
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤
│ 1 MB   │ 1.924415ms   │ 2.038406ms   │ 2.679117ms   │ 2.544993ms   │
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤
│ 10 MB  │ 15.749204ms  │ 16.280246ms  │ 33.666111ms  │ 29.918725ms  │
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤
│ 100 MB │ 165.189455ms │ 195.298199ms │ 246.359068ms │ 280.792751ms │
└────────┴──────────────┴──────────────┴──────────────┴──────────────┘
```

As you can see, the total processing time is longer with `base64-async` (as we spend some time paused waiting for the event loop). However, if you have an idea of the size of the data you'll be working with, you can play around with the chunk size to get better performance.

The included benchmarking tool accepts arguments to help you test this:

```
$ npm run bench -- --chunkSize=1000000 --bytesToBenchmark=50000000,100000000

Benchmark completed with a chunk size of 1 MB
┌────────┬──────────────┬──────────────┬──────────────┬──────────────┐
│ Bytes  │ Encode Sync  │ Decode Sync  │ Encode Async │ Decode Async │
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤
│ 50 MB  │ 79.675533ms  │ 87.251079ms  │ 92.400367ms  │ 137.468082ms │
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤
│ 100 MB │ 203.423705ms │ 173.567974ms │ 186.181857ms │ 264.123311ms │
└────────┴──────────────┴──────────────┴──────────────┴──────────────┘
```

## API

### b64(input, [options])

Returns a Promise that resolves to the Base64 encoded/decoded input.

#### input

Type: `string`, `buffer`

A Base64 string to decode, or a Buffer to encode.

#### options

Type: `object`

##### options.chunkSize

Type: `number`<br>
Default: `250000`

Size of the chunk of data to work on before deferring execution to the next iteration of the event loop.

When encoding, the value is interpreted as a number of bytes. When decoding, it is interpreted as a number of characters.

### b64.encode(input, [options])

Returns a Promise that resolves to a Base64 encoded string.

#### input

Type: `buffer`

A Buffer to encode.

### b64.decode(input, [options])

Returns a Promise that resolves to a decoded Buffer.

#### input

Type: `string`

A Base64 string to decode.

## License

MIT © Luke Childs

--------------------------------------------------------------------------------
/benchmark/sync-vs-async.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

/* eslint-disable no-await-in-loop */

const prettyBytes = require('pretty-bytes');
const Table = require('cli-table');
const timeSpan = require('time-span');
const minimist = require('minimist');
const b64 = require('../');

const argv = minimist(process.argv.slice(2));

const chunkSize = argv.chunkSize || 250000;
const bytesToBenchmark = (
	argv.bytesToBenchmark &&
	argv.bytesToBenchmark.split(',').map(Number)
) || [10000, 100000, 1000000, 10000000, 100000000];

const log = text => {
	if (process.stdout.clearLine) {
		process.stdout.clearLine();
		process.stdout.cursorTo(0);
		process.stdout.write(text);
	}
};

const bench = noOfBytes => Promise.resolve().then(async () => {
	const results = {};
	const humanBytes = prettyBytes(noOfBytes);
	const buffer = Buffer.alloc(noOfBytes);
	let end;

	log(`${humanBytes}: Encoding sync...`);
	end = timeSpan();
	const bufferBase64 = buffer.toString('base64');
	results.encodeSync = end();

	log(`${humanBytes}: Decoding sync...`);
	end = timeSpan();
	Buffer.from(bufferBase64, 'base64');
	results.decodeSync = end();

	log(`${humanBytes}: Encoding async...`);
	end = timeSpan();
	await b64(buffer, { chunkSize });
	results.encodeAsync = end();

	log(`${humanBytes}: Decoding async...`);
	end = timeSpan();
	await b64(bufferBase64, { chunkSize });
	results.decodeAsync = end();

	return results;
});

(async () => {
	const table = new Table({
		head: [
			'Bytes',
			'Encode Sync',
			'Decode Sync',
			'Encode Async',
			'Decode Async'
		]
	});

	for (const noOfBytes of bytesToBenchmark) {
		const results = await bench(noOfBytes);
		table.push([
			prettyBytes(noOfBytes),
			results.encodeSync + 'ms',
			results.decodeSync + 'ms',
			results.encodeAsync + 'ms',
			results.decodeAsync + 'ms'
		]);
	}

	log(`Benchmark completed with a chunk size of ${prettyBytes(chunkSize)}\n`);
	console.log(table.toString());
})();

--------------------------------------------------------------------------------
/examples/blocking.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const prettyBytes = require('pretty-bytes');

const bytes = 100000000;
const buf = Buffer.alloc(bytes);
const interval = 25;
const syncStart = Date.now();
const asyncJobs = 4;

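// Schedule a handful of interval jobs, then run one big synchronous encode.
// The jobs can't fire until the encode finishes and frees the event loop,
// which is why they all report as late in the example output.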
console.log(`Registering ${asyncJobs} asynchronous jobs...`);
let i = 0;
const syncId = setInterval(() => {
	if (++i >= asyncJobs) {
		clearInterval(syncId);
	}

	const late = Date.now() - (syncStart + (interval * i));
	console.log(`Hi, I'm an asynchronous job, and I'm ${late > 10 ? `late by ${late}ms` : 'on time'}`);
}, interval);

console.log(`Encoding ${prettyBytes(bytes)} with default Node.js Buffer API...`);
buf.toString('base64');
console.log('Base64 encode complete');

--------------------------------------------------------------------------------
/examples/non-blocking.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const prettyBytes = require('pretty-bytes');
const b64 = require('../');

const chunkSize = 250000;
const bytes = 100000000;
const buf = Buffer.alloc(bytes);
const interval = 25;
const asyncStart = Date.now();
const asyncJobs = 4;

// Same interval jobs as the blocking example, but the encode below is chunked,
// so the jobs get to run in between chunks and report as on time.
console.log(`Registering ${asyncJobs} asynchronous jobs...`);
let i = 0;
const asyncId = setInterval(() => {
	if (++i >= asyncJobs) {
		clearInterval(asyncId);
	}

	const late = Date.now() - (asyncStart + (interval * i));
	console.log(`Hi, I'm an asynchronous job, and I'm ${late > 10 ? `late by ${late}ms` : 'on time'}`);
}, interval);

console.log(`Encoding ${prettyBytes(bytes)} with base64-async in chunks of ${prettyBytes(chunkSize)}...`);
b64(buf, { chunkSize }).then(() => {
	console.log('Base64 encode complete');
});

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "base64-async",
  "version": "2.1.3",
  "description": "Non-blocking chunked Base64 encoding",
  "main": "src/index.js",
  "scripts": {
    "bench": "benchmark/sync-vs-async.js",
    "example": "examples/blocking.js && echo && examples/non-blocking.js",
    "test": "xo && nyc ava",
    "coverage": "nyc report --reporter=text-lcov | coveralls"
  },
  "xo": {
    "extends": "xo-lukechilds"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/lukechilds/base64-async.git"
  },
  "keywords": [
    "async",
    "asynchronous",
    "non-blocking",
    "base64",
    "encode",
    "decode"
  ],
  "author": "Luke Childs (http://lukechilds.co.uk)",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/lukechilds/base64-async/issues"
  },
  "homepage": "https://github.com/lukechilds/base64-async",
  "dependencies": {},
  "devDependencies": {
    "ava": "^0.25.0",
    "cli-table": "^0.3.1",
    "coveralls": "^3.0.0",
    "eslint-config-xo-lukechilds": "^1.0.0",
    "minimist": "^1.2.0",
    "nyc": "^11.0.2",
    "pretty-bytes": "^5.0.0",
    "time-span": "^2.0.0",
    "xo": "^0.19.0"
  }
}

--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
'use strict';

const validateOpts = (opts, chunkMultiple) => {
	opts = Object.assign({}, { chunkSize: 250000 }, opts);

	// Round the chunk size up to a multiple of 3 (encode) or 4 (decode) so
	// chunk boundaries never fall inside a Base64 group.
	opts.chunkSize = Math.ceil(opts.chunkSize / chunkMultiple) * chunkMultiple;

	// Reject zero (and negative) chunk sizes, which could never make progress.
	if (opts.chunkSize < 1) {
		throw new Error('opts.chunkSize must be larger than 0');
	}

	return opts;
};

const b64 = (input, opts) => {
	if (input instanceof Buffer || typeof input === 'string') {
		const method = input instanceof Buffer ? 'encode' : 'decode';
		return b64[method](input, opts);
	}

	return Promise.reject(new TypeError('input must be a buffer or string'));
};

b64.encode = (input, opts) => new Promise(resolve => {
	const chunkMultiple = 3;
	opts = validateOpts(opts, chunkMultiple);

	if (!(input instanceof Buffer)) {
		throw new TypeError('input must be a buffer');
	}

	const bufferLength = input.length;
	let currentIndex = 0;
	let output = '';

	setImmediate(function encodeChunk() {
		const chunk = input.slice(currentIndex, currentIndex + opts.chunkSize);
		output += chunk.toString('base64');
		currentIndex += opts.chunkSize;
		if (currentIndex < bufferLength) {
			setImmediate(encodeChunk);
		} else {
			resolve(output);
		}
	});
});

b64.decode = (input, opts) => new Promise(resolve => {
	const chunkMultiple = 4;
	opts = validateOpts(opts, chunkMultiple);

	if (typeof input !== 'string') {
		throw new TypeError('input must be a base64 string');
	}

	const stringLength = input.length;
	const outputBuffers = [];
	let currentIndex = 0;

	setImmediate(function decodeChunk() {
		const chunk = input.slice(currentIndex, currentIndex + opts.chunkSize);
		outputBuffers.push(Buffer.from(chunk, 'base64'));
		currentIndex += opts.chunkSize;
		if (currentIndex < stringLength) {
			setImmediate(decodeChunk);
		} else {
			resolve(Buffer.concat(outputBuffers));
		}
	});
});

module.exports = b64;

--------------------------------------------------------------------------------
/test/b64.decode.js:
--------------------------------------------------------------------------------
import test from 'ava';
import b64 from '../';
import values from './fixtures/values';

test('b64.decode is a function', t => {
	t.is(typeof b64.decode, 'function');
});

test('b64.decode returns a Promise', t => {
	const returnValue = b64.decode(values.base64);
	t.true(returnValue instanceof Promise);
});

test('b64.decode decodes Base64 to buffer', async t => {
	const result = await b64.decode(values.base64);
	t.true(Buffer.compare(result, values.buffer) === 0);
});

test('b64.decode decodes correctly in chunks', async t => {
	const result = await b64.decode(values.base64, { chunkSize: 4 });
	t.true(Buffer.compare(result, values.buffer) === 0);
});

test('b64.decode rounds chunks up to multiples of 4', async t => {
	const result = await b64.decode(values.base64, { chunkSize: 2 });
	t.true(Buffer.compare(result, values.buffer) === 0);
});

test('b64.decode rejects Promise if chunkSize is 0', async t => {
	const error = await t.throws(b64.decode(values.base64, { chunkSize: 0 }));
	t.is(error.message, 'opts.chunkSize must be larger than 0');
});

test('b64.decode rejects Promise if input is not a string', async t => {
	const error = await t.throws(b64.decode(values.buffer));
	t.is(error.message, 'input must be a base64 string');
});

--------------------------------------------------------------------------------
/test/b64.encode.js:
--------------------------------------------------------------------------------
import test from 'ava';
import b64 from '../';
import values from './fixtures/values';

test('b64.encode is a function', t => {
	t.is(typeof b64.encode, 'function');
});

test('b64.encode returns a Promise', t => {
	const returnValue = b64.encode(values.buffer);
	t.true(returnValue instanceof Promise);
});

test('b64.encode encodes buffer to Base64', async t => {
	const result = await b64.encode(values.buffer);
	t.is(result, values.base64);
});

test('b64.encode encodes correctly in chunks', async t => {
	const result = await b64.encode(values.buffer, { chunkSize: 3 });
	t.is(result, values.base64);
});

test('b64.encode rounds chunks up to multiples of 3', async t => {
	const result = await b64.encode(values.buffer, { chunkSize: 1 });
	t.is(result, values.base64);
});

test('b64.encode rejects Promise if chunkSize is 0', async t => {
	const error = await t.throws(b64.encode(values.buffer, { chunkSize: 0 }));
	t.is(error.message, 'opts.chunkSize must be larger than 0');
});

test('b64.encode rejects Promise if input is not a buffer', async t => {
	const error = await t.throws(b64.encode(values.hexString));
	t.is(error.message, 'input must be a buffer');
});

--------------------------------------------------------------------------------
/test/b64.js:
--------------------------------------------------------------------------------
import test from 'ava';
import b64 from '../';
import values from './fixtures/values';

test('b64 is a function', t => {
	t.is(typeof b64, 'function');
});

test('b64 calls b64.encode on buffers', async t => {
	const result = await b64(values.buffer);
	t.is(result, values.base64);
});

test('b64 calls b64.decode on strings', async t => {
	const result = await b64(values.base64);
	t.true(Buffer.compare(result, values.buffer) === 0);
});

test('b64 rejects Promise if input is not a buffer or string', async t => {
	const error = await t.throws(b64(0));
	t.is(error.message, 'input must be a buffer or string');
});

--------------------------------------------------------------------------------
/test/fixtures/values.js:
--------------------------------------------------------------------------------
const buffer = Buffer.from([0x72, 0x98, 0x4f, 0x96, 0x75, 0x03, 0xcd, 0x28, 0x38, 0xbf]);
const base64 = buffer.toString('base64');
const hexString = buffer.toString('hex');

module.exports = {
	buffer,
	base64,
	hexString
};

--------------------------------------------------------------------------------