├── .babelrc ├── .eslintignore ├── src ├── .eslintrc.json ├── util.js ├── defaultTransportFactory.js ├── impl │ ├── fetch.js │ ├── mozXhr.js │ └── xhr.js ├── defaultChunkParser.js └── index.js ├── .gitignore ├── test ├── server │ ├── .eslintrc.json │ └── index.js └── integ │ ├── .eslintrc.json │ └── chunked-request.spec.js ├── .npmignore ├── .travis.yml ├── .eslintrc.json ├── run-karma.sh ├── webpack.config.js ├── release.sh ├── LICENSE ├── package.json ├── CHANGELOG.md ├── karma.conf.js └── README.md /.babelrc: -------------------------------------------------------------------------------- 1 | { "presets": ["es2015"] } 2 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | lib/ 2 | build/ 3 | dist/ 4 | webpack.config.js -------------------------------------------------------------------------------- /src/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "rules": { 3 | "no-var": 2 4 | } 5 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | lib/ 3 | build/ 4 | .idea/ 5 | npm-debug* 6 | sauce_connect.log -------------------------------------------------------------------------------- /test/server/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "node": true 4 | }, 5 | "rules": { 6 | "no-console": 0 7 | } 8 | } -------------------------------------------------------------------------------- /test/integ/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "jasmine": true 4 | }, 5 | "rules": { 6 | "no-console": 0 7 | } 8 | } 
-------------------------------------------------------------------------------- /src/util.js: -------------------------------------------------------------------------------- 1 | export function isObject(value) { 2 | return !!value && typeof value === 'object'; 3 | } 4 | 5 | export function noop() { 6 | /* No operation */ 7 | } 8 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # If this file is not present the contents of .gitignore is used 2 | build/ 3 | test/ 4 | .* 5 | karma.conf.js 6 | release.sh 7 | run-karma.sh 8 | sauce_connect.log -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - "4" 4 | cache: 5 | directories: 6 | - node_modules 7 | before_install: 8 | - true && `base64 --decode <<< ZXhwb3J0IFNBVUNFX1VTRVJOQU1FPWpvbm55cmVldmVz` 9 | - true && `base64 --decode <<< ZXhwb3J0IFNBVUNFX0FDQ0VTU19LRVk9NTgzMzU1NDUtOWYxYS00Y2M3LThmZmItYjFmZjgwMzg5NmVm` 10 | install: npm install 11 | script: 12 | - npm run test -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "eslint:recommended", 3 | "rules": { 4 | "indent": [2, 2], 5 | "no-var": 2, 6 | "prefer-const": 2 7 | }, 8 | "env": { 9 | "browser": true 10 | }, 11 | "globals": { 12 | "Uint8Array": false, 13 | "TextEncoder": false, 14 | "TextDecoder": false 15 | }, 16 | "parserOptions": { 17 | "ecmaVersion": 6, 18 | "sourceType": "module" 19 | }, 20 | "root": true 21 | } -------------------------------------------------------------------------------- /run-karma.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 
set -x 4 | 5 | function killChunkedResponseServer { 6 | echo "Killing ChunkedResponse Server..." 7 | kill ${SERVER_PID} &> /dev/null 8 | } 9 | 10 | echo "Starting ChunkedResponse Server..." 11 | node ./test/server/index.js & 12 | SERVER_PID=$! 13 | 14 | # Check the ChunkedResponse server started up ok. 15 | sleep 0.5 16 | ps ${SERVER_PID} &> /dev/null 17 | 18 | # Kill the ChunkedREsponse server when this script exists. 19 | trap killChunkedResponseServer EXIT 20 | 21 | ./node_modules/.bin/karma start $@ 22 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | module.exports = { 4 | entry: "./test/integ/chunked-request.spec.js", 5 | output: { 6 | path: path.resolve(__dirname, 'build'), 7 | filename: 'integration-tests.js', 8 | }, 9 | devtool: 'source-map', 10 | module: { 11 | rules: [ 12 | { 13 | test: /\.js$/, 14 | include: /src|test/, 15 | exclude: /node_modules/, 16 | loader: 'babel-loader?cacheDirectory' 17 | } 18 | ] 19 | }, 20 | resolve: { 21 | extensions: [".js"] 22 | } 23 | }; 24 | 25 | -------------------------------------------------------------------------------- /src/defaultTransportFactory.js: -------------------------------------------------------------------------------- 1 | import fetchRequest from './impl/fetch'; 2 | import mozXhrRequest from './impl/mozXhr'; 3 | import xhrRequest from './impl/xhr'; 4 | 5 | let selected = null; 6 | 7 | export default function defaultTransportFactory() { 8 | if (!selected) { 9 | selected = detectTransport(); 10 | } 11 | return selected; 12 | } 13 | 14 | function detectTransport() { 15 | if (typeof Response !== 'undefined' && Response.prototype.hasOwnProperty("body") && typeof Headers === 'function') { 16 | return fetchRequest; 17 | } 18 | const mozChunked = 'moz-chunked-arraybuffer'; 19 | if (supportsXhrResponseType(mozChunked)) { 20 | return 
mozXhrRequest; 21 | } 22 | 23 | return xhrRequest; 24 | } 25 | 26 | function supportsXhrResponseType(type) { 27 | try { 28 | const tmpXhr = new XMLHttpRequest(); 29 | tmpXhr.responseType = type; 30 | return tmpXhr.responseType === type; 31 | } catch (e) { /* IE throws on setting responseType to an unsupported value */ } 32 | return false; 33 | } -------------------------------------------------------------------------------- /release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | VERSION=${1} 5 | if [ -z ${VERSION} ]; then 6 | echo "VERSION not set" 7 | exit 1 8 | fi 9 | 10 | if [[ `git status --porcelain` ]]; then 11 | echo "There are pending changes, refusing to release." 12 | exit 1 13 | fi 14 | 15 | read -p "Release v${VERSION}? " -n 1 -r 16 | echo 17 | if [[ $REPLY =~ ^[Yy]$ ]] 18 | then 19 | echo "Staring npm publish" 20 | npm publish 21 | 22 | echo "Building standalone artifact" 23 | npm run build:lib 24 | 25 | # Patch up ES6 module entry point so it's more pleasant to use. 26 | echo 'module.exports = require("./index").default;' > ./lib/entry.js 27 | 28 | mkdir -p dist 29 | ./node_modules/.bin/webpack --output-library chunkedRequest --output-library-target umd --entry ./lib/entry.js --output-path dist --output-filename chunked-request.js 30 | 31 | echo "Creating Github release branch release/v${VERSION}" 32 | git checkout -b release/v${VERSION} 33 | git add . 34 | git commit -m "Release ${VERSION}" 35 | git tag v${VERSION} 36 | git push origin --tags 37 | 38 | echo "All done!" 
39 | fi 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 John Reeves 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /src/impl/fetch.js: -------------------------------------------------------------------------------- 1 | import { BrowserHeaders } from 'browser-headers'; 2 | 3 | export const READABLE_BYTE_STREAM = 'readable-byte-stream'; 4 | 5 | export default function fetchRequest(options) { 6 | const { onRawChunk, onRawComplete, method, body, credentials } = options; 7 | const headers = options.headers.toHeaders(); 8 | 9 | function pump(reader, res) { 10 | return reader.read() 11 | .then(result => { 12 | if (result.done) { 13 | setTimeout(() => { 14 | onRawComplete({ 15 | statusCode: res.status, 16 | transport: READABLE_BYTE_STREAM, 17 | raw: res 18 | }); 19 | }); 20 | return; 21 | } 22 | onRawChunk(result.value); 23 | return pump(reader, res); 24 | }); 25 | } 26 | 27 | function onError(err) { 28 | setTimeout(() => { 29 | options.onRawComplete({ 30 | statusCode: 0, 31 | transport: READABLE_BYTE_STREAM, 32 | raw: err 33 | }); 34 | }); 35 | } 36 | 37 | fetch(options.url, { headers, method, body, credentials }) 38 | .then(res => { 39 | options.onRawHeaders(new BrowserHeaders(res.headers), res.status); 40 | return pump(res.body.getReader(), res) 41 | }) 42 | .catch(onError); 43 | } 44 | -------------------------------------------------------------------------------- /src/defaultChunkParser.js: -------------------------------------------------------------------------------- 1 | const entryDelimiter = '\n'; 2 | 3 | // The defaultChunkParser expects the response from the server to consist of new-line 4 | // delimited JSON, eg: 5 | // 6 | // { "chunk": "#1", "data": "Hello" }\n 7 | // { "chunk": "#2", "data": "World" } 8 | // 9 | // It will correctly handle the case where a chunk is emitted by the server across 10 | // delimiter boundaries. 
11 | export default function defaultChunkParser(bytes, state = {}, flush = false) { 12 | if (!state.textDecoder) { 13 | state.textDecoder = new TextDecoder(); 14 | } 15 | const textDecoder = state.textDecoder; 16 | const chunkStr = textDecoder.decode(bytes, { stream: !flush }) 17 | const jsonLiterals = chunkStr.split(entryDelimiter); 18 | if (state.trailer) { 19 | jsonLiterals[0] = `${state.trailer}${jsonLiterals[0]}`; 20 | state.trailer = ''; 21 | } 22 | 23 | // Is this a complete message? If not; push the trailing (incomplete) string 24 | // into the state. 25 | if (!flush && !hasSuffix(chunkStr, entryDelimiter)) { 26 | state.trailer = jsonLiterals.pop(); 27 | } 28 | 29 | const jsonObjects = jsonLiterals 30 | .filter(v => v.trim() !== '') 31 | .map(v => JSON.parse(v)); 32 | 33 | return [ jsonObjects, state ]; 34 | } 35 | 36 | function hasSuffix(s, suffix) { 37 | return s.substr(s.length - suffix.length) === suffix; 38 | } -------------------------------------------------------------------------------- /src/impl/mozXhr.js: -------------------------------------------------------------------------------- 1 | import { BrowserHeaders } from 'browser-headers'; 2 | 3 | export const MOZ_CHUNKED = 'moz-chunked'; 4 | 5 | export default function mozXhrRequest(options) { 6 | const xhr = new XMLHttpRequest(); 7 | 8 | function onProgressEvent() { 9 | options.onRawChunk(new Uint8Array(xhr.response)); 10 | } 11 | 12 | function onLoadEvent() { 13 | options.onRawComplete({ 14 | statusCode: xhr.status, 15 | transport: MOZ_CHUNKED, 16 | raw: xhr 17 | }); 18 | } 19 | 20 | function onStateChange() { 21 | if(this.readyState == this.HEADERS_RECEIVED) { 22 | options.onRawHeaders(new BrowserHeaders(this.getAllResponseHeaders()), this.status); 23 | } 24 | } 25 | function onError(err) { 26 | options.onRawComplete({ 27 | statusCode: 0, 28 | transport: MOZ_CHUNKED, 29 | raw: err 30 | }); 31 | } 32 | 33 | xhr.open(options.method, options.url); 34 | xhr.responseType = 'moz-chunked-arraybuffer'; 
35 | options.headers.forEach((key, values) => { 36 | xhr.setRequestHeader(key, values.join(", ")); 37 | }); 38 | if (options.credentials === 'include') { 39 | xhr.withCredentials = true; 40 | } 41 | xhr.addEventListener('readystatechange', onStateChange); 42 | xhr.addEventListener('progress', onProgressEvent); 43 | xhr.addEventListener('loadend', onLoadEvent); 44 | xhr.addEventListener('error', onError); 45 | xhr.send(options.body); 46 | } 47 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "chunked-request", 3 | "version": "0.7.1", 4 | "main": "lib/index.js", 5 | "jsnext:main": "src/index.js", 6 | "repository": "https://github.com/jonnyreeves/chunked-request", 7 | "license": "MIT", 8 | "keywords": [ 9 | "request", 10 | "chunked", 11 | "transfer", 12 | "comet", 13 | "xhr", 14 | "fetch" 15 | ], 16 | "scripts": { 17 | "prepublish": "npm run clean && npm run build:lib", 18 | "clean": "rm -rf build/*", 19 | "build:integ": "webpack", 20 | "build:lib": "mkdir -p lib && babel --out-dir lib src", 21 | "lint": "eslint .", 22 | "test": "npm run lint && npm run test:integ", 23 | "test:integ": "npm run build:integ && ./run-karma.sh --single-run", 24 | "release": "./release.sh ${npm_package_version}" 25 | }, 26 | "devDependencies": { 27 | "babel-cli": "^6.23.0", 28 | "babel-core": "^6.23.1", 29 | "babel-loader": "^6.3.2", 30 | "babel-preset-es2015": "^6.13.2", 31 | "cookie": "^0.3.1", 32 | "eslint": "^3.3.1", 33 | "jasmine-core": "^2.4.1", 34 | "karma": "^1.2.0", 35 | "karma-chrome-launcher": "^1.0.1", 36 | "karma-jasmine": "^1.0.2", 37 | "karma-sauce-launcher": "^1.0.0", 38 | "karma-sourcemap-loader": "^0.3.7", 39 | "lodash": "^4.15.0", 40 | "text-encoding": "^0.6.0", 41 | "url": "^0.11.0", 42 | "webpack": "^2.2.1" 43 | }, 44 | "dependencies": { 45 | "browser-headers": "^0.3.1" 46 | } 47 | } 48 | 
-------------------------------------------------------------------------------- /src/impl/xhr.js: -------------------------------------------------------------------------------- 1 | import { BrowserHeaders } from 'browser-headers'; 2 | 3 | export const XHR = 'xhr'; 4 | 5 | export default function xhrRequest(options) { 6 | const textEncoder = new TextEncoder(); 7 | const xhr = new XMLHttpRequest(); 8 | let index = 0; 9 | 10 | function onProgressEvent() { 11 | const rawText = xhr.responseText.substr(index); 12 | index = xhr.responseText.length; 13 | options.onRawChunk(textEncoder.encode(rawText, { stream: true })); 14 | } 15 | 16 | function onLoadEvent() { 17 | // Force the textEncoder to flush. 18 | options.onRawChunk(textEncoder.encode("", { stream: false })); 19 | options.onRawComplete({ 20 | statusCode: xhr.status, 21 | transport: XHR, 22 | raw: xhr 23 | }); 24 | } 25 | 26 | function onStateChange() { 27 | if(this.readyState == this.HEADERS_RECEIVED) { 28 | options.onRawHeaders(new BrowserHeaders(this.getAllResponseHeaders()), this.status); 29 | } 30 | } 31 | 32 | function onError(err) { 33 | options.onRawComplete({ 34 | statusCode: 0, 35 | transport: XHR, 36 | raw: err 37 | }); 38 | } 39 | 40 | xhr.open(options.method, options.url); 41 | xhr.responseType = 'text'; 42 | options.headers.forEach((key, values) => { 43 | xhr.setRequestHeader(key, values.join(", ")); 44 | }); 45 | if (options.credentials === 'include') { 46 | xhr.withCredentials = true; 47 | } 48 | xhr.addEventListener('readystatechange', onStateChange); 49 | xhr.addEventListener('progress', onProgressEvent); 50 | xhr.addEventListener('loadend', onLoadEvent); 51 | xhr.addEventListener('error', onError); 52 | xhr.send(options.body); 53 | } 54 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [0.7.1] - 15/03/2017 2 | ### Fixed 3 | - Upgrade 
[BrowserHeaders](https://github.com/improbable-eng/js-browser-headers) dependency (#27 @MarcusLongmuir) 4 | 5 | ## [0.7.0] - 10/03/2017 6 | ### Added 7 | - Allow request headers to be supplied in any format supported by [BrowserHeaders](https://github.com/improbable-eng/js-browser-headers) (#26 @MarcusLongmuir) 8 | 9 | ## [0.6.0] - 02/03/2017 10 | ### Added 11 | - `onHeaders` callback (#25 @MarcusLongmuir) 12 | 13 | ## [0.5.3] - 26/10/2016 14 | ### Fixed 15 | - Defer onComplete callback to prevent fetch swallowing exceptions (#23 @MarcusLongmuir) 16 | 17 | ## [0.5.2] - 07/09/2016 18 | ### Fixed 19 | - Fixed a bug where PhantomJS' `Uint8Array` constructor must be invoked with a length argument otherwise it throws an exception... 20 | 21 | ## [0.5.1] - 07/09/2016 22 | ### Fixed 23 | - Fixed a bug in the xhr transport where the chunk parser would be invoked 24 | with the bytes `[110, 117, 108, 108]` (string value: `"null"`) when using Chrome's native TextEncoder implementation. 25 | 26 | ## [0.5.0] - 29/08/2016 27 | ### Added 28 | - Transport exception handling, eg: 'no route to host'. (#8 @Ruben-Hartog) 29 | - Support for WebWorkers (#11, @ariutta) 30 | - Changed `chunkParser` interface; parsers now receive data as a `Uint8Array` and now pump their own internal state; the `defaultChunkParser` continues to work as before. (#15, @ariutta) 31 | 32 | ### Fixed 33 | - Chrome ReadableStream detection (#14) 34 | 35 | ## [0.4.0] - 19/05/2016 36 | ### Added 37 | - Support responses that do not end with a trailing delimiter (#9, @MarcusLongmuir) 38 | - Switched to `loadend` event to catch failures as well as success on XHR based transports. 39 | 40 | ## [0.3.1] - 30/03/2016 41 | ### Added 42 | - npm keywords 43 | 44 | ### Fixed 45 | - Add `dist/` to `.eslintignore` to prevent build failures on release branches 46 | - Exclude dot files and other dev related junk from npm package. 
47 | 48 | ## [0.3.0] - 30/03/2016 49 | ### Added 50 | - `credentials` support, defaults to `'same-origin'` for consistency between XHR and fetch based transports. (#5, #6) 51 | 52 | ## [0.2.1] - 26/03/2016 53 | ### Added 54 | - Standalone browser artifact (fixes #4) 55 | 56 | ## [0.2.0] - 26/03/2016 57 | ### Added 58 | - Support for partial chunk parsing where the chunk does not end with the delimiter 59 | - `rawChunk` and `prevChunkSuffix` properties added to chunk parse errors supplied to `onChunk()` 60 | 61 | ## [0.1.0] - 18/03/2016 62 | ### Added 63 | - Initial release 64 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import { isObject, noop } from './util'; 2 | import defaultTransportFactory from './defaultTransportFactory'; 3 | import defaultChunkParser from './defaultChunkParser'; 4 | import { BrowserHeaders } from 'browser-headers'; 5 | 6 | // chunkedRequest will make a network request to the URL specified in `options.url` 7 | // passing chunks of data extracted by the optional `options.chunkParser` to the 8 | // optional `options.onChunk` callback. When the headers of the response are received 9 | // the optional `options.onHeaders` callback will be invoked with the headers as an 10 | // instance of BrowserHeaders and the numeric status code. When the request has 11 | // completed the optional `options.onComplete` callback will be invoked. 
12 | export default function chunkedRequest(options) { 13 | validateOptions(options); 14 | 15 | const { 16 | url, 17 | headers, 18 | method = 'GET', 19 | body, 20 | credentials = 'same-origin', 21 | onHeaders = noop, 22 | onComplete = noop, 23 | onChunk = noop, 24 | chunkParser = defaultChunkParser 25 | } = options; 26 | 27 | // parserState can be utilised by the chunkParser to hold on to state; the 28 | // defaultChunkParser uses it to keep track of any trailing text the last 29 | // delimiter in the chunk. There is no contract for parserState. 30 | let parserState; 31 | 32 | function processRawHeaders(headers, status) { 33 | onHeaders(headers, status); 34 | } 35 | 36 | function processRawChunk(chunkBytes, flush = false) { 37 | let parsedChunks = null; 38 | let parseError = null; 39 | 40 | try { 41 | [ parsedChunks, parserState ] = chunkParser(chunkBytes, parserState, flush); 42 | } catch (e) { 43 | parseError = e; 44 | parseError.chunkBytes = chunkBytes; 45 | parseError.parserState = parserState; 46 | } finally { 47 | if (parseError || (parsedChunks && parsedChunks.length > 0)) { 48 | onChunk(parseError, parsedChunks); 49 | } 50 | } 51 | } 52 | 53 | function processRawComplete(rawComplete) { 54 | if (parserState) { 55 | // Flush the parser to process any remaining state. 56 | processRawChunk(new Uint8Array(0), true); 57 | } 58 | onComplete(rawComplete); 59 | } 60 | 61 | let transport = options.transport; 62 | if (!transport) { 63 | transport = chunkedRequest.transportFactory(); 64 | } 65 | 66 | transport({ 67 | url, 68 | headers: new BrowserHeaders(headers || {}), 69 | method, 70 | body, 71 | credentials, 72 | onRawHeaders: processRawHeaders, 73 | onRawChunk: processRawChunk, 74 | onRawComplete: processRawComplete 75 | }); 76 | } 77 | 78 | // override this function to delegate to an alternative transport function selection 79 | // strategy; useful when testing. 
80 | chunkedRequest.transportFactory = defaultTransportFactory; 81 | 82 | function validateOptions(o) { 83 | // Required. 84 | if (!isObject(o)) throw new Error('Invalid options argument'); 85 | if (typeof o.url !== 'string' || o.length === 0) throw new Error('Invalid options.url value'); 86 | 87 | // Optional. 88 | if (o.onComplete && typeof o.onComplete !== 'function') throw new Error('Invalid options.onComplete value'); 89 | if (o.onHeaders && typeof o.onHeaders !== 'function') throw new Error('Invalid options.onHeaders value'); 90 | if (o.onChunk && typeof o.onChunk !== 'function') throw new Error('Invalid options.onChunk value'); 91 | if (o.chunkParser && typeof o.chunkParser !== 'function') throw new Error('Invalid options.chunkParser value'); 92 | } 93 | -------------------------------------------------------------------------------- /karma.conf.js: -------------------------------------------------------------------------------- 1 | // Karma configuration 2 | // Generated on Wed Feb 17 2016 15:48:21 GMT+0000 (GMT) 3 | 4 | /*eslint-env node*/ 5 | /*eslint no-var: 0*/ 6 | module.exports = function(config) { 7 | 8 | // Browsers to run on Sauce Labs 9 | // Check out https://saucelabs.com/platforms for all browser/OS combos 10 | var customLaunchers = { 11 | /* 12 | // Currently disabled as chunked-transfer responses appear to fail on *all* 13 | // SauceLabs MacOS platforms? 
14 | 'SL_Safari': { 15 | base: 'SauceLabs', 16 | browserName: 'safari', 17 | platform: 'OS X 10.11' 18 | }, 19 | */ 20 | 'SL_Chrome': { 21 | base: 'SauceLabs', 22 | browserName: 'chrome', 23 | platform: 'linux' 24 | }, 25 | 'SL_Firefox': { 26 | base: 'SauceLabs', 27 | browserName: 'firefox', 28 | platform: 'linux' 29 | }, 30 | 'SL_IE10': { 31 | base: 'SauceLabs', 32 | browserName: 'internet explorer', 33 | platform: 'Windows 7', 34 | version: '10' 35 | } 36 | }; 37 | 38 | var reporters = ['dots']; 39 | var browsers = []; 40 | var singlerun = false; 41 | var concurrency = Infinity; 42 | 43 | if (process.env.SAUCE_USERNAME) { 44 | reporters.push('saucelabs'); 45 | Array.prototype.push.apply(browsers, Object.keys(customLaunchers)); 46 | singlerun = true; 47 | concurrency = 1; 48 | } 49 | 50 | config.set({ 51 | 52 | // base path that will be used to resolve all patterns (eg. files, exclude) 53 | basePath: '', 54 | 55 | 56 | // frameworks to use 57 | // available frameworks: https://npmjs.org/browse/keyword/karma-adapter 58 | frameworks: ['jasmine'], 59 | 60 | sauceLabs: { 61 | recordScreenshots: false, 62 | connectOptions: { 63 | port: 5757, 64 | logfile: 'sauce_connect.log' 65 | }, 66 | public: 'public' 67 | }, 68 | 69 | // list of files / patterns to load in the browser 70 | files: [ 71 | 'node_modules/text-encoding/lib/encoding.js', 72 | 'build/integration-tests.js' 73 | ], 74 | 75 | 76 | // list of files to exclude 77 | exclude: [ 78 | ], 79 | 80 | proxies: { 81 | '/chunked-response': 'http://localhost:2001/chunked-response', 82 | '/chunked-utf8-response': 'http://localhost:2001/chunked-utf8-response', 83 | '/split-chunked-response': 'http://localhost:2001/split-chunked-response', 84 | '/error-response': 'http://localhost:2001/error-response', 85 | '/echo-response': 'http://localhost:2001/echo-response' 86 | }, 87 | 88 | 89 | // preprocess matching files before serving them to the browser 90 | // available preprocessors: 
https://npmjs.org/browse/keyword/karma-preprocessor 91 | preprocessors: { 92 | '**/*.js': ['sourcemap'] 93 | }, 94 | 95 | 96 | // test results reporter to use 97 | // possible values: 'dots', 'progress' 98 | // available reporters: https://npmjs.org/browse/keyword/karma-reporter 99 | reporters: reporters, 100 | 101 | 102 | // web server port 103 | port: 9876, 104 | 105 | 106 | // enable / disable colors in the output (reporters and logs) 107 | colors: true, 108 | 109 | 110 | // level of logging 111 | // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG 112 | logLevel: config.LOG_INFO, 113 | 114 | 115 | // enable / disable watching file and executing tests whenever any file changes 116 | autoWatch: true, 117 | 118 | 119 | browsers: browsers, 120 | captureTimeout: 120000, 121 | browserNoActivityTimeout: 60000, 122 | customLaunchers: customLaunchers, 123 | 124 | // Continuous Integration mode 125 | // if true, Karma captures browsers, runs the tests and exits 126 | singleRun: singlerun, 127 | 128 | // Concurrency level 129 | // how many browser should be started simultaneous 130 | concurrency: concurrency 131 | }) 132 | }; 133 | -------------------------------------------------------------------------------- /test/server/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const http = require('http'); 4 | const url = require('url'); 5 | const cookieParser = require('cookie'); 6 | 7 | // Which port should HTTP traffic be served over? 8 | const httpPort = process.env.HTTP_PORT || 2001; 9 | 10 | // How frequently should chunks be written to the response? Note that we have no 11 | // control over when chunks are actually emitted to the client so it's best to keep 12 | // this value high and pray to the gods of TCP. 
13 | const chunkIntervalMs = 1000; 14 | 15 | function formatChunk(chunkNumber, numEntries, delimitLastMessage) { 16 | let data = ''; 17 | for (let i = 0; i < numEntries; i++) { 18 | data += '{ "chunk": "#' + chunkNumber + '", "data": "#' + i + '" }'; 19 | if (delimitLastMessage || i < numEntries - 1) { 20 | data += '\n'; 21 | } 22 | } 23 | return data; 24 | } 25 | 26 | function readRequestBody(req, callback) { 27 | const body = []; 28 | req 29 | .on('data', function (chunk) { 30 | body.push(chunk); 31 | }) 32 | .on('end', function () { 33 | callback(Buffer.concat(body).toString()); 34 | }); 35 | } 36 | 37 | function serveEchoResponse(req, res) { 38 | res.setHeader('Content-Type', 'text/html; charset=UTF-8'); 39 | res.setHeader('Transfer-Encoding', 'chunked'); 40 | 41 | readRequestBody(req, body => { 42 | res.write(JSON.stringify({ 43 | headers: req.headers, 44 | method: req.method, 45 | cookies: cookieParser.parse(req.headers.cookie || ''), 46 | body 47 | }) + "\n"); 48 | res.end(); 49 | }); 50 | } 51 | 52 | function serveSplitChunkedResponse(req, res) { 53 | res.setHeader('Content-Type', 'text/html; charset=UTF-8'); 54 | res.setHeader('Transfer-Encoding', 'chunked'); 55 | 56 | let firstChunk = formatChunk(1, 2, true); 57 | let secondChunk = formatChunk(2, 2, false); 58 | 59 | secondChunk = firstChunk.substr(firstChunk.length - 5) + secondChunk; 60 | firstChunk = firstChunk.substr(0, firstChunk.length - 5); 61 | 62 | res.write(firstChunk); 63 | setTimeout(function () { 64 | res.write(secondChunk); 65 | res.end(); 66 | }, chunkIntervalMs); 67 | } 68 | 69 | function serveChunkedUtf8Response(req, res) { 70 | res.setHeader('Content-Type', 'text/html; charset=UTF-8'); 71 | res.setHeader('Transfer-Encoding', 'chunked'); 72 | res.write(JSON.stringify({ "message": "𝌆" }) + "\n"); 73 | res.end(); 74 | } 75 | 76 | function serveChunkedResponse(req, res) { 77 | const query = url.parse(req.url, true).query; 78 | const numChunks = parseInt(query.numChunks, 10) || 4; 79 | const 
entriesPerChunk = parseInt(query.entriesPerChunk, 10) || 2; 80 | const delimitLast = parseInt(query.delimitLast, 10)===1; 81 | 82 | res.setHeader('Content-Type', 'text/html; charset=UTF-8'); 83 | res.setHeader('Transfer-Encoding', 'chunked'); 84 | res.setHeader('My-Header', 'My-Header-Value'); 85 | 86 | // Start at 1 as we serve the first chunk immediately. 87 | let i = 1; 88 | res.write(formatChunk(i, entriesPerChunk, numChunks!==1 || delimitLast)); 89 | 90 | // Only serving a single chunk? We're done. 91 | if (numChunks === 1) { 92 | return res.end(); 93 | } 94 | 95 | // Let the chunks begin! 96 | const chunkIntervalId = setInterval(function () { 97 | i++; 98 | const delimitLastMessage = i!==numChunks || delimitLast; 99 | res.write(formatChunk(i, entriesPerChunk, delimitLastMessage)); 100 | if (i >= numChunks) { 101 | clearInterval(chunkIntervalId); 102 | res.end(); 103 | } 104 | }, chunkIntervalMs); 105 | } 106 | 107 | function serveErrorResponse(req, res) { 108 | res.writeHead(500, {'My-Error-Header': 'My-Error-Header-Value'}); 109 | res.write(JSON.stringify({ error: "internal" })); 110 | res.end(); 111 | } 112 | 113 | function handler(req, res) { 114 | req.parsedUrl = url.parse(req.url, true); 115 | 116 | switch (req.parsedUrl.pathname) { 117 | case '/chunked-response': 118 | return serveChunkedResponse(req, res); 119 | case '/chunked-utf8-response': 120 | return serveChunkedUtf8Response(req, res); 121 | case '/split-chunked-response': 122 | return serveSplitChunkedResponse(req, res); 123 | case '/echo-response': 124 | return serveEchoResponse(req, res); 125 | case '/error-response': 126 | return serveErrorResponse(req, res); 127 | } 128 | } 129 | 130 | console.log("Serving on http://localhost:" + httpPort); 131 | http.createServer(handler).listen(httpPort); -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # chunked-request 2 | > 
Compatibility layer for efficient streaming of chunked-transfer encoded responses 3 | 4 | ## Somewhat deprecated in favor of [jonnyreeves/fetch-readablestream](https://github.com/jonnyreeves/fetch-readablestream/) 5 | 6 | You can leverage [chunked-transfer](https://en.wikipedia.org/wiki/Chunked_transfer_encoding) encoded responses on your service tier to provide partial responses to the client before the entire response has been sent. 7 | 8 | At the time of writing (August 2016) there is fragmented support for efficient chunked transfer encoding in Javascript with `moz-chunked-text` provided only in Firefox and `ReadableStream` support only present in Chrome. Other browsers need to fall-back to substring'ing the `responseText` property when the XHR's readyState event is fired. 9 | 10 | This library aims to smooth over the available implementations and provide a consistent API for dealing with cross-browser support. 11 | 12 | ## Installation 13 | via npm as an ES5/ES6 module: 14 | 15 | ```bash 16 | $ npm install chunked-request 17 | ``` 18 | 19 | or as a standalone ES5 browser script by obtaining `dist/chunked-request.js` from a [tagged release](https://github.com/jonnyreeves/chunked-request/releases). 20 | 21 | ## Browser Support 22 | This library is tested against IE 10, Safari, Firefox and Chrome. 
It relies on browser support for [TypedArray](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray), [TextEncoder](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder) and [TextDecoder](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder) Browser APIs; for legacy environments such as Safari and IE10, you will need to supply one or more of the polyfills listed below:
Defaults to `same-origin` for consistency between fetch and XHR based transport; note that a value of `omit` will not affect XHR based transports which will always send cookies with requests made against the same origin.
85 | 86 | #### onHeaders (optional) 87 | A function which implements the following interface: 88 | 89 | ```js 90 | (headers, statusCode) => undefined 91 | ``` 92 | 93 | A function which will be invoked once when the browser has returned the headers of the response. This will be invoked *before* the first `onChunk` callback. This function is invoked with two arguments: 94 | 95 | * `headers` - An instance of [BrowserHeaders](https://github.com/improbable-eng/js-browser-headers) 96 | * `statusCode` - HTTP status code returned by the underlying transport 97 | 98 | 99 | #### onComplete (optional) 100 | A function which implements the following interface: 101 | 102 | ```js 103 | ({ statusCode, transport, raw }) => undefined 104 | ``` 105 | 106 | A function which will be invoked once when the browser has closed the connection to the server. This function is invoked with a single argument which contains the following properties: 107 | 108 | * `statusCode` - HTTP status code returned by the underlying transport 109 | * `transport` - The transport used for the request (see `options.transport`) 110 | * `raw` - The underlying object used to make the request; typically an XHR or fetch response depending on the `transport` value. 111 | 112 | Failed connections will have a status code of 0. Note that the `onChunk` option should be used to process the incoming response body. 113 | 114 | #### transport (optional) 115 | A function which implements the following interface: 116 | 117 | ```js 118 | ({ url, headers, method, body, credentials, onComplete, onRawChunk }) => undefined 119 | ``` 120 | 121 | The underlying function used to make the request, see the provided implementations if you wish to provide a custom extension. Note that you must supply a Uint8Array to the `onRawChunk` callback. 122 | 123 | If no value is supplied the `chunkedRequest.transportFactory` function will be invoked to determine which transport method to use. 
The default `transportFactory` will attempt to select the best available method for the current platform; but you can override this method for substituting a test-double or custom implementation. 124 | 125 | The `headers` property is an instance of [BrowserHeaders](https://github.com/improbable-eng/js-browser-headers). 126 | 127 | 128 | ## Writing a Custom Chunk Parser 129 | The `chunkParser` takes a 'chunk' of bytes in the form of a `Uint8Array` which were provided by the remote server and then converts it into the value passed to the `onChunk` callback (see `options.onChunk`). In it's simplest form the `chunkParser` acts as a passthru; the following example converts the supplied bytes into a string: 130 | 131 | ```js 132 | chunkedRequest({ 133 | chunkParser(bytes) { 134 | const str = utf8BytesToString(bytes); 135 | return [ str ]; 136 | } 137 | onChunk(err, str) { 138 | console.log(`Chunk recieved: ${str}`); 139 | } 140 | } 141 | ``` 142 | 143 | 144 | Chunk Parsers will typically be dealing with structured data (eg: JSON literals) where a message can only be parsed if it is well formed (ie: a complete JSON literal). Because of the nature of chunked transfer, the server may end up flushing a chunk of data to the browser that contains an incomplete datastructure. The example below illustrates this where the first chunk from the server (Chunk 1) has an incomplete JSON literal which is subsiquently completed by the proceeding chunk (Chunk 2). 
145 | 146 | ``` 147 | Server (Chunk 1)> { "name": "Jonny" }\n{ "name": "Frank" }\n{ "na 148 | Server (Chunk 2)> me": "Bob" } 149 | ``` 150 | 151 | A naieve chunk parser implementation would attempt to parse the JSON literals contained in each chunk like so: 152 | 153 | ```js 154 | chunkParser(bytes) { 155 | const jsonLiterals = utf8BytesToString(bytes).split("\n"); 156 | // This will not work; Array index 2 `'{ "nam' is an incomplete JSON 157 | // literal and will cause a SyntaxError from JSON.parse 158 | return [ jsonLiterals.map(v => JSON.parse(v)) ]; 159 | } 160 | ``` 161 | 162 | Instead, the chunkParser should make use of the `state` object to retain any incomplete messages so they can be processed in the next pass: 163 | 164 | ```js 165 | chunkParser(bytes, state = {}) { 166 | const jsonLiterals = utf8BytesToString(bytes).split("\n"); 167 | 168 | // Does the state object contain any data that was not parsed 169 | // in a previous pass (see below). 170 | if (state.trailer) { 171 | // Glue the data back together for a (potentially) complete literal. 172 | jsonLiterals[0] = `${state.trailer}${jsonLiterals[0]}`; 173 | } 174 | 175 | // Check to see if the last literal parsed from this chunk ended with a 176 | // message delimiter. 177 | if (jsonLiterals[jsonLiterals.length-1] !== "\n") { 178 | // move the last entry into the parser's state as it's incomplete; we 179 | // can process it on the next pass. 180 | state.trailer = jsonLiterals.pop(); 181 | } 182 | 183 | return [ jsonLiterals.map(v => JSON.parse(v)), state ]; 184 | } 185 | ``` 186 | 187 | Finally, stateful chunk parsers must observe the third argument, `flush`. This flag will be true when the server has closed the conneciton indicating that there will be no further data. The chunkParser must process any remaining data in the state object at this point. 
188 | -------------------------------------------------------------------------------- /test/integ/chunked-request.spec.js: -------------------------------------------------------------------------------- 1 | import chunkedRequest from '../../src/index'; 2 | import isEqual from 'lodash/isEqual'; 3 | import isObject from 'lodash/isObject'; 4 | import { BrowserHeaders } from 'browser-headers'; 5 | 6 | // These integration tests run through Karma; check `karma.conf.js` for 7 | // configuration. Note that the dev-server which provides the `/chunked-response` 8 | // endpoint is proxied through karma to work around CORS constraints. 9 | describe('chunked-request', () => { 10 | 11 | it('should parse a response that consists of a single chunk', done => { 12 | const receivedChunks = []; 13 | 14 | let onHeadersCalled = false; 15 | const onHeaders = (headers, status) => { 16 | expect(status).toBe(200, 'status 200'); 17 | expect(isEqual(headers.get("my-header"), ["My-Header-Value"])).toBe(true, 'received headers'); 18 | onHeadersCalled = true; 19 | }; 20 | 21 | const onComplete = () => { 22 | const chunkErrors = receivedChunks.filter(v => v instanceof Error); 23 | 24 | expect(onHeadersCalled).toBe(true); 25 | expect(receivedChunks.length).toBe(1, 'receivedChunks'); 26 | expect(chunkErrors.length).toBe(0, 'of which errors'); 27 | expect(isEqual(receivedChunks, [ [ {chunk: '#1', data: '#0'} ] ])).toBe(true, 'parsed chunks'); 28 | 29 | done(); 30 | }; 31 | 32 | chunkedRequest({ 33 | url: `/chunked-response?numChunks=1&entriesPerChunk=1&delimitLast=1`, 34 | onChunk: (err, chunk) => receivedChunks.push(err || chunk), 35 | onHeaders, 36 | onComplete, 37 | }); 38 | }); 39 | 40 | it('should supply a Uint8Array to the chunkParser', done => { 41 | let actual = false; 42 | 43 | const onComplete = () => { 44 | expect(actual).toBe(true); 45 | done(); 46 | }; 47 | 48 | chunkedRequest({ 49 | url: `/chunked-response?numChunks=1&entriesPerChunk=1&delimitLast=1`, 50 | chunkParser: bytes => { 
actual = (bytes instanceof Uint8Array); }, 51 | onComplete 52 | }); 53 | }); 54 | 55 | it('should parse utf8 responses', done => { 56 | const receivedChunks = []; 57 | 58 | const onComplete = () => { 59 | const chunkErrors = receivedChunks.filter(v => v instanceof Error); 60 | 61 | expect(receivedChunks.length).toBe(1, 'receivedChunks'); 62 | expect(chunkErrors.length).toBe(0, 'of which errors'); 63 | expect(isEqual(receivedChunks, [ [ {message: "𝌆"} ] ])).toBe(true, 'parsed chunks'); 64 | 65 | done(); 66 | }; 67 | 68 | chunkedRequest({ 69 | url: `/chunked-utf8-response`, 70 | onChunk: (err, chunk) => receivedChunks.push(err || chunk), 71 | onComplete 72 | }); 73 | }); 74 | 75 | it('should parse a response that consists of two chunks and ends with a delimiter', done => { 76 | const receivedChunks = []; 77 | 78 | let onHeadersCalled = false; 79 | let onChunkCalled = false; 80 | const onHeaders = (headers, status) => { 81 | expect(onChunkCalled).toBe(false, 'onChunk should not be called before onHeaders'); 82 | expect(status).toBe(200, 'status 200'); 83 | onHeadersCalled = true; 84 | }; 85 | 86 | const onComplete = () => { 87 | const chunkErrors = receivedChunks.filter(v => v instanceof Error); 88 | 89 | expect(receivedChunks.length).toBe(3, 'receivedChunks'); 90 | expect(chunkErrors.length).toBe(0, 'of which errors'); 91 | expect(isEqual(receivedChunks, [ 92 | [ {chunk: '#1', data: '#0'} ], 93 | [ {chunk: '#2', data: '#0'} ], 94 | [ {chunk: '#3', data: '#0'} ] 95 | ])).toBe(true, 'parsed chunks'); 96 | 97 | done(); 98 | }; 99 | 100 | chunkedRequest({ 101 | url: `/chunked-response?numChunks=3&entriesPerChunk=1&delimitLast=1`, 102 | onChunk: (err, chunk) => { 103 | expect(onHeadersCalled).toBe(true, 'onHeaders should be called before the first onChunk'); 104 | onChunkCalled = true; 105 | receivedChunks.push(err || chunk) 106 | }, 107 | onHeaders, 108 | onComplete 109 | }); 110 | }); 111 | 112 | it('should parse a response that consists of two chunks and does not end 
with a delimiter', done => { 113 | const receivedChunks = []; 114 | 115 | const onComplete = () => { 116 | const chunkErrors = receivedChunks.filter(v => v instanceof Error); 117 | 118 | expect(receivedChunks.length).toBe(3, 'receivedChunks'); 119 | expect(chunkErrors.length).toBe(0, 'of which errors'); 120 | expect(isEqual(receivedChunks, [ 121 | [ {chunk: '#1', data: '#0'} ], 122 | [ {chunk: '#2', data: '#0'} ], 123 | [ {chunk: '#3', data: '#0'} ] 124 | ])).toBe(true, 'parsed chunks'); 125 | 126 | done(); 127 | }; 128 | 129 | chunkedRequest({ 130 | url: `/chunked-response?numChunks=3&entriesPerChunk=1&delimitLast=0`, 131 | onChunk: (err, chunk) => { 132 | receivedChunks.push(err || chunk) 133 | }, 134 | onComplete 135 | }); 136 | }); 137 | 138 | it('should handle incomplete JSON chunks in the response', done => { 139 | const receivedChunks = []; 140 | 141 | const onComplete = () => { 142 | const chunkErrors = receivedChunks.filter(v => v instanceof Error); 143 | 144 | expect(receivedChunks.length).toBe(3, 'receivedChunks'); 145 | expect(chunkErrors.length).toBe(0, 'of which errors'); 146 | expect(isEqual(receivedChunks, [ 147 | [ {chunk: '#1', data: '#0'} ], 148 | [ {chunk: '#1', data: '#1'}, {chunk: '#2', data: '#0'} ], 149 | [ {chunk: '#2', data: '#1'} ] 150 | ])).toBe(true, 'parsed chunks'); 151 | 152 | done(); 153 | }; 154 | 155 | chunkedRequest({ 156 | url: `/split-chunked-response`, 157 | onChunk: (err, chunk) => { 158 | receivedChunks.push(err || chunk) 159 | }, 160 | onComplete 161 | }); 162 | }); 163 | 164 | it('should catch errors raised by the chunkParser and pass them to the `onChunk` callback', done => { 165 | const receivedChunks = []; 166 | const onComplete = () => { 167 | const chunkErrors = receivedChunks.filter(v => v instanceof Error); 168 | expect(chunkErrors.length).toBe(1, 'one errors caught'); 169 | expect(chunkErrors[0].message).toBe('expected'); 170 | 171 | const rawChunkStr = new TextDecoder().decode(chunkErrors[0].chunkBytes); 172 | 
expect(rawChunkStr).toBe(`{ "chunk": "#1", "data": "#0" }\n`); 173 | 174 | done(); 175 | }; 176 | 177 | chunkedRequest({ 178 | url: `/chunked-response?numChunks=1&entriesPerChunk=1&delimitLast=1`, 179 | chunkParser: (chunkBytes, state, flush) => { 180 | if (chunkBytes.length > 0 && !flush) { 181 | throw new Error("expected"); 182 | } 183 | return []; 184 | }, 185 | onChunk: (err, chunk) => { 186 | receivedChunks.push(err || chunk) 187 | }, 188 | onComplete 189 | }); 190 | }); 191 | 192 | describe('response object', () => { 193 | it('200 OK`', done => { 194 | chunkedRequest({ 195 | url: `/chunked-response?numChunks=2&entriesPerChunk=1&delimitLast=1`, 196 | onComplete: result => { 197 | expect(isObject(result)).toBe(true, 'is an object'); 198 | expect(result.statusCode).toBe(200, 'statusCode'); 199 | expect(isObject(result.raw)).toBe(true, 'raw transport agent provided'); 200 | 201 | done(); 202 | } 203 | }) 204 | }); 205 | 206 | it('500 Internal Server Error', done => { 207 | let onHeadersCalled = false; 208 | const onHeaders = (headers, status) => { 209 | expect(status).toBe(500, 'status 500'); 210 | expect(isEqual(headers.get("my-error-header"), ["My-Error-Header-Value"])).toBe(true, 'received headers'); 211 | onHeadersCalled = true; 212 | }; 213 | const onComplete = result => { 214 | expect(onHeadersCalled).toBe(true); 215 | expect(isObject(result)).toBe(true, 'is an object'); 216 | expect(result.statusCode).toBe(500, 'statusCode'); 217 | expect(isObject(result.raw)).toBe(true, 'raw transport agent provided'); 218 | 219 | done(); 220 | }; 221 | chunkedRequest({ 222 | url: `/error-response`, 223 | onHeaders, 224 | onComplete, 225 | }) 226 | }); 227 | 228 | it('should use the supplied request options', done => { 229 | const receivedChunks = []; 230 | 231 | chunkedRequest({ 232 | url: `/echo-response`, 233 | headers: { 'accept': 'application/json' }, 234 | method: 'POST', 235 | body: 'expected-body', 236 | onChunk: (err, chunk) => receivedChunks.push(err || chunk), 
237 | onComplete: () => { 238 | const chunkErrors = receivedChunks.filter(v => v instanceof Error); 239 | 240 | expect(receivedChunks.length).toBe(1, 'one chunk'); 241 | expect(chunkErrors.length).toBe(0, 'no errors'); 242 | 243 | const { headers, method, body } = receivedChunks[0][0]; 244 | expect(isObject(headers)).toBe(true, 'has headers'); 245 | expect(headers.accept).toBe("application/json", 'accept header'); 246 | expect(method).toBe("POST", 'method'); 247 | expect(body).toBe('expected-body', 'body'); 248 | 249 | done(); 250 | } 251 | }); 252 | }); 253 | 254 | 255 | describe('headers support', () => { 256 | const browserHeaders = new BrowserHeaders(); 257 | browserHeaders.append("accept", "application/json"); 258 | browserHeaders.append("myheader", "my-value"); 259 | 260 | const headerFormats = [ 261 | [{'accept': 'application/json', 'myheader': 'my-value'}, "object"], 262 | [{'accept': ['application/json'], 'myheader': ['my-value']}, "object"], 263 | ['accept: application/json\r\nmyheader: my-value', "clrf string"], 264 | [browserHeaders, "BrowserHeaders"], 265 | ]; 266 | 267 | if (typeof Headers !== "undefined") { 268 | const headers = new Headers(); 269 | headers.append("accept", "application/json"); 270 | headers.append("myheader", "my-value"); 271 | headerFormats.push([headers, "Headers"]); 272 | } 273 | 274 | if (typeof Map !== "undefined") { 275 | const headerMap = new Map();// eslint-disable-line no-undef 276 | headerMap.set("accept", "application/json"); 277 | headerMap.set("myheader", ["my-value"]); 278 | headerFormats.push([headerMap, "Map"]); 279 | } 280 | 281 | headerFormats.forEach(testCase => { 282 | const headers = testCase[0]; 283 | const testName = testCase[1]; 284 | 285 | it('should accept headers in various formats (' + testName + ')', done => { 286 | const receivedChunks = []; 287 | chunkedRequest({ 288 | url: `/echo-response`, 289 | headers: headers, 290 | method: 'POST', 291 | body: 'expected-body', 292 | onChunk: (err, chunk) => 
receivedChunks.push(err || chunk), 293 | onComplete: () => { 294 | const {headers, method, body} = receivedChunks[0][0]; 295 | expect(isObject(headers)).toBe(true, testName + ': has headers'); 296 | expect(headers.accept).toBe("application/json", testName + ': accept header'); 297 | expect(headers.myheader).toBe("my-value", testName + ': myheader header'); 298 | expect(method).toBe("POST", testName + ': method'); 299 | expect(body).toBe('expected-body', testName + ': body'); 300 | done(); 301 | } 302 | }); 303 | }); 304 | }); 305 | }); 306 | 307 | describe("credentials", () => { 308 | const cookieNames = []; 309 | 310 | function setCookie(name, value) { 311 | document.cookie = `${name}=${value}`; 312 | cookieNames.push(name); 313 | } 314 | 315 | function clearSetCookies() { 316 | cookieNames.forEach(name => { 317 | document.cookie = `${name}=false;max-age=0`; 318 | }) 319 | } 320 | 321 | afterEach(clearSetCookies); 322 | 323 | it('should honour the `credentials` flag', done => { 324 | const receivedChunks = []; 325 | 326 | setCookie('myCookie', 'myValue'); 327 | 328 | chunkedRequest({ 329 | url: `/echo-response`, 330 | method: 'GET', 331 | onChunk: (err, chunk) => receivedChunks.push(err || chunk), 332 | onComplete: () => { 333 | const chunkErrors = receivedChunks.filter(v => v instanceof Error); 334 | 335 | expect(receivedChunks.length).toBe(1, 'one chunk'); 336 | expect(chunkErrors.length).toBe(0, 'no errors'); 337 | 338 | const { cookies } = receivedChunks[0][0]; 339 | expect(isObject(cookies)).toBe(true, 'has cookies'); 340 | expect(cookies.myCookie).toEqual('myValue', 'cookie sent'); 341 | 342 | done(); 343 | } 344 | }); 345 | }); 346 | }); 347 | }); 348 | }); 349 | --------------------------------------------------------------------------------