├── samples ├── .gitignore └── Node.js │ ├── public │ ├── cancel.png │ ├── pause.png │ ├── resume.png │ ├── index.html │ ├── style.css │ └── app.js │ ├── package.json │ ├── README.md │ ├── app.js │ ├── uploader-node.js │ └── package-lock.json ├── .npmrc ├── assets ├── simple-uploader-QQ.jpg ├── simple-uploader-QQ-2.png └── simple-uploader-QQ-3.png ├── test └── unit │ ├── README.md │ ├── lib │ ├── fakeFile.js │ └── FakeXMLHttpRequestUpload.js │ └── specs │ ├── fileRemove.js │ ├── file.js │ ├── singleFile.js │ ├── fileAdd.js │ ├── event.js │ ├── folder.js │ ├── setup.js │ ├── webKitDataTransfer.js │ ├── utils.js │ └── upload.js ├── .npmignore ├── .editorconfig ├── test.sh ├── .eslintrc.js ├── .travis.yml ├── .gitignore ├── src ├── event.js ├── utils.js ├── chunk.js ├── file.js └── uploader.js ├── package.json ├── LICENSE ├── gulpfile.js ├── README_zh-CN.md ├── README.md └── dist └── uploader.min.js /samples/.gitignore: -------------------------------------------------------------------------------- 1 | tmp/ 2 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | phantomjs_cdnurl=https://npm.taobao.org/mirrors/phantomjs/ -------------------------------------------------------------------------------- /assets/simple-uploader-QQ.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simple-uploader/Uploader/HEAD/assets/simple-uploader-QQ.jpg -------------------------------------------------------------------------------- /assets/simple-uploader-QQ-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simple-uploader/Uploader/HEAD/assets/simple-uploader-QQ-2.png -------------------------------------------------------------------------------- /assets/simple-uploader-QQ-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simple-uploader/Uploader/HEAD/assets/simple-uploader-QQ-3.png -------------------------------------------------------------------------------- /test/unit/README.md: -------------------------------------------------------------------------------- 1 | # uploader test 2 | 3 | All tests are based on https://github.com/flowjs/flow.js/tree/master/test 4 | -------------------------------------------------------------------------------- /samples/Node.js/public/cancel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simple-uploader/Uploader/HEAD/samples/Node.js/public/cancel.png -------------------------------------------------------------------------------- /samples/Node.js/public/pause.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simple-uploader/Uploader/HEAD/samples/Node.js/public/pause.png -------------------------------------------------------------------------------- /samples/Node.js/public/resume.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simple-uploader/Uploader/HEAD/samples/Node.js/public/resume.png -------------------------------------------------------------------------------- /samples/Node.js/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "connect-multiparty": 
"^2.2.0", 4 | "express": "^4.3.1" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .* 2 | *.md 3 | *.yml 4 | build 5 | coverage 6 | dist/uploader.min.js.map 7 | samples 8 | explorations 9 | perf 10 | test 11 | bower.json 12 | gulpfile.js 13 | sauce_connect.log -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | charset = utf-8 6 | trim_trailing_whitespace = false 7 | insert_final_newline = true 8 | indent_style = space 9 | indent_size = 2 10 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | if [ $TESTNAME = "unit-tests" ]; then 5 | echo "Running unit-tests" 6 | export DISPLAY=:99.0 7 | sh -e /etc/init.d/xvfb start 8 | sleep 1 9 | npm run test 10 | npm run codecov 11 | elif [ $TESTNAME = "browser-tests" ]; then 12 | echo "Running browser-tests" 13 | npm run test:ci 14 | fi 15 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | node: true, 4 | browser: true 5 | }, 6 | extends: 'standard', 7 | rules: { 8 | // allow paren-less arrow functions 9 | 'arrow-parens': 0, 10 | // allow async-await 11 | 'generator-star-spacing': 0, 12 | 'no-tabs': 0, 13 | 'space-before-function-paren': 0 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /test/unit/lib/fakeFile.js: -------------------------------------------------------------------------------- 1 | function File(content, name, options) { 2 | var blob = new Blob(content) 3 | this._blob = blob 4 | this.size = blob.size 5 | this.type = blob.type 6 | this.name = name 7 | this.lastModifiedDate = new Date() 8 | this.lastModified = this.lastModifiedDate.getTime() 9 | if (options) { 10 | for (var k in options) { 11 | this[k] = options[k] 12 | } 13 | } 14 | } 15 | 16 | File.prototype = { 17 | constructor: File, 18 | slice: function () { 19 | return this._blob.slice.apply(this._blob, arguments) 20 | } 21 | } 22 | 23 | window.File = File 24 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | sudo: required 3 | dist: trusty 4 | addons: 5 | chrome: stable 6 | cache: 7 | directories: 8 | - node_modules 9 | node_js: 10 | - "6" 11 | branches: 12 | only: 13 | - master 14 | matrix: 15 | fast_finish: true 16 | include: 17 | - env: TESTNAME="unit-tests" 18 | - env: TESTNAME="browser-tests" 19 | addons: 20 | sauce_connect: true 21 | allow_failures: 22 | - env: TESTNAME="browser-tests" 23 | before_script: 24 | - "sudo chown root /opt/google/chrome/chrome-sandbox" 25 | - "sudo chmod 4755 /opt/google/chrome/chrome-sandbox" 26 | script: ./test.sh 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | .DS_Store 5 | # Runtime data 6 | pids 7 | *.pid 8 | *.seed 9 | 10 | # Directory for 
instrumented libs generated by jscoverage/JSCover 11 | lib-cov 12 | 13 | # Coverage directory used by tools like istanbul 14 | coverage 15 | 16 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 17 | .grunt 18 | 19 | # node-waf configuration 20 | .lock-wscript 21 | 22 | # Compiled binary addons (http://nodejs.org/api/addons.html) 23 | build/Release 24 | 25 | # Dependency directory 26 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git 27 | node_modules 28 | -------------------------------------------------------------------------------- /samples/Node.js/README.md: -------------------------------------------------------------------------------- 1 | # Sample code for Node.js 2 | 3 | This sample is written for [Node.js](http://nodejs.org/) and requires [Express](http://expressjs.com/) to make the sample code cleaner. 4 | 5 | To install and run: 6 | 7 | cd samples/Node.js 8 | npm install 9 | node app.js 10 | 11 | Then browse to [localhost:3000](http://localhost:3000). 12 | 13 | File chunks will be uploaded to samples/Node.js/tmp directory. 14 | 15 | ## Enabling Cross-domain Uploads 16 | 17 | If you would like to load the Uploader.js library from one domain and have your Node.js reside on another, you must allow 'Access-Control-Allow-Origin' from '*'. Please remember, there are some potential security risks with enabling this functionality. If you would still like to implement cross-domain uploads, open app.js and uncomment lines 24-31 and uncomment line 17. 18 | 19 | Then in public/index.html, on line 49, update the target with your server's address. For example: target:'http://www.example.com/upload' 20 | -------------------------------------------------------------------------------- /test/unit/specs/fileRemove.js: -------------------------------------------------------------------------------- 1 | var Uploader = require('../../../src/uploader') 2 | 3 | describe('fileRemoved event', function () { 4 | var uploader 5 | 6 | beforeEach(function () { 7 | uploader = new Uploader({ 8 | generateUniqueIdentifier: function (file) { 9 | return file.size 10 | } 11 | }) 12 | }) 13 | 14 | it('should call fileRemoved event on uploader.removeFile', function () { 15 | var valid = false 16 | var removedFile = null 17 | uploader.on('fileRemoved', function (file) { 18 | expect(file.file instanceof File).toBeTruthy() 19 | removedFile = file 20 | valid = true 21 | }) 22 | uploader.addFile(new File(['file part'], 'test')) 23 | var addedFile = uploader.files[0] 24 | uploader.removeFile(addedFile) 25 | expect(removedFile).toBe(addedFile) 26 | expect(valid).toBeTruthy() 27 | }) 28 | 29 | it('should call fileRemoved event uploaderFile.cancel', function () { 30 | var valid = false 31 | var removedFile = null 32 | uploader.on('fileRemoved', function (file) { 33 | expect(file.file instanceof File).toBeTruthy() 34 | removedFile = file 35 | valid = true 36 | }) 37 | uploader.addFile(new File(['file part'], 'test')) 38 | var addedFile = uploader.files[0] 39 | addedFile.cancel() 40 | expect(removedFile).toBe(addedFile) 41 | expect(valid).toBeTruthy() 42 | }) 43 | }) 44 | -------------------------------------------------------------------------------- /src/event.js: -------------------------------------------------------------------------------- 1 | var each = require('./utils').each 2 | 3 | var event = { 4 | 5 | _eventData: null, 6 | 7 | on: function (name, func) { 8 | if (!this._eventData) this._eventData = {} 9 | if (!this._eventData[name]) 
this._eventData[name] = [] 10 | var listened = false 11 | each(this._eventData[name], function (fuc) { 12 | if (fuc === func) { 13 | listened = true 14 | return false 15 | } 16 | }) 17 | if (!listened) { 18 | this._eventData[name].push(func) 19 | } 20 | }, 21 | 22 | off: function (name, func) { 23 | if (!this._eventData) this._eventData = {} 24 | if (!this._eventData[name] || !this._eventData[name].length) return 25 | if (func) { 26 | each(this._eventData[name], function (fuc, i) { 27 | if (fuc === func) { 28 | this._eventData[name].splice(i, 1) 29 | return false 30 | } 31 | }, this) 32 | } else { 33 | this._eventData[name] = [] 34 | } 35 | }, 36 | 37 | trigger: function (name) { 38 | if (!this._eventData) this._eventData = {} 39 | if (!this._eventData[name]) return true 40 | var args = this._eventData[name].slice.call(arguments, 1) 41 | var preventDefault = false 42 | each(this._eventData[name], function (fuc) { 43 | preventDefault = fuc.apply(this, args) === false || preventDefault 44 | }, this) 45 | return !preventDefault 46 | } 47 | } 48 | 49 | module.exports = event 50 | -------------------------------------------------------------------------------- /test/unit/specs/file.js: -------------------------------------------------------------------------------- 1 | var Uploader = require('../../../src/uploader') 2 | 3 | describe('Uploader.File functions - file', function () { 4 | var uploader 5 | var file 6 | 7 | beforeEach(function () { 8 | uploader = new Uploader({}) 9 | var rFile = new File(['xx'], 'image.jpg', { 10 | type: 'image/png' 11 | }) 12 | file = new Uploader.File(uploader, rFile, uploader) 13 | }) 14 | 15 | it('should get type', function () { 16 | expect(file.getType()).toBe('png') 17 | file.file.type = '' 18 | expect(file.getType()).toBe('') 19 | }) 20 | 21 | it('should get extension', function () { 22 | expect(file.name).toBe('image.jpg') 23 | expect(file.getExtension()).toBe('jpg') 24 | file.name = '' 25 | expect(file.getExtension()).toBe('') 26 | file.name = 'image' 27 | expect(file.getExtension()).toBe('') 28 | file.name = '.dwq.dq.wd.qdw.E' 29 | expect(file.getExtension()).toBe('e') 30 | }) 31 | 32 | it('error', function () { 33 | expect(file.error).toBe(false) 34 | }) 35 | 36 | it('getSize', function () { 37 | expect(file.getSize()).toBe(2) 38 | }) 39 | 40 | it('getFormatSize', function () { 41 | expect(file.getFormatSize()).toBe('2 bytes') 42 | }) 43 | 44 | it('isComplete', function () { 45 | expect(file.isComplete()).toBe(false) 46 | }) 47 | 48 | it('getRoot', function () { 49 | var rootFile = file.getRoot() 50 | expect(rootFile).toBe(file) 51 | }) 52 | }) 53 | -------------------------------------------------------------------------------- /test/unit/specs/singleFile.js: -------------------------------------------------------------------------------- 1 | var Uploader = require('../../../src/uploader') 2 | 3 | describe('add single file', function () { 4 | var uploader 5 | 6 | beforeEach(function () { 7 | uploader = new Uploader({ 8 | generateUniqueIdentifier: function (file) { 9 | return file.size 10 | }, 11 | singleFile: true 12 | }) 13 | }) 14 | 15 | it('should add single file', function () { 16 | uploader.addFile(new File(['file part'], 'file')) 17 | expect(uploader.files.length).toBe(1) 18 | var file = uploader.files[0] 19 | uploader.upload() 20 | expect(file.isUploading()).toBeTruthy() 21 | uploader.addFile(new File(['file part 2'], 'file2')) 22 | expect(uploader.files.length).toBe(1) 23 | expect(file.isUploading()).toBeFalsy() 24 | }) 25 | 26 | it('should fire 
remove event after adding another file', function () { 27 | var events = [] 28 | uploader.on('catchAll', function (event) { 29 | events.push(event) 30 | }) 31 | uploader.addFile(new File(['file part'], 'file')) 32 | expect(uploader.files.length).toBe(1) 33 | expect(events.length).toBe(3) 34 | expect(events[0]).toBe('fileAdded') 35 | expect(events[1]).toBe('filesAdded') 36 | expect(events[2]).toBe('filesSubmitted') 37 | 38 | var removedFile = uploader.files[0] 39 | uploader.on('fileRemoved', function(file){ 40 | expect(file).toBe(removedFile) 41 | }) 42 | uploader.addFile(new File(['file part 2'], 'file2')) 43 | expect(uploader.files.length).toBe(1) 44 | expect(events.length).toBe(7) 45 | expect(events[3]).toBe('fileAdded') 46 | expect(events[4]).toBe('filesAdded') 47 | expect(events[5]).toBe('fileRemoved') 48 | expect(events[6]).toBe('filesSubmitted') 49 | }) 50 | }) 51 | -------------------------------------------------------------------------------- /samples/Node.js/app.js: -------------------------------------------------------------------------------- 1 | process.env.TMPDIR = 'tmp'; // to avoid the EXDEV rename error, see http://stackoverflow.com/q/21071303/76173 2 | 3 | var express = require('express'); 4 | var multipart = require('connect-multiparty'); 5 | var multipartMiddleware = multipart(); 6 | var uploader = require('./uploader-node.js')('tmp'); 7 | var app = express(); 8 | 9 | // Configure access control allow origin header stuff 10 | var ACCESS_CONTROLL_ALLOW_ORIGIN = true; 11 | 12 | // Host most stuff in the public folder 13 | app.use(express.static(__dirname + '/public')); 14 | app.use(express.static(__dirname + '/../../dist')); 15 | 16 | // Handle uploads through Uploader.js 17 | app.post('/upload', multipartMiddleware, function(req, res) { 18 | uploader.post(req, function(status, filename, original_filename, identifier) { 19 | console.log('POST', status, original_filename, identifier); 20 | if (ACCESS_CONTROLL_ALLOW_ORIGIN) { 21 | res.header("Access-Control-Allow-Origin", "*"); 22 | res.header("Access-Control-Allow-Headers", "content-type") 23 | } 24 | setTimeout(function () { 25 | res.send(status); 26 | }, 500); 27 | }); 28 | }); 29 | 30 | 31 | app.options('/upload', function(req, res){ 32 | console.log('OPTIONS'); 33 | if (ACCESS_CONTROLL_ALLOW_ORIGIN) { 34 | res.header("Access-Control-Allow-Origin", "*"); 35 | res.header("Access-Control-Allow-Headers", "content-type") 36 | } 37 | res.status(200).send(); 38 | }); 39 | 40 | // Handle status checks on chunks through Uploader.js 41 | app.get('/upload', function(req, res) { 42 | uploader.get(req, function(status, filename, original_filename, identifier) { 43 | console.log('GET', status); 44 | if (ACCESS_CONTROLL_ALLOW_ORIGIN) { 45 | res.header("Access-Control-Allow-Origin", "*"); 46 | } 47 | 48 | res.status(status == 'found' ? 
200 : 204).send(status); 49 | }); 50 | }); 51 | 52 | app.get('/download/:identifier', function(req, res) { 53 | uploader.write(req.params.identifier, res); 54 | }); 55 | 56 | app.listen(3000); 57 | -------------------------------------------------------------------------------- /test/unit/specs/fileAdd.js: -------------------------------------------------------------------------------- 1 | var Uploader = require('../../../src/uploader') 2 | 3 | describe('fileAdd event', function () { 4 | var uploader 5 | 6 | beforeEach(function () { 7 | uploader = new Uploader({ 8 | generateUniqueIdentifier: function (file) { 9 | return file.size 10 | } 11 | }) 12 | }) 13 | 14 | it('should call fileAdded event', function () { 15 | var valid = false 16 | uploader.on('fileAdded', function (file) { 17 | expect(file.file instanceof File).toBeTruthy() 18 | valid = true 19 | }) 20 | uploader.addFile(new File(['file part'], 'testfile')) 21 | expect(valid).toBeTruthy() 22 | }) 23 | 24 | it('should call filesAdded event', function () { 25 | var count = 0 26 | uploader.on('filesAdded', function (files) { 27 | count = files.length 28 | }) 29 | uploader.addFiles([ 30 | new File(['file part'], 'testfile'), 31 | new File(['file 2 part'], 'testfile2') 32 | ]) 33 | expect(count).toBe(2) 34 | expect(uploader.files.length).toBe(2) 35 | }) 36 | 37 | it('should validate fileAdded', function () { 38 | uploader.on('fileAdded', function () { 39 | return false 40 | }) 41 | uploader.addFile(new File(['file part'], 'test')) 42 | expect(uploader.files.length).toBe(0) 43 | expect(uploader.fileList.length).toBe(0) 44 | }) 45 | 46 | it('should validate filesAdded', function () { 47 | uploader.on('filesAdded', function () { 48 | return false 49 | }) 50 | uploader.addFile(new File(['file part'], 'test')) 51 | expect(uploader.files.length).toBe(0) 52 | expect(uploader.fileList.length).toBe(0) 53 | }) 54 | 55 | it('should validate fileAdded and filesAdded', function () { 56 | uploader.on('fileAdded', function () { 57 | return false 58 | }) 59 | var valid = false 60 | uploader.on('filesAdded', function (files) { 61 | valid = files.length === 0 62 | }) 63 | uploader.addFile(new File(['file part'], 'test')) 64 | expect(valid).toBeTruthy() 65 | }) 66 | }) 67 | -------------------------------------------------------------------------------- /test/unit/specs/event.js: -------------------------------------------------------------------------------- 1 | var _ = require('../../../src/utils') 2 | var uevent = require('../../../src/event') 3 | 4 | describe('event', function () { 5 | 6 | beforeEach(function () { 7 | this.eventBus = _.extend({}, uevent) 8 | }) 9 | 10 | it('_eventData', function () { 11 | expect(_.isPlainObject(this.eventBus)).toBe(true) 12 | expect(this.eventBus._eventData).toBe(null) 13 | var f = function () {} 14 | this.eventBus.on('uploaderEvent', f) 15 | this.eventBus.on('uploaderEvent', f) 16 | expect(this.eventBus._eventData).not.toBe(null) 17 | expect(_.isArray(this.eventBus._eventData.uploaderEvent)).toBe(true) 18 | expect(this.eventBus._eventData.uploaderEvent.length).toBe(1) 19 | }) 20 | 21 | it('on & trigger', function () { 22 | var a = 0 23 | var a2 = 0 24 | var f = function () { 25 | a = 1 26 | } 27 | var f2 = function (b) { 28 | a2 = b 29 | } 30 | this.eventBus.on('uploaderEvent', f) 31 | expect(a).toBe(0) 32 | this.eventBus.trigger('uploaderEvent') 33 | expect(a).toBe(1) 34 | this.eventBus.on('uploaderEvent', f2) 35 | expect(a2).toBe(0) 36 | this.eventBus.trigger('uploaderEvent', 3) 37 | expect(a).toBe(1) 38 | 
expect(a2).toBe(3) 39 | }) 40 | 41 | it('off & trigger', function () { 42 | var a = 0 43 | var a2 = 0 44 | var f = function (b) { 45 | a = b 46 | } 47 | var f2 = function (b) { 48 | a2 = b 49 | } 50 | this.eventBus.on('uploaderEvent', f) 51 | expect(a).toBe(0) 52 | this.eventBus.trigger('uploaderEvent', 1) 53 | expect(a).toBe(1) 54 | this.eventBus.on('uploaderEvent', f2) 55 | expect(a2).toBe(0) 56 | this.eventBus.trigger('uploaderEvent', 3) 57 | expect(a).toBe(3) 58 | expect(a2).toBe(3) 59 | this.eventBus.off('uploaderEvent', f2) 60 | this.eventBus.trigger('uploaderEvent', 4) 61 | expect(a).toBe(4) 62 | expect(a2).toBe(3) 63 | this.eventBus.off('uploaderEvent') 64 | this.eventBus.trigger('uploaderEvent', 5) 65 | expect(a).toBe(4) 66 | expect(a2).toBe(3) 67 | }) 68 | 69 | }) 70 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "simple-uploader.js", 3 | "version": "0.6.0", 4 | "author": "dolymood ", 5 | "license": "MIT", 6 | "description": "Uploader library implements html5 file upload and provides multiple simultaneous, stable, fault tolerant and resumable uploads", 7 | "keywords": [ 8 | "simple-uploader", 9 | "simple-uploader.js", 10 | "uploader", 11 | "uploader.js", 12 | "resumable.js", 13 | "flow.js", 14 | "file upload", 15 | "resumable upload", 16 | "chunk upload", 17 | "html5 upload", 18 | "javascript upload", 19 | "upload" 20 | ], 21 | "main": "dist/uploader.js", 22 | "repository": { 23 | "type": "git", 24 | "url": "https://github.com/simple-uploader/Uploader.git" 25 | }, 26 | "bugs": "https://github.com/simple-uploader/Uploader/issues", 27 | "homepage": "https://github.com/simple-uploader/Uploader", 28 | "scripts": { 29 | "dev": "gulp watch", 30 | "build": "gulp build", 31 | "codecov": "codecov", 32 | "test": "gulp", 33 | "test:unit": "gulp test", 34 | "test:cover": "gulp cover", 35 | "test:ci": "gulp ci", 36 | "release": "gulp release" 37 | }, 38 | "devDependencies": { 39 | "browserify-versionify": "^1.0.6", 40 | "codecov": "^3.6.5", 41 | "eslint": "^4.2.0", 42 | "eslint-config-standard": "^6.1.0", 43 | "eslint-plugin-promise": "^3.5.0", 44 | "eslint-plugin-standard": "^3.0.1", 45 | "gulp": "^3.9.1", 46 | "gulp-browserify": "^0.5.1", 47 | "gulp-concat": "^2.6.1", 48 | "gulp-eslint": "^4.0.0", 49 | "gulp-git": "^2.4.1", 50 | "gulp-header": "^1.8.8", 51 | "gulp-sourcemaps": "^2.6.0", 52 | "gulp-tag-version": "^1.3.0", 53 | "gulp-uglify": "^3.0.0", 54 | "jasmine": "^2.6.0", 55 | "jasmine-core": "^2.6.4", 56 | "karma": "^1.7.0", 57 | "karma-chrome-launcher": "^2.1.1", 58 | "karma-commonjs": "^1.0.0", 59 | "karma-coverage": "^1.1.1", 60 | "karma-firefox-launcher": "^1.0.1", 61 | "karma-jasmine": "^1.1.0", 62 | "karma-phantomjs-launcher": "^1.0.4", 63 | "karma-sauce-launcher": "^1.1.0", 64 | "pump": "^1.0.2", 65 | "sinon": "1.7.3" 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /test/unit/specs/folder.js: -------------------------------------------------------------------------------- 1 | var Uploader = require('../../../src/uploader') 2 | 3 | describe('Uploader.File functions - folder', function () { 4 | var uploader 5 | var file 6 | 7 | beforeEach(function () { 8 | uploader = new Uploader({}) 9 | var rFile = new File(['xx'], 'image.jpg', { 10 | type: 'image/png' 11 | }) 12 | rFile.relativePath = 'a/b/image.jpg' 13 | file = new Uploader.File(uploader, rFile, uploader) 14 | 
uploader.files.push(file) 15 | }) 16 | 17 | it('should get type', function () { 18 | expect(file.getType()).toBe('png') 19 | file.file.type = '' 20 | expect(file.getType()).toBe('') 21 | }) 22 | 23 | it('should get extension', function () { 24 | expect(file.name).toBe('image.jpg') 25 | expect(file.getExtension()).toBe('jpg') 26 | file.name = '' 27 | expect(file.getExtension()).toBe('') 28 | file.name = 'image' 29 | expect(file.getExtension()).toBe('') 30 | file.name = '.dwq.dq.wd.qdw.E' 31 | expect(file.getExtension()).toBe('e') 32 | }) 33 | 34 | it('getSize', function () { 35 | expect(file.getSize()).toBe(2) 36 | }) 37 | 38 | it('getFormatSize', function () { 39 | expect(file.getFormatSize()).toBe('2 bytes') 40 | }) 41 | 42 | it('error', function () { 43 | expect(file.error).toBe(false) 44 | file.error = true 45 | expect(file.error).toBe(true) 46 | }) 47 | 48 | it('getRoot', function () { 49 | var rootFile = file.getRoot() 50 | expect(rootFile.files[0]).toBe(file) 51 | expect(rootFile.fileList[0].fileList[0]).toBe(file) 52 | expect(rootFile.getSize()).toBe(2) 53 | expect(rootFile.getFormatSize()).toBe('2 bytes') 54 | expect(rootFile.getExtension()).toBe('') 55 | expect(rootFile.getType()).toBe('folder') 56 | expect(rootFile.error).toBe(false) 57 | file._error() 58 | expect(rootFile.error).toBe(true) 59 | file._resetError() 60 | expect(rootFile.isComplete()).toBe(false) 61 | expect(rootFile.isUploading()).toBe(false) 62 | expect(uploader.getRoot()).toBe(uploader) 63 | uploader.removeFile(rootFile) 64 | expect(rootFile.parent).toBe(null) 65 | expect(file.parent).toBe(null) 66 | expect(uploader.files.length).toBe(0) 67 | expect(uploader.fileList.length).toBe(0) 68 | }) 69 | }) 70 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 doly mood 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 23 | The MIT License (MIT) 24 | 25 | Copyright (c) 2011, 23, http://www.23developer.com 26 | 2013, Aidas Klimas 27 | 28 | Permission is hereby granted, free of charge, to any person obtaining a copy of 29 | this software and associated documentation files (the "Software"), to deal in 30 | the Software without restriction, including without limitation the rights to 31 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 32 | the Software, and to permit persons to whom the Software is furnished to do so, 33 | subject to the following conditions: 34 | 35 | The above copyright notice and this permission notice shall be included in all 36 | copies or substantial portions of the Software. 37 | 38 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 39 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 40 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 41 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 42 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 43 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 44 | -------------------------------------------------------------------------------- /samples/Node.js/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | simple-uploader.js - Multiple simultaneous, stable and resumable uploads via the HTML5 File API 5 | 6 | 7 | 8 | 9 | 10 | 11 |
12 | 13 |

simple-uploader.js

14 |

It's a JavaScript library providing multiple simultaneous, stable and resumable uploads via the HTML5 File API.

15 | 16 |

The library is designed to introduce fault tolerance into the upload of large files over HTTP. This is done by splitting each file into small chunks; whenever the upload of a chunk fails, it is retried until the whole procedure completes. This allows uploads to resume automatically after a network connection is lost, either locally or on the server. Additionally, it allows users to pause and resume uploads without losing state.

17 | 18 |
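<!--
  A minimal sketch of the chunked, resumable flow described above, assuming the bundled
  dist/uploader.js has been loaded. The option names, event names and CSS selectors mirror
  the sample in samples/Node.js/public/app.js; the '/upload' target is only illustrative.

  var uploader = new Uploader({
    target: '/upload',        // every chunk is POSTed here
    chunkSize: 1024 * 1024,   // each file is split into 1 MB chunks
    testChunks: true          // ask the server which chunks it already has, so uploads can resume
  });
  uploader.assignDrop(document.querySelector('.uploader-drop'));      // drag & drop area
  uploader.assignBrowse(document.querySelector('.uploader-browse'));  // file picker link
  uploader.on('filesSubmitted', function () { uploader.upload(); });  // start (or resume) uploading
  uploader.on('fileComplete', function (rootFile) { /* all chunks of this file are done */ });
-->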

simple-uploader.js relies on the HTML5 File API and the ability to chunk files into smaller pieces. Currently, this means that support is limited to Firefox 4+ and Chrome 11+.

19 | 20 |
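<!--
  A hedged sketch of the support check implied above: the sample only proceeds when the library
  reports that the File API and file slicing are available (see the r.support check in
  samples/Node.js/public/app.js); the class name below comes from that sample and style.css.

  var uploader = new Uploader();
  if (!uploader.support) {
    // no File API / slicing available: show the fallback message instead of the drop area
    document.querySelector('.uploader-error').style.display = 'block';
  }
-->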
21 | 22 |

Demo

23 | 24 |
25 | Your browser, unfortunately, is not supported by simple-uploader.js. The library requires support for the HTML5 File API along with file slicing. 26 |
27 |
28 | Drop files here to upload or select folder or select from your computer or select images 29 |
30 |
31 | 32 | 33 | 34 | 35 | 40 | 41 |
36 | 37 | 38 | 39 |
42 |
43 | 44 |
45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /samples/Node.js/public/style.css: -------------------------------------------------------------------------------- 1 | /* Reset */ 2 | body,div,dl,dt,dd,ul,ol,li,h1,h2,h3,h4,h5,h6,pre,form,fieldset,input,textarea,p,blockquote,th,td{margin:0;padding:0;}table{border-collapse:collapse;border-spacing:0;}fieldset,img{border:0;}address,caption,cite,code,dfn,th,var{font-style:normal;font-weight:normal;}ol,ul {list-style:none;}caption,th {text-align:left;}h1,h2,h3,h4,h5,h6{font-size:100%;font-weight:normal;}q:before,q:after{content:'';}abbr,acronym {border:0;} 3 | 4 | /* Baseline */ 5 | body, p, h1, h2, h3, h4, h5, h6 {font:normal 12px/1.3em Helvetica, Arial, sans-serif; color:#333; } 6 | h1 {font-size:22px; font-weight:bold;} 7 | h2 {font-size:19px; font-weight:bold;} 8 | h3 {font-size:16px; font-weight:bold;} 9 | h4 {font-size:14px; font-weight:bold;} 10 | h5 {font-size:12px; font-weight:bold;} 11 | p {margin:10px 0;} 12 | 13 | 14 | body {text-align:center; margin:40px;} 15 | #frame {margin:0 auto; width:800px; text-align:left;} 16 | 17 | 18 | 19 | /* Uploader: Drag & Drop */ 20 | .uploader-error {display:none; font-size:14px; font-style:italic;} 21 | .uploader-drop {padding:15px; font-size:13px; text-align:center; color:#666; font-weight:bold;background-color:#eee; border:2px dashed #aaa; border-radius:10px; margin-top:40px; z-index:9999; display:none;} 22 | .uploader-dragover {padding:30px; color:#555; background-color:#ddd; border:1px solid #999;} 23 | 24 | /* Uploader: Progress bar */ 25 | .uploader-progress {margin:30px 0 30px 0; width:100%; display:none;} 26 | .progress-container {height:7px; background:#9CBD94; position:relative; } 27 | .progress-bar {position:absolute; top:0; left:0; bottom:0; background:#45913A; width:0;} 28 | .progress-text {font-size:11px; line-height:9px; padding-left:10px;} 29 | .progress-pause {padding:0 0 0 7px;} 30 | .progress-resume-link {display:none;} 31 | .is-paused .progress-resume-link {display:inline;} 32 | .is-paused .progress-pause-link {display:none;} 33 | .is-complete .progress-pause {display:none;} 34 | 35 | /* Uploader: List of items being uploaded */ 36 | .uploader-list {overflow:auto; margin-right:-20px; display:none;} 37 | .uploader-item {width:148px; height:90px; background-color:#666; position:relative; border:2px solid black; float:left; margin:0 6px 6px 0;} 38 | .uploader-item-thumbnail {width:100%; height:100%; position:absolute; top:0; left:0;} 39 | .uploader-item img.uploader-item-thumbnail {opacity:0;} 40 | .uploader-item-creating-thumbnail {padding:0 5px; font-size:9px; color:white;} 41 | .uploader-item-title {position:absolute; font-size:9px; line-height:11px; padding:3px 50px 3px 5px; bottom:0; left:0; right:0; color:white; background-color:rgba(0,0,0,0.6); min-height:27px;} 42 | .uploader-item-status {position:absolute; bottom:3px; right:3px;} 43 | 44 | /* Uploader: Hover & Active status */ 45 | .uploader-item:hover, .is-active .uploader-item {border-color:#4a873c; cursor:pointer; } 46 | .uploader-item:hover .uploader-item-title, .is-active .uploader-item .uploader-item-title {background-color:rgba(74,135,60,0.8);} 47 | 48 | /* Uploader: Error status */ 49 | .is-error .uploader-item:hover, .is-active.is-error .uploader-item {border-color:#900;} 50 | .is-error .uploader-item:hover .uploader-item-title, .is-active.is-error .uploader-item .uploader-item-title {background-color:rgba(153,0,0,0.6);} 51 | .is-error 
.uploader-item-creating-thumbnail {display:none;} 52 | -------------------------------------------------------------------------------- /test/unit/lib/FakeXMLHttpRequestUpload.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Extends sinon.FakeXMLHttpRequest with upload functionality. 3 | * Property `upload` to FakeXMLHttpRequest added. It works with the following events: 4 | * "loadstart", "progress", "abort", "error", "load", "loadend" 5 | * Events are instance of FakeXMLHttpRequestProgressEvent and has following properties: 6 | * loaded - loaded request size. 7 | * total - total request size. 8 | * lengthComputable - boolean indicates if loaded and total attributes were computed. 9 | * Helper method `progress`, such as `sinon.FakeXMLHttpRequest.respond(200...)`, was added. 10 | * 11 | */ 12 | (function() { 13 | function FakeXMLHttpRequestUpload() { 14 | var xhr = this; 15 | var events = ["loadstart", "progress", "abort", "error", "load", "loadend"]; 16 | 17 | function addEventListener(eventName) { 18 | xhr.addEventListener(eventName, function (event) { 19 | var listener = xhr["on" + eventName]; 20 | 21 | if (listener && typeof listener == "function") { 22 | listener(event); 23 | } 24 | }); 25 | } 26 | 27 | for (var i = events.length - 1; i >= 0; i--) { 28 | addEventListener(events[i]); 29 | } 30 | } 31 | 32 | sinon.extend(FakeXMLHttpRequestUpload.prototype, sinon.EventTarget); 33 | 34 | function FakeXMLHttpRequestProgressEvent( 35 | type, bubbles, cancelable, target, loaded, total, lengthComputable 36 | ) { 37 | this.initEvent(type, bubbles, cancelable, target); 38 | this.initProgressEvent(loaded || 0, total || 0, lengthComputable || false); 39 | } 40 | 41 | sinon.extend(FakeXMLHttpRequestProgressEvent.prototype, sinon.Event.prototype, { 42 | initProgressEvent: function initProgressEvent(loaded, total, lengthComputable) { 43 | this.loaded = loaded; 44 | this.total = total; 45 | this.lengthComputable = lengthComputable; 46 | } 47 | }); 48 | 49 | var originalFakeXMLHttpRequest = sinon.FakeXMLHttpRequest; 50 | 51 | function FakeXMLHttpRequestWithUpload() { 52 | sinon.extend(this, new originalFakeXMLHttpRequest()); 53 | this.upload = new FakeXMLHttpRequestUpload(); 54 | if (typeof FakeXMLHttpRequestWithUpload.onCreate == "function") { 55 | FakeXMLHttpRequestWithUpload.onCreate(this); 56 | } 57 | } 58 | 59 | sinon.extend(FakeXMLHttpRequestWithUpload.prototype, originalFakeXMLHttpRequest.prototype, { 60 | send: function send(data) { 61 | originalFakeXMLHttpRequest.prototype.send.call(this, data); 62 | this.upload.dispatchEvent( 63 | new FakeXMLHttpRequestProgressEvent("loadstart", false, false, this) 64 | ); 65 | }, 66 | /** 67 | * Report upload progress 68 | * @name sinon.FakeXMLHttpRequest.progress 69 | * @function 70 | * @param loaded 71 | * @param total 72 | * @param lengthComputable 73 | */ 74 | progress: function progress(loaded, total, lengthComputable) { 75 | this.upload.dispatchEvent( 76 | new FakeXMLHttpRequestProgressEvent( 77 | "progress", false, false, this, loaded, total, lengthComputable) 78 | ); 79 | }, 80 | respond: function respond(status, headers, body) { 81 | originalFakeXMLHttpRequest.prototype.respond.call(this, status, headers, body); 82 | this.upload.dispatchEvent( 83 | new FakeXMLHttpRequestProgressEvent("load", false, false, this) 84 | ); 85 | this.upload.dispatchEvent( 86 | new FakeXMLHttpRequestProgressEvent("loadend", false, false, this) 87 | ); 88 | } 89 | }); 90 | 91 | sinon.FakeXMLHttpRequest = 
FakeXMLHttpRequestWithUpload; 92 | sinon.FakeXMLHttpRequestProgressEvent = FakeXMLHttpRequestProgressEvent; 93 | sinon.FakeXMLHttpRequestWithUpload = FakeXMLHttpRequestWithUpload; 94 | sinon.originalFakeXMLHttpRequest = originalFakeXMLHttpRequest; 95 | })(); 96 | -------------------------------------------------------------------------------- /test/unit/specs/setup.js: -------------------------------------------------------------------------------- 1 | var Uploader = require('../../../src/uploader') 2 | 3 | describe('setup', function () { 4 | var uploader 5 | 6 | beforeEach(function () { 7 | uploader = new Uploader({ 8 | generateUniqueIdentifier: function (file) { 9 | return file.size 10 | } 11 | }) 12 | }) 13 | 14 | it('should be supported', function () { 15 | expect(uploader.support).toBeTruthy() 16 | }) 17 | 18 | it('files should be empty', function () { 19 | expect(uploader.files).toBeDefined() 20 | expect(uploader.files.length).toBe(0) 21 | }) 22 | 23 | it('set opts', function () { 24 | uploader = new Uploader({ 25 | chunkSize: 123 26 | }) 27 | expect(uploader.opts.chunkSize).toBe(123) 28 | expect(uploader.opts.simultaneousUploads).toBe(Uploader.defaults.simultaneousUploads) 29 | }) 30 | 31 | it('should show methods initial state', function () { 32 | expect(uploader.uploadNextChunk()).toBe(false) 33 | 34 | expect(uploader.progress()).toBe(0) 35 | expect(uploader.isUploading()).toBe(false) 36 | expect(uploader.timeRemaining()).toBe(0) 37 | expect(uploader.sizeUploaded()).toBe(0) 38 | }) 39 | 40 | it('should return total files size', function () { 41 | expect(uploader.getSize()).toBe(0) 42 | uploader.addFile(new File(['1234'], 'test')) 43 | expect(uploader.getSize()).toBe(4) 44 | uploader.addFile(new File(['123'], 'test2')) 45 | expect(uploader.getSize()).toBe(7) 46 | }) 47 | 48 | it('should find file by identifier', function () { 49 | expect(uploader.getFromUniqueIdentifier('')).toBe(false) 50 | uploader.addFile(new File(['1234'], 'test')) 51 | expect(uploader.getFromUniqueIdentifier(4)).toBe(uploader.files[0]) 52 | }) 53 | 54 | describe('assignBrowse', function () { 55 | it('assign to input', function () { 56 | var input = document.createElement('input') 57 | var addFiles = jasmine.createSpy('addFiles') 58 | uploader.addFiles = addFiles 59 | input.type = 'file' 60 | uploader.assignBrowse(input) 61 | expect(input.hasAttribute('multiple')).toBeTruthy() 62 | expect(addFiles).not.toHaveBeenCalled() 63 | var event = document.createEvent('MouseEvents') 64 | event.initEvent('change', true, true) 65 | input.dispatchEvent(event) 66 | expect(addFiles).not.toHaveBeenCalled() 67 | }) 68 | 69 | it('assign to div', function () { 70 | var div = document.createElement('div') 71 | var addFiles = jasmine.createSpy('addFiles') 72 | uploader.addFiles = addFiles 73 | uploader.assignBrowse(div) 74 | expect(div.children.length).toBe(1) 75 | var input = div.children[0] 76 | expect(addFiles).not.toHaveBeenCalled() 77 | var event = document.createEvent('MouseEvents') 78 | event.initEvent('change', true, true) 79 | input.dispatchEvent(event) 80 | expect(addFiles).not.toHaveBeenCalled() 81 | }) 82 | 83 | it('single file', function () { 84 | var input = document.createElement('input') 85 | input.type = 'file' 86 | uploader.assignBrowse(input, false, true) 87 | expect(input.hasAttribute('multiple')).toBeFalsy() 88 | }) 89 | 90 | it('directory', function () { 91 | var input = document.createElement('input') 92 | input.type = 'file' 93 | uploader.assignBrowse(input, true) 94 | 
expect(input.hasAttribute('webkitdirectory')).toBeTruthy() 95 | }) 96 | }) 97 | 98 | describe('assignDrop', function () { 99 | it('assign to div', function () { 100 | var div = document.createElement('div') 101 | var onDrop = jasmine.createSpy('onDrop') 102 | uploader.onDrop = onDrop 103 | uploader.assignDrop(div) 104 | var event = document.createEvent('MouseEvents') 105 | event.initEvent('drop', true, true) 106 | event.dataTransfer = {files: []} 107 | div.dispatchEvent(event) 108 | expect(onDrop).toHaveBeenCalled() 109 | expect(onDrop.calls.count()).toBe(1) 110 | 111 | uploader.unAssignDrop(div) 112 | div.dispatchEvent(event) 113 | expect(onDrop.calls.count()).toBe(1) 114 | }) 115 | }) 116 | }) 117 | -------------------------------------------------------------------------------- /test/unit/specs/webKitDataTransfer.js: -------------------------------------------------------------------------------- 1 | var Uploader = require('../../../src/uploader') 2 | 3 | describe('webKitDataTransfer', function () { 4 | var uploader 5 | 6 | beforeEach(function () { 7 | uploader = new Uploader() 8 | }) 9 | 10 | var getWebKitFile = function (filename) { 11 | return { 12 | isFile: true, 13 | isDirectory: false, 14 | fullPath: '/home/user/foo/' + filename, 15 | file: function (callback) { 16 | callback({ 17 | relativePath: '/foo/' + filename 18 | }) 19 | } 20 | } 21 | } 22 | 23 | it('should return empty array', function() { 24 | var event = { 25 | dataTransfer: { 26 | items: [ 27 | { 28 | webkitGetAsEntry: function () { 29 | return false 30 | } 31 | } 32 | ] 33 | } 34 | } 35 | spyOn(uploader, 'addFiles') 36 | uploader.webkitReadDataTransfer(event.dataTransfer, event) 37 | expect(uploader.addFiles).toHaveBeenCalledWith([], event) 38 | }) 39 | 40 | it('should return one file', function() { 41 | var event = { 42 | dataTransfer: { 43 | items: [ 44 | { 45 | webkitGetAsEntry: function () { 46 | return getWebKitFile('111.txt') 47 | }, 48 | getAsFile: function () { 49 | return { 50 | relativePath: '/foo/111.txt' 51 | } 52 | } 53 | } 54 | ] 55 | } 56 | } 57 | spyOn(uploader, 'addFiles') 58 | uploader.webkitReadDataTransfer(event.dataTransfer, event) 59 | expect(uploader.addFiles).toHaveBeenCalledWith( 60 | [{relativePath: 'home/user/foo/111.txt'}], 61 | event 62 | ) 63 | }) 64 | 65 | it('should return one file from subdirectory', function() { 66 | var event = { 67 | dataTransfer: { 68 | items: [ 69 | { 70 | webkitGetAsEntry: function () { 71 | return { 72 | isFile: false, 73 | isDirectory: true, 74 | fullPath: '/home/user/foo/', 75 | createReader: function () { 76 | var entries = [ 77 | getWebKitFile('111.txt') 78 | ] 79 | return { 80 | readEntries: function (success, error) { 81 | var entry = entries.shift() 82 | if (entry) { 83 | return success([entry]) 84 | } else { 85 | return success([]) 86 | } 87 | } 88 | } 89 | } 90 | } 91 | } 92 | } 93 | ] 94 | } 95 | } 96 | spyOn(uploader, 'addFiles') 97 | uploader.webkitReadDataTransfer(event.dataTransfer, event) 98 | expect(uploader.addFiles).toHaveBeenCalledWith( 99 | [{relativePath: 'home/user/foo/111.txt'}], 100 | event 101 | ) 102 | }) 103 | 104 | it('should return two files from subdirectory', function() { 105 | var event = { 106 | dataTransfer: { 107 | items: [ 108 | { 109 | webkitGetAsEntry: function () { 110 | return { 111 | isFile: false, 112 | isDirectory: true, 113 | fullPath: '/home/user/foo/', 114 | createReader: function () { 115 | var entries = [ 116 | getWebKitFile('111.txt'), 117 | getWebKitFile('222.txt') 118 | ] 119 | return { 120 | readEntries: 
function (success, error) { 121 | var entry = entries.shift() 122 | if (entry) { 123 | return success([entry]) 124 | } else { 125 | return success([]) 126 | } 127 | } 128 | } 129 | } 130 | } 131 | } 132 | } 133 | ] 134 | } 135 | } 136 | spyOn(uploader, 'addFiles') 137 | uploader.webkitReadDataTransfer(event.dataTransfer, event) 138 | expect(uploader.addFiles).toHaveBeenCalledWith( 139 | [ 140 | {relativePath: 'home/user/foo/111.txt'}, 141 | {relativePath: 'home/user/foo/222.txt'} 142 | ], 143 | event 144 | ) 145 | }) 146 | }) 147 | -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | var oproto = Object.prototype 2 | var aproto = Array.prototype 3 | var serialize = oproto.toString 4 | 5 | var isFunction = function (fn) { 6 | return serialize.call(fn) === '[object Function]' 7 | } 8 | 9 | var isArray = Array.isArray || /* istanbul ignore next */ function (ary) { 10 | return serialize.call(ary) === '[object Array]' 11 | } 12 | 13 | var isPlainObject = function (obj) { 14 | return serialize.call(obj) === '[object Object]' && Object.getPrototypeOf(obj) === oproto 15 | } 16 | 17 | var i = 0 18 | var utils = { 19 | uid: function () { 20 | return ++i 21 | }, 22 | noop: function () {}, 23 | bind: function (fn, context) { 24 | return function () { 25 | return fn.apply(context, arguments) 26 | } 27 | }, 28 | preventEvent: function (evt) { 29 | evt.preventDefault() 30 | }, 31 | stop: function (evt) { 32 | evt.preventDefault() 33 | evt.stopPropagation() 34 | }, 35 | nextTick: function (fn, context) { 36 | setTimeout(utils.bind(fn, context), 0) 37 | }, 38 | toArray: function (ary, start, end) { 39 | if (start === undefined) start = 0 40 | if (end === undefined) end = ary.length 41 | return aproto.slice.call(ary, start, end) 42 | }, 43 | 44 | isPlainObject: isPlainObject, 45 | isFunction: isFunction, 46 | isArray: isArray, 47 | isObject: function (obj) { 48 | return Object(obj) === obj 49 | }, 50 | isString: function (s) { 51 | return typeof s === 'string' 52 | }, 53 | isUndefined: function (a) { 54 | return typeof a === 'undefined' 55 | }, 56 | isDefined: function (a) { 57 | return typeof a !== 'undefined' 58 | }, 59 | 60 | each: function (ary, func, context) { 61 | if (utils.isDefined(ary.length)) { 62 | for (var i = 0, len = ary.length; i < len; i++) { 63 | if (func.call(context, ary[i], i, ary) === false) { 64 | break 65 | } 66 | } 67 | } else { 68 | for (var k in ary) { 69 | if (func.call(context, ary[k], k, ary) === false) { 70 | break 71 | } 72 | } 73 | } 74 | }, 75 | 76 | /** 77 | * If option is a function, evaluate it with given params 78 | * @param {*} data 79 | * @param {...} args arguments of a callback 80 | * @returns {*} 81 | */ 82 | evalOpts: function (data, args) { 83 | if (utils.isFunction(data)) { 84 | // `arguments` is an object, not array, in FF, so: 85 | args = utils.toArray(arguments) 86 | data = data.apply(null, args.slice(1)) 87 | } 88 | return data 89 | }, 90 | 91 | extend: function () { 92 | var options 93 | var name 94 | var src 95 | var copy 96 | var copyIsArray 97 | var clone 98 | var target = arguments[0] || {} 99 | var i = 1 100 | var length = arguments.length 101 | var force = false 102 | 103 | // 如果第一个参数为布尔,判定是否深拷贝 104 | if (typeof target === 'boolean') { 105 | force = target 106 | target = arguments[1] || {} 107 | i++ 108 | } 109 | 110 | // 确保接受方为一个复杂的数据类型 111 | if (typeof target !== 'object' && !isFunction(target)) { 112 | target = {} 113 | } 
114 | 115 | // 如果只有一个参数,那么新成员添加于 extend 所在的对象上 116 | if (i === length) { 117 | target = this 118 | i-- 119 | } 120 | 121 | for (; i < length; i++) { 122 | // 只处理非空参数 123 | if ((options = arguments[i]) != null) { 124 | for (name in options) { 125 | src = target[name] 126 | copy = options[name] 127 | 128 | // 防止环引用 129 | if (target === copy) { 130 | continue 131 | } 132 | if (force && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { 133 | if (copyIsArray) { 134 | copyIsArray = false 135 | clone = src && isArray(src) ? src : [] 136 | } else { 137 | clone = src && isPlainObject(src) ? src : {} 138 | } 139 | target[name] = utils.extend(force, clone, copy) 140 | } else if (copy !== undefined) { 141 | target[name] = copy 142 | } 143 | } 144 | } 145 | } 146 | return target 147 | }, 148 | 149 | formatSize: function (size) { 150 | if (size < 1024) { 151 | return size.toFixed(0) + ' bytes' 152 | } else if (size < 1024 * 1024) { 153 | return (size / 1024.0).toFixed(0) + ' KB' 154 | } else if (size < 1024 * 1024 * 1024) { 155 | return (size / 1024.0 / 1024.0).toFixed(1) + ' MB' 156 | } else { 157 | return (size / 1024.0 / 1024.0 / 1024.0).toFixed(1) + ' GB' 158 | } 159 | }, 160 | 161 | defineNonEnumerable: function (target, key, value) { 162 | Object.defineProperty(target, key, { 163 | enumerable: false, 164 | configurable: true, 165 | writable: true, 166 | value: value 167 | }) 168 | } 169 | } 170 | 171 | module.exports = utils 172 | -------------------------------------------------------------------------------- /samples/Node.js/public/app.js: -------------------------------------------------------------------------------- 1 | (function () { 2 | var r = new Uploader({ 3 | target: '/upload', 4 | chunkSize: 1024 * 1024, 5 | testChunks: true, 6 | checkChunkUploadedByResponse: function (chunk, message) { 7 | var objMessage = {} 8 | try { 9 | objMessage = JSON.parse(message) 10 | } catch (e) {} 11 | // fake response 12 | // objMessage.uploaded_chunks = [2, 3, 4, 5, 6, 8, 10, 11, 12, 13, 17, 20, 21] 13 | // check the chunk is uploaded 14 | return (objMessage.uploaded_chunks || []).indexOf(chunk.offset + 1) >= 0 15 | } 16 | }); 17 | // simple-uploader.js isn't supported, fall back on a different method 18 | if (!r.support) { 19 | $('.uploader-error').show(); 20 | return ; 21 | } 22 | // Show a place for dropping/selecting files 23 | $('.uploader-drop').show(); 24 | r.assignDrop($('.uploader-drop')[0]); 25 | r.assignBrowse($('.uploader-browse')[0]); 26 | r.assignBrowse($('.uploader-browse-folder')[0], true); 27 | r.assignBrowse($('.uploader-browse-image')[0], false, false, {accept: 'image/*'}); 28 | 29 | // Handle file add event 30 | r.on('filesAdded', function (files, fileList) { 31 | // Show progress bar 32 | $('.uploader-progress, .uploader-list').show(); 33 | fileList.forEach(function (file) { 34 | var $self = file.$el = $( 35 | '
  • ' + 36 | 'Uploading ' + 37 | ' ' + 38 | ' ' + 39 | '' + 40 | ' ' + 41 | '' + 42 | '' + 43 | ' ' + 44 | '' + 45 | '' + 46 | ' ' + 47 | '' + 48 | '
  • ' 49 | ); 50 | $self.find('.uploader-file-name').text(file.name); 51 | $self.find('.uploader-file-size').text(file.getFormatSize()); 52 | $self.find('.uploader-file-pause').on('click', function () { 53 | file.pause(); 54 | $self.find('.uploader-file-pause').hide(); 55 | $self.find('.uploader-file-resume').show(); 56 | }); 57 | $self.find('.uploader-file-resume').on('click', function () { 58 | file.resume(); 59 | $self.find('.uploader-file-pause').show(); 60 | $self.find('.uploader-file-resume').hide(); 61 | }); 62 | $self.find('.uploader-file-cancel').on('click', function () { 63 | file.cancel(); 64 | $self.remove(); 65 | }); 66 | $('.uploader-list').append($self); 67 | }); 68 | }); 69 | r.on('filesSubmitted', function (files, fileList) { 70 | window.r.upload(); 71 | }); 72 | r.on('complete', function () { 73 | // Hide pause/resume when the upload has completed 74 | $('.uploader-progress .progress-resume-link, .uploader-progress .progress-pause-link').hide(); 75 | }); 76 | r.on('fileComplete', function (rooFile) { 77 | var $self = rooFile.$el 78 | // Reflect that the file upload has completed 79 | $self.find('.uploader-file-progress').text('(completed)'); 80 | $self.find('.uploader-file-pause, .uploader-file-resume').remove(); 81 | }); 82 | r.on('fileError', function (rootFile, file, message) { 83 | rootFile.$el.find('.uploader-file-progress').html('(file could not be uploaded: ' + message + ')') 84 | }); 85 | r.on('fileProgress', function (rootFile, file) { 86 | // Handle progress for both the file and the overall upload 87 | rootFile.$el.find('.uploader-file-progress') 88 | .html(Math.floor(rootFile.progress() * 100) + '% ' 89 | + Uploader.utils.formatSize(rootFile.averageSpeed) + '/s ' 90 | + secondsToStr(rootFile.timeRemaining()) + ' remaining') ; 91 | $('.progress-bar').css({width:Math.floor(r.progress()*100) + '%'}); 92 | }); 93 | r.on('uploadStart', function () { 94 | // Show pause, hide resume 95 | $('.uploader-progress .progress-resume-link').hide(); 96 | $('.uploader-progress .progress-pause-link').show(); 97 | }); 98 | r.on('catchAll', function () { 99 | console.log.apply(console, arguments); 100 | }); 101 | 102 | window.r = { 103 | pause: function () { 104 | r.pause(); 105 | // Show resume, hide pause 106 | $('.uploader-file-resume').show(); 107 | $('.uploader-file-pause').hide(); 108 | $('.uploader-progress .progress-resume-link').show(); 109 | $('.uploader-progress .progress-pause-link').hide(); 110 | }, 111 | cancel: function () { 112 | r.cancel(); 113 | $('.uploader-file').remove(); 114 | }, 115 | upload: function () { 116 | $('.uploader-file-pause').show(); 117 | $('.uploader-file-resume').hide(); 118 | r.resume(); 119 | }, 120 | uploader: r 121 | }; 122 | })(); 123 | 124 | function secondsToStr (temp) { 125 | function numberEnding (number) { 126 | return (number > 1) ? 
's' : ''; 127 | } 128 | var years = Math.floor(temp / 31536000); 129 | if (years) { 130 | return years + ' year' + numberEnding(years); 131 | } 132 | var days = Math.floor((temp %= 31536000) / 86400); 133 | if (days) { 134 | return days + ' day' + numberEnding(days); 135 | } 136 | var hours = Math.floor((temp %= 86400) / 3600); 137 | if (hours) { 138 | return hours + ' hour' + numberEnding(hours); 139 | } 140 | var minutes = Math.floor((temp %= 3600) / 60); 141 | if (minutes) { 142 | return minutes + ' minute' + numberEnding(minutes); 143 | } 144 | var seconds = temp % 60; 145 | return seconds + ' second' + numberEnding(seconds); 146 | } 147 | -------------------------------------------------------------------------------- /gulpfile.js: -------------------------------------------------------------------------------- 1 | var pkg = require('./package.json') 2 | var gulp = require('gulp') 3 | var eslint = require('gulp-eslint') 4 | var browserify = require('gulp-browserify') 5 | var header = require('gulp-header') 6 | var pump = require('pump') 7 | var uglify = require('gulp-uglify') 8 | var concat = require('gulp-concat') 9 | var sourcemaps = require('gulp-sourcemaps') 10 | 11 | var spawn = require('child_process').spawn 12 | var _ = require('./src/utils') 13 | var Server = require('karma').Server 14 | 15 | var name = 'uploader' 16 | var NAME = name.charAt(0).toUpperCase() + name.substr(1) 17 | var fname = name + '.js' 18 | var mname = name + '.min.js' 19 | 20 | var paths = { 21 | src: 'src/', 22 | dist: 'dist/' 23 | } 24 | var allFiles = paths.src + '*.js' 25 | var banner = [ 26 | '/*!', 27 | ' * ' + NAME + ' - <%= pkg.description %>', 28 | ' * @version v<%= pkg.version %>', 29 | ' * @author <%= pkg.author %>', 30 | ' * @link <%= pkg.homepage %>', 31 | ' * @license <%= pkg.license %>', 32 | ' */', 33 | '' 34 | ].join('\n') 35 | 36 | gulp.task('eslint', function () { 37 | return gulp.src(allFiles) 38 | .pipe(eslint({ 39 | useEslintrc: true 40 | })) 41 | .pipe(eslint.format()) 42 | .pipe(eslint.failOnError()) 43 | }) 44 | 45 | gulp.task('scripts', ['eslint'], function() { 46 | return gulp.src(paths.src + fname) 47 | .pipe(browserify({ 48 | debug: false, 49 | standalone: 'Uploader', 50 | transform: ['browserify-versionify'] 51 | })) 52 | .pipe(header(banner, { 53 | pkg: pkg 54 | })) 55 | .pipe(gulp.dest(paths.dist)) 56 | }); 57 | 58 | gulp.task('build', ['scripts'], function (cb) { 59 | pump([ 60 | gulp.src(paths.dist + fname), 61 | sourcemaps.init(), 62 | uglify({ 63 | output: { 64 | comments: /^!/ 65 | } 66 | }), 67 | concat(mname), 68 | sourcemaps.write('./', { 69 | includeContent: false 70 | }), 71 | gulp.dest(paths.dist) 72 | ], cb) 73 | }) 74 | 75 | var karmaBaseConfig = { 76 | basePath: '', 77 | frameworks: ['jasmine', 'commonjs'], 78 | files: [ 79 | 'node_modules/sinon/pkg/sinon-1.7.3.js', 80 | 'test/unit/lib/fakeFile.js', 81 | 'test/unit/lib/FakeXMLHttpRequestUpload.js', 82 | 'src/**/*.js', 83 | 'test/unit/specs/**/*.js' 84 | ], 85 | // list of files to exclude 86 | exclude: [ 87 | ], 88 | preprocessors: { 89 | 'src/**/*.js': ['commonjs'], 90 | 'test/unit/specs/**/*.js': ['commonjs'] 91 | }, 92 | // web server port 93 | port: 9876, 94 | // enable / disable colors in the output (reporters and logs) 95 | colors: true, 96 | autoWatch: false, 97 | captureTimeout: 60000, 98 | singleRun: true 99 | } 100 | 101 | gulp.task('unit', function (done) { 102 | var karmaUnitConfig = _.extend({}, karmaBaseConfig, { 103 | browsers: ['Chrome', 'Firefox'], 104 | reporters: ['progress'] 105 | }) 106 | 
new Server(karmaUnitConfig, done).start() 107 | }) 108 | 109 | gulp.task('cover', function (done) { 110 | var karmaCoverageConfig = _.extend({}, karmaBaseConfig, { 111 | browsers: ['PhantomJS'], 112 | reporters: ['progress', 'coverage'], 113 | preprocessors: { 114 | 'src/**/*.js': ['commonjs', 'coverage'], 115 | 'test/unit/specs/**/*.js': ['commonjs'] 116 | }, 117 | coverageReporter: { 118 | reporters: [ 119 | { 120 | type: 'lcov', 121 | subdir: '.' 122 | }, 123 | { 124 | type: 'text-summary', 125 | subdir: '.' 126 | } 127 | ] 128 | } 129 | }) 130 | new Server(karmaCoverageConfig, done).start() 131 | }) 132 | 133 | gulp.task('sauce', function (done) { 134 | var customLaunchers = { 135 | sl_chrome: { 136 | base: 'SauceLabs', 137 | browserName: 'chrome', 138 | platform: 'Windows 7' 139 | }, 140 | sl_firefox: { 141 | base: 'SauceLabs', 142 | browserName: 'firefox', 143 | platform: 'Windows 7' 144 | }, 145 | sl_mac_safari: { 146 | base: 'SauceLabs', 147 | browserName: 'safari', 148 | platform: 'OS X 10.10' 149 | }, 150 | 151 | sl_ie_10: { 152 | base: 'SauceLabs', 153 | browserName: 'internet explorer', 154 | platform: 'Windows 7', 155 | version: '10' 156 | }, 157 | sl_ie_11: { 158 | base: 'SauceLabs', 159 | browserName: 'internet explorer', 160 | platform: 'Windows 8.1', 161 | version: '11' 162 | }, 163 | sl_edge: { 164 | base: 'SauceLabs', 165 | browserName: 'MicrosoftEdge', 166 | platform: 'Windows 10' 167 | }, 168 | 169 | sl_ios_safari_10_3: { 170 | base: 'SauceLabs', 171 | browserName: 'iphone', 172 | version: '10.3' 173 | }, 174 | sl_android_6_0: { 175 | base: 'SauceLabs', 176 | browserName: 'android', 177 | version: '6.0' 178 | } 179 | } 180 | new Server(_.extend({}, karmaBaseConfig, { 181 | sauceLabs: { 182 | testName: 'uploader unit tests', 183 | recordScreenshots: false, 184 | build: process.env.TRAVIS_BUILD_NUMBER || process.env.SAUCE_BUILD_ID || Date.now(), 185 | username: 'uploader', 186 | accessKey: 'e2dd6126-05c4-422e-9cfb-4c4e9735e2ab' 187 | }, 188 | // mobile。。。 slow 189 | captureTimeout: 300000, 190 | browserNoActivityTimeout: 300000, 191 | browsers: Object.keys(customLaunchers), 192 | customLaunchers: customLaunchers, 193 | reporters: ['progress', 'saucelabs'] 194 | }), done).start() 195 | }) 196 | 197 | gulp.task('test', ['unit', 'cover']) 198 | 199 | gulp.task('watch', function () { 200 | gulp.watch(allFiles, ['scripts']) 201 | }) 202 | 203 | gulp.task('default', ['build', 'test']) 204 | 205 | var git = require('gulp-git') 206 | var tag_version = require('gulp-tag-version') 207 | gulp.task('git', ['build'], function (done) { 208 | var v = require('./package.json').version 209 | gulp.src('./') 210 | .pipe(git.add({args: '-A'})) 211 | .pipe(git.commit('[release] ' + v)) 212 | .pipe(tag_version({version: v})) 213 | .on('end', function () { 214 | git.push('origin', 'master', {args: '--tags'}) 215 | done() 216 | }) 217 | }) 218 | 219 | gulp.task('npm-publish', ['git'], function (done) { 220 | spawn('npm', ['publish'], {stdio: 'inherit'}).on('close', done) 221 | }) 222 | 223 | gulp.task('release', ['npm-publish']) 224 | 225 | // console.log(typeof process.env.TRAVIS_PULL_REQUEST) 226 | // string 227 | gulp.task('ci', ['eslint', 'cover', 'sauce']) 228 | -------------------------------------------------------------------------------- /test/unit/specs/utils.js: -------------------------------------------------------------------------------- 1 | var _ = require('../../../src/utils') 2 | 3 | describe('utils', function () { 4 | 5 | it('noop', function () { 6 | 
expect(_.noop).toBeDefined() 7 | expect(_.noop()).toBeUndefined() 8 | }) 9 | 10 | it('bind', function () { 11 | var fn = _.bind(function () { 12 | return this.a 13 | }, {a: 'a'}) 14 | expect(fn()).toBe('a') 15 | }) 16 | 17 | it('preventEvent', function () { 18 | var v = 1 19 | _.preventEvent({ 20 | preventDefault: function () { 21 | v = 2 22 | } 23 | }) 24 | expect(v).toBe(2) 25 | }) 26 | 27 | it('stop', function () { 28 | var v = 1 29 | var v2 = 1 30 | _.stop({ 31 | preventDefault: function () { 32 | v = 2 33 | }, 34 | stopPropagation: function () { 35 | v2 = 2 36 | } 37 | }) 38 | expect(v).toBe(2) 39 | expect(v2).toBe(2) 40 | }) 41 | 42 | it('nextTick', function (done) { 43 | var ct = { 44 | a: 'a' 45 | } 46 | _.nextTick(function () { 47 | this.a = 'b' 48 | }, ct) 49 | expect(ct.a).toBe('a') 50 | setTimeout(function () { 51 | expect(ct.a).toBe('b') 52 | done() 53 | }, 10) 54 | }) 55 | 56 | it('toArray', function () { 57 | var r = _.toArray({ 58 | 0: 0, 59 | 1: 1, 60 | length: 2 61 | }, 0, 1) 62 | expect(r.length).toBe(1) 63 | r.push(2) 64 | expect(r.length).toBe(2) 65 | expect(r[1]).toBe(2) 66 | }) 67 | 68 | it('isPlainObject', function () { 69 | expect(_.isPlainObject({})).toBe(true) 70 | expect(_.isPlainObject([])).toBe(false) 71 | expect(_.isPlainObject(null)).toBe(false) 72 | expect(_.isPlainObject(null)).toBeFalsy() 73 | expect(_.isPlainObject(123)).toBeFalsy() 74 | expect(_.isPlainObject(true)).toBeFalsy() 75 | expect(_.isPlainObject('uploader')).toBeFalsy() 76 | expect(_.isPlainObject(undefined)).toBeFalsy() 77 | expect(_.isPlainObject(function () {})).toBe(false) 78 | if (typeof window !== 'undefined') { 79 | expect(_.isPlainObject(window)).toBe(false) 80 | } 81 | }) 82 | 83 | it('isFunction', function () { 84 | expect(_.isFunction({})).toBe(false) 85 | expect(_.isFunction([])).toBe(false) 86 | expect(_.isFunction(null)).toBe(false) 87 | expect(_.isFunction(null)).toBeFalsy() 88 | expect(_.isFunction(123)).toBeFalsy() 89 | expect(_.isFunction(true)).toBeFalsy() 90 | expect(_.isFunction('uploader')).toBeFalsy() 91 | expect(_.isFunction(undefined)).toBeFalsy() 92 | expect(_.isFunction(function () {})).toBe(true) 93 | }) 94 | 95 | it('isArray', function () { 96 | expect(_.isArray({})).toBe(false) 97 | expect(_.isArray([])).toBe(true) 98 | expect(_.isArray(null)).toBe(false) 99 | expect(_.isArray(null)).toBeFalsy() 100 | expect(_.isArray(123)).toBeFalsy() 101 | expect(_.isArray(true)).toBeFalsy() 102 | expect(_.isArray('uploader')).toBeFalsy() 103 | expect(_.isArray(undefined)).toBeFalsy() 104 | expect(_.isArray(function () {})).toBe(false) 105 | }) 106 | 107 | it('isObject', function () { 108 | expect(_.isObject({})).toBe(true) 109 | expect(_.isObject([])).toBe(true) 110 | expect(_.isObject(null)).toBeFalsy() 111 | expect(_.isObject(123)).toBeFalsy() 112 | expect(_.isObject(true)).toBeFalsy() 113 | expect(_.isObject('uploader')).toBeFalsy() 114 | expect(_.isObject(undefined)).toBeFalsy() 115 | expect(_.isObject(function () {})).toBe(true) 116 | }) 117 | 118 | it('isString', function () { 119 | expect(_.isString({})).toBe(false) 120 | expect(_.isString([])).toBe(false) 121 | expect(_.isString(null)).toBeFalsy() 122 | expect(_.isString(123)).toBeFalsy() 123 | expect(_.isString(true)).toBeFalsy() 124 | expect(_.isString('uploader')).toBe(true) 125 | expect(_.isString(undefined)).toBeFalsy() 126 | expect(_.isString(function () {})).toBe(false) 127 | }) 128 | 129 | it('isUndefined', function () { 130 | expect(_.isUndefined({})).toBe(false) 131 | expect(_.isUndefined([])).toBe(false) 132 
| expect(_.isUndefined(null)).toBeFalsy() 133 | expect(_.isUndefined(123)).toBeFalsy() 134 | expect(_.isUndefined(true)).toBeFalsy() 135 | expect(_.isUndefined('uploader')).toBeFalsy() 136 | expect(_.isUndefined(undefined)).toBe(true) 137 | expect(_.isUndefined(function () {})).toBe(false) 138 | }) 139 | 140 | it('isDefined', function () { 141 | expect(_.isDefined({})).toBe(true) 142 | expect(_.isDefined([])).toBe(true) 143 | expect(_.isDefined(null)).toBe(true) 144 | expect(_.isDefined(123)).toBe(true) 145 | expect(_.isDefined(true)).toBe(true) 146 | expect(_.isDefined('uploader')).toBe(true) 147 | expect(_.isDefined(undefined)).toBeFalsy() 148 | expect(_.isDefined(function () {})).toBe(true) 149 | }) 150 | 151 | it('each', function () { 152 | var a = [1, 2] 153 | var r = 0 154 | _.each(a, function (v) { 155 | r++ 156 | return false 157 | }) 158 | expect(r).toBe(1) 159 | r = 0 160 | _.each(a, function (v) { 161 | r++ 162 | }) 163 | expect(r).toBe(2) 164 | a = {a: 1, b: 2} 165 | r = 0 166 | _.each(a, function (v) { 167 | r++ 168 | return false 169 | }) 170 | expect(r).toBe(1) 171 | r = 0 172 | _.each(a, function (v) { 173 | r++ 174 | }) 175 | expect(r).toBe(2) 176 | }) 177 | 178 | it('evalOpts', function () { 179 | var o = {} 180 | expect(_.evalOpts(o)).toBe(o) 181 | expect(_.evalOpts(5)).toBe(5) 182 | expect(_.evalOpts(function () { 183 | return 5 184 | })).toBe(5) 185 | expect(_.evalOpts(function (a) { 186 | return a 187 | }, o)).toBe(o) 188 | }) 189 | 190 | it('extend', function () { 191 | var from = {a: 1, b: 2} 192 | var to = {} 193 | var res = _.extend(to, from) 194 | expect(to.a).toBe(from.a) 195 | expect(to.b).toBe(from.b) 196 | expect(res).toBe(to) 197 | 198 | from = { 199 | a: 1, 200 | b: { 201 | c: 2 202 | } 203 | } 204 | to = {} 205 | res = _.extend(true, to, from) 206 | expect(to.a).toBe(from.a) 207 | expect(to.b).not.toBe(from.b) 208 | expect(to.b.c).toBe(from.b.c) 209 | expect(res).toBe(to) 210 | 211 | // some check cases 212 | _.extend('str', from) 213 | _.extend('', from) 214 | _.extend(true) 215 | _.extend({ 216 | x: 'x' 217 | }) 218 | expect(_.x).toBe('x') 219 | }) 220 | 221 | it('formatSize', function () { 222 | expect(_.formatSize(0)).toBe('0 bytes') 223 | expect(_.formatSize(2.2 * 1024)).toBe('2 KB') 224 | expect(_.formatSize(2.14 * 1024 * 1024)).toBe('2.1 MB') 225 | expect(_.formatSize(5.14 * 1024 * 1024 * 1024)).toBe('5.1 GB') 226 | }) 227 | }) 228 | -------------------------------------------------------------------------------- /samples/Node.js/uploader-node.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | path = require('path'), 3 | util = require('util'), 4 | Stream = require('stream').Stream; 5 | 6 | module.exports = flow = function(temporaryFolder) { 7 | var $ = this; 8 | $.temporaryFolder = temporaryFolder; 9 | $.maxFileSize = null; 10 | $.fileParameterName = 'file'; 11 | 12 | try { 13 | fs.mkdirSync($.temporaryFolder); 14 | } catch (e) {} 15 | 16 | function cleanIdentifier(identifier) { 17 | return identifier.replace(/[^0-9A-Za-z_-]/g, ''); 18 | } 19 | 20 | function getChunkFilename(chunkNumber, identifier) { 21 | // Clean up the identifier 22 | identifier = cleanIdentifier(identifier); 23 | // What would the file name be? 24 | return path.resolve($.temporaryFolder, './uploader-' + identifier + '.' 
+ chunkNumber); 25 | } 26 | 27 | function validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, fileSize) { 28 | // Clean up the identifier 29 | identifier = cleanIdentifier(identifier); 30 | 31 | // Check if the request is sane 32 | if (chunkNumber == 0 || chunkSize == 0 || totalSize == 0 || identifier.length == 0 || filename.length == 0) { 33 | return 'non_uploader_request'; 34 | } 35 | var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1); 36 | if (chunkNumber > numberOfChunks) { 37 | return 'invalid_uploader_request1'; 38 | } 39 | 40 | // Is the file too big? 41 | if ($.maxFileSize && totalSize > $.maxFileSize) { 42 | return 'invalid_uploader_request2'; 43 | } 44 | 45 | if (typeof(fileSize) != 'undefined') { 46 | if (chunkNumber < numberOfChunks && fileSize != chunkSize) { 47 | // The chunk in the POST request isn't the correct size 48 | return 'invalid_uploader_request3'; 49 | } 50 | if (numberOfChunks > 1 && chunkNumber == numberOfChunks && fileSize != ((totalSize % chunkSize) + parseInt(chunkSize))) { 51 | // The chunks in the POST is the last one, and the fil is not the correct size 52 | return 'invalid_uploader_request4'; 53 | } 54 | if (numberOfChunks == 1 && fileSize != totalSize) { 55 | // The file is only a single chunk, and the data size does not fit 56 | return 'invalid_uploader_request5'; 57 | } 58 | } 59 | 60 | return 'valid'; 61 | } 62 | 63 | //'found', filename, original_filename, identifier 64 | //'not_found', null, null, null 65 | $.get = function(req, callback) { 66 | var chunkNumber = req.param('chunkNumber', 0); 67 | var chunkSize = req.param('chunkSize', 0); 68 | var totalSize = req.param('totalSize', 0); 69 | var identifier = req.param('identifier', ""); 70 | var filename = req.param('filename', ""); 71 | 72 | if (validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename) == 'valid') { 73 | var chunkFilename = getChunkFilename(chunkNumber, identifier); 74 | fs.exists(chunkFilename, function(exists) { 75 | if (exists) { 76 | callback('found', chunkFilename, filename, identifier); 77 | } else { 78 | callback('not_found', null, null, null); 79 | } 80 | }); 81 | } else { 82 | callback('not_found', null, null, null); 83 | } 84 | }; 85 | 86 | //'partly_done', filename, original_filename, identifier 87 | //'done', filename, original_filename, identifier 88 | //'invalid_uploader_request', null, null, null 89 | //'non_uploader_request', null, null, null 90 | $.post = function(req, callback) { 91 | 92 | var fields = req.body; 93 | var files = req.files; 94 | 95 | var chunkNumber = fields['chunkNumber']; 96 | var chunkSize = fields['chunkSize']; 97 | var totalSize = fields['totalSize']; 98 | var identifier = cleanIdentifier(fields['identifier']); 99 | var filename = fields['filename']; 100 | 101 | if (!files[$.fileParameterName] || !files[$.fileParameterName].size) { 102 | callback('invalid_uploader_request', null, null, null); 103 | return; 104 | } 105 | 106 | var original_filename = files[$.fileParameterName]['originalFilename']; 107 | var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, files[$.fileParameterName].size); 108 | if (validation == 'valid') { 109 | var chunkFilename = getChunkFilename(chunkNumber, identifier); 110 | 111 | // Save the chunk (TODO: OVERWRITE) 112 | fs.rename(files[$.fileParameterName].path, chunkFilename, function() { 113 | 114 | // Do we have all the chunks? 
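// Completion check: probe chunk files 1..numberOfChunks with fs.exists.
// Only when every chunk file is present does the callback report 'done';
// otherwise it reports 'partly_done' so the client keeps sending the rest.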
115 | var currentTestChunk = 1; 116 | var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1); 117 | var testChunkExists = function() { 118 | fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists) { 119 | if (exists) { 120 | currentTestChunk++; 121 | if (currentTestChunk > numberOfChunks) { 122 | callback('done', filename, original_filename, identifier); 123 | } else { 124 | // Recursion 125 | testChunkExists(); 126 | } 127 | } else { 128 | callback('partly_done', filename, original_filename, identifier); 129 | } 130 | }); 131 | }; 132 | testChunkExists(); 133 | }); 134 | } else { 135 | callback(validation, filename, original_filename, identifier); 136 | } 137 | }; 138 | 139 | // Pipe chunks directly in to an existsing WritableStream 140 | // r.write(identifier, response); 141 | // r.write(identifier, response, {end:false}); 142 | // 143 | // var stream = fs.createWriteStream(filename); 144 | // r.write(identifier, stream); 145 | // stream.on('data', function(data){...}); 146 | // stream.on('finish', function(){...}); 147 | $.write = function(identifier, writableStream, options) { 148 | options = options || {}; 149 | options.end = (typeof options['end'] == 'undefined' ? true : options['end']); 150 | 151 | // Iterate over each chunk 152 | var pipeChunk = function(number) { 153 | 154 | var chunkFilename = getChunkFilename(number, identifier); 155 | fs.exists(chunkFilename, function(exists) { 156 | 157 | if (exists) { 158 | // If the chunk with the current number exists, 159 | // then create a ReadStream from the file 160 | // and pipe it to the specified writableStream. 161 | var sourceStream = fs.createReadStream(chunkFilename); 162 | sourceStream.pipe(writableStream, { 163 | end: false 164 | }); 165 | sourceStream.on('end', function() { 166 | // When the chunk is fully streamed, 167 | // jump to the next one 168 | pipeChunk(number + 1); 169 | }); 170 | } else { 171 | // When all the chunks have been piped, end the stream 172 | if (options.end) writableStream.end(); 173 | if (options.onDone) options.onDone(); 174 | } 175 | }); 176 | }; 177 | pipeChunk(1); 178 | }; 179 | 180 | $.clean = function(identifier, options) { 181 | options = options || {}; 182 | 183 | // Iterate over each chunk 184 | var pipeChunkRm = function(number) { 185 | 186 | var chunkFilename = getChunkFilename(number, identifier); 187 | 188 | //console.log('removing pipeChunkRm ', number, 'chunkFilename', chunkFilename); 189 | fs.exists(chunkFilename, function(exists) { 190 | if (exists) { 191 | 192 | console.log('exist removing ', chunkFilename); 193 | fs.unlink(chunkFilename, function(err) { 194 | if (err && options.onError) options.onError(err); 195 | }); 196 | 197 | pipeChunkRm(number + 1); 198 | 199 | } else { 200 | 201 | if (options.onDone) options.onDone(); 202 | 203 | } 204 | }); 205 | }; 206 | pipeChunkRm(1); 207 | }; 208 | 209 | return $; 210 | }; 211 | -------------------------------------------------------------------------------- /src/chunk.js: -------------------------------------------------------------------------------- 1 | var utils = require('./utils') 2 | 3 | function Chunk (uploader, file, offset) { 4 | utils.defineNonEnumerable(this, 'uploader', uploader) 5 | utils.defineNonEnumerable(this, 'file', file) 6 | utils.defineNonEnumerable(this, 'bytes', null) 7 | this.offset = offset 8 | this.tested = false 9 | this.retries = 0 10 | this.pendingRetry = false 11 | this.preprocessState = 0 12 | this.readState = 0 13 | this.loaded = 0 14 | this.total = 0 15 | 
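// chunkSize may be a plain number or a function of (file, chunk); offset is
// the zero-based chunk index, so startByte/endByte delimit this chunk's byte
// range (the last chunk may absorb the remainder unless forceChunkSize is
// set; see computeEndByte below).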
this.chunkSize = utils.evalOpts(uploader.opts.chunkSize, file, this) 16 | this.startByte = this.offset * this.chunkSize 17 | this.endByte = this.computeEndByte() 18 | this.xhr = null 19 | } 20 | 21 | var STATUS = Chunk.STATUS = { 22 | PENDING: 'pending', 23 | UPLOADING: 'uploading', 24 | READING: 'reading', 25 | SUCCESS: 'success', 26 | ERROR: 'error', 27 | COMPLETE: 'complete', 28 | PROGRESS: 'progress', 29 | RETRY: 'retry' 30 | } 31 | 32 | utils.extend(Chunk.prototype, { 33 | 34 | _event: function (evt, args) { 35 | args = utils.toArray(arguments) 36 | args.unshift(this) 37 | this.file._chunkEvent.apply(this.file, args) 38 | }, 39 | 40 | computeEndByte: function () { 41 | var endByte = Math.min(this.file.size, (this.offset + 1) * this.chunkSize) 42 | if (this.file.size - endByte < this.chunkSize && !this.uploader.opts.forceChunkSize) { 43 | // The last chunk will be bigger than the chunk size, 44 | // but less than 2 * this.chunkSize 45 | endByte = this.file.size 46 | } 47 | return endByte 48 | }, 49 | 50 | getParams: function () { 51 | return { 52 | chunkNumber: this.offset + 1, 53 | chunkSize: this.chunkSize, 54 | currentChunkSize: this.endByte - this.startByte, 55 | totalSize: this.file.size, 56 | identifier: this.file.uniqueIdentifier, 57 | filename: this.file.name, 58 | relativePath: this.file.relativePath, 59 | totalChunks: this.file.chunks.length 60 | } 61 | }, 62 | 63 | getTarget: function (target, params) { 64 | if (!params.length) { 65 | return target 66 | } 67 | if (target.indexOf('?') < 0) { 68 | target += '?' 69 | } else { 70 | target += '&' 71 | } 72 | return target + params.join('&') 73 | }, 74 | 75 | test: function () { 76 | this.xhr = new XMLHttpRequest() 77 | this.xhr.addEventListener('load', testHandler, false) 78 | this.xhr.addEventListener('error', testHandler, false) 79 | var testMethod = utils.evalOpts(this.uploader.opts.testMethod, this.file, this) 80 | var data = this.prepareXhrRequest(testMethod, true) 81 | this.xhr.send(data) 82 | 83 | var $ = this 84 | function testHandler (event) { 85 | var status = $.status(true) 86 | if (status === STATUS.ERROR) { 87 | $._event(status, $.message()) 88 | $.uploader.uploadNextChunk() 89 | } else if (status === STATUS.SUCCESS) { 90 | $._event(status, $.message()) 91 | $.tested = true 92 | } else if (!$.file.paused) { 93 | // Error might be caused by file pause method 94 | // Chunks does not exist on the server side 95 | $.tested = true 96 | $.send() 97 | } 98 | } 99 | }, 100 | 101 | preprocessFinished: function () { 102 | // Compute the endByte after the preprocess function to allow an 103 | // implementer of preprocess to set the fileObj size 104 | this.endByte = this.computeEndByte() 105 | this.preprocessState = 2 106 | this.send() 107 | }, 108 | 109 | readFinished: function (bytes) { 110 | this.readState = 2 111 | this.bytes = bytes 112 | this.send() 113 | }, 114 | 115 | send: function () { 116 | var preprocess = this.uploader.opts.preprocess 117 | var read = this.uploader.opts.readFileFn 118 | if (utils.isFunction(preprocess)) { 119 | switch (this.preprocessState) { 120 | case 0: 121 | this.preprocessState = 1 122 | preprocess(this) 123 | return 124 | case 1: 125 | return 126 | } 127 | } 128 | switch (this.readState) { 129 | case 0: 130 | this.readState = 1 131 | read(this.file, this.file.fileType, this.startByte, this.endByte, this) 132 | return 133 | case 1: 134 | return 135 | } 136 | if (this.uploader.opts.testChunks && !this.tested) { 137 | this.test() 138 | return 139 | } 140 | 141 | this.loaded = 0 142 | this.total = 
0 143 | this.pendingRetry = false 144 | 145 | // Set up request and listen for event 146 | this.xhr = new XMLHttpRequest() 147 | this.xhr.upload.addEventListener('progress', progressHandler, false) 148 | this.xhr.addEventListener('load', doneHandler, false) 149 | this.xhr.addEventListener('error', doneHandler, false) 150 | 151 | var uploadMethod = utils.evalOpts(this.uploader.opts.uploadMethod, this.file, this) 152 | var data = this.prepareXhrRequest(uploadMethod, false, this.uploader.opts.method, this.bytes) 153 | this.xhr.send(data) 154 | 155 | var $ = this 156 | function progressHandler (event) { 157 | if (event.lengthComputable) { 158 | $.loaded = event.loaded 159 | $.total = event.total 160 | } 161 | $._event(STATUS.PROGRESS, event) 162 | } 163 | 164 | function doneHandler (event) { 165 | var msg = $.message() 166 | $.processingResponse = true 167 | $.uploader.opts.processResponse(msg, function (err, res) { 168 | $.processingResponse = false 169 | if (!$.xhr) { 170 | return 171 | } 172 | $.processedState = { 173 | err: err, 174 | res: res 175 | } 176 | var status = $.status() 177 | if (status === STATUS.SUCCESS || status === STATUS.ERROR) { 178 | // delete this.data 179 | $._event(status, res) 180 | status === STATUS.ERROR && $.uploader.uploadNextChunk() 181 | } else { 182 | $._event(STATUS.RETRY, res) 183 | $.pendingRetry = true 184 | $.abort() 185 | $.retries++ 186 | var retryInterval = $.uploader.opts.chunkRetryInterval 187 | if (retryInterval !== null) { 188 | setTimeout(function () { 189 | $.send() 190 | }, retryInterval) 191 | } else { 192 | $.send() 193 | } 194 | } 195 | }, $.file, $) 196 | } 197 | }, 198 | 199 | abort: function () { 200 | var xhr = this.xhr 201 | this.xhr = null 202 | this.processingResponse = false 203 | this.processedState = null 204 | if (xhr) { 205 | xhr.abort() 206 | } 207 | }, 208 | 209 | status: function (isTest) { 210 | if (this.readState === 1) { 211 | return STATUS.READING 212 | } else if (this.pendingRetry || this.preprocessState === 1) { 213 | // if pending retry then that's effectively the same as actively uploading, 214 | // there might just be a slight delay before the retry starts 215 | return STATUS.UPLOADING 216 | } else if (!this.xhr) { 217 | return STATUS.PENDING 218 | } else if (this.xhr.readyState < 4 || this.processingResponse) { 219 | // Status is really 'OPENED', 'HEADERS_RECEIVED' 220 | // or 'LOADING' - meaning that stuff is happening 221 | return STATUS.UPLOADING 222 | } else { 223 | var _status 224 | if (this.uploader.opts.successStatuses.indexOf(this.xhr.status) > -1) { 225 | // HTTP 200, perfect 226 | // HTTP 202 Accepted - The request has been accepted for processing, but the processing has not been completed. 227 | _status = STATUS.SUCCESS 228 | } else if (this.uploader.opts.permanentErrors.indexOf(this.xhr.status) > -1 || 229 | !isTest && this.retries >= this.uploader.opts.maxChunkRetries) { 230 | // HTTP 415/500/501, permanent error 231 | _status = STATUS.ERROR 232 | } else { 233 | // this should never happen, but we'll reset and queue a retry 234 | // a likely case for this would be 503 service unavailable 235 | this.abort() 236 | _status = STATUS.PENDING 237 | } 238 | var processedState = this.processedState 239 | if (processedState && processedState.err) { 240 | _status = STATUS.ERROR 241 | } 242 | return _status 243 | } 244 | }, 245 | 246 | message: function () { 247 | return this.xhr ? 
this.xhr.responseText : '' 248 | }, 249 | 250 | progress: function () { 251 | if (this.pendingRetry) { 252 | return 0 253 | } 254 | var s = this.status() 255 | if (s === STATUS.SUCCESS || s === STATUS.ERROR) { 256 | return 1 257 | } else if (s === STATUS.PENDING) { 258 | return 0 259 | } else { 260 | return this.total > 0 ? this.loaded / this.total : 0 261 | } 262 | }, 263 | 264 | sizeUploaded: function () { 265 | var size = this.endByte - this.startByte 266 | // can't return only chunk.loaded value, because it is bigger than chunk size 267 | if (this.status() !== STATUS.SUCCESS) { 268 | size = this.progress() * size 269 | } 270 | return size 271 | }, 272 | 273 | prepareXhrRequest: function (method, isTest, paramsMethod, blob) { 274 | // Add data from the query options 275 | var query = utils.evalOpts(this.uploader.opts.query, this.file, this, isTest) 276 | query = utils.extend(this.getParams(), query) 277 | 278 | // processParams 279 | query = this.uploader.opts.processParams(query, this.file, this, isTest) 280 | 281 | var target = utils.evalOpts(this.uploader.opts.target, this.file, this, isTest) 282 | var data = null 283 | if (method === 'GET' || paramsMethod === 'octet') { 284 | // Add data from the query options 285 | var params = [] 286 | utils.each(query, function (v, k) { 287 | params.push([encodeURIComponent(k), encodeURIComponent(v)].join('=')) 288 | }) 289 | target = this.getTarget(target, params) 290 | data = blob || null 291 | } else { 292 | // Add data from the query options 293 | data = new FormData() 294 | utils.each(query, function (v, k) { 295 | data.append(k, v) 296 | }) 297 | if (typeof blob !== 'undefined') { 298 | data.append(this.uploader.opts.fileParameterName, blob, this.file.name) 299 | } 300 | } 301 | 302 | this.xhr.open(method, target, true) 303 | this.xhr.withCredentials = this.uploader.opts.withCredentials 304 | 305 | // Add data from header options 306 | utils.each(utils.evalOpts(this.uploader.opts.headers, this.file, this, isTest), function (v, k) { 307 | this.xhr.setRequestHeader(k, v) 308 | }, this) 309 | 310 | return data 311 | } 312 | 313 | }) 314 | 315 | module.exports = Chunk 316 | -------------------------------------------------------------------------------- /README_zh-CN.md: -------------------------------------------------------------------------------- 1 | # simple-uploader.js [![Build Status](https://travis-ci.org/simple-uploader/Uploader.svg?branch=master)](https://travis-ci.org/simple-uploader/Uploader?branch=master) [![codecov.io](http://codecov.io/github/simple-uploader/Uploader/coverage.svg?branch=master)](http://codecov.io/github/simple-uploader/Uploader?branch=master) [![Build Status](https://saucelabs.com/buildstatus/uploader)](https://saucelabs.com/u/uploader) 2 | 3 | [![Sauce Test Status](https://saucelabs.com/browser-matrix/uploader.svg)](https://saucelabs.com/u/uploader) 4 | 5 | ![QQ](https://github.com/simple-uploader/Uploader/blob/develop/assets/simple-uploader-QQ-3.png?raw=true) 6 | 7 | simple-uploader.js(也称 Uploader) 是一个上传库,支持多并发上传,文件夹、拖拽、可暂停继续、秒传、分块上传、出错自动重传、手工重传、进度、剩余时间、上传速度等特性;该上传库依赖 HTML5 File API。 8 | 9 | Fork [flow.js](https://github.com/flowjs/flow.js),但是进行了重构。 10 | 11 | 由于是分块上传,所以依赖文件的分块 API,所以受限于此浏览器支持程度为:Firefox 4+, Chrome 11+, Safari 6+ and Internet Explorer 10+。 12 | 13 | 默认提供了一个 Node.js 的示例,放在 `samples/` 目录下。 14 | 15 | ## 相比 flow.js 的新特性 16 | 17 | * 统一把文件和文件夹对待为 `Uploader.File`,统一管理 18 | 19 | * `Uploader` 本身其实就是一个根文件夹 20 | 21 | * 新增 `fileList` 属性,用来存文件和文件夹合集,只包含根下的文件和文件夹。 22 | 23 | ## 安装 24 | 25 | 从 
https://github.com/simple-uploader/Uploader/releases/ 下载最新的发布版本,里边的 `dist/` 文件夹下包含了打包后文件。 26 | 27 | 也可使用 npm: 28 | 29 | ```console 30 | npm install simple-uploader.js 31 | ``` 32 | 33 | 或者直接 git clone: 34 | 35 | ```console 36 | git clone https://github.com/simple-uploader/Uploader 37 | ``` 38 | 39 | ## 使用 40 | 41 | 创建一个 `Uploader` 实例: 42 | 43 | ```javascript 44 | var uploader = new Uploader({ 45 | target: '/api/photo/redeem-upload-token', 46 | query: { upload_token: 'my_token' } 47 | }) 48 | // 如果不支持 需要降级的地方 49 | if (!uploader.support) location.href = '/some-old-crappy-uploader' 50 | ``` 51 | 52 | 如果想要选择文件或者拖拽文件的话,你可以通过如下两个 API 来指定哪些 DOM 节点: 53 | 54 | ```javascript 55 | uploader.assignBrowse(document.getElementById('browseButton')) 56 | uploader.assignDrop(document.getElementById('dropTarget')) 57 | ``` 58 | 59 | 实例化后你还可以选择监听一些事件: 60 | 61 | ```javascript 62 | // 文件添加 单个文件 63 | uploader.on('fileAdded', function (file, event) { 64 | console.log(file, event) 65 | }) 66 | // 单个文件上传成功 67 | uploader.on('fileSuccess', function (rootFile, file, message) { 68 | console.log(rootFile, file, message) 69 | }) 70 | // 根下的单个文件(文件夹)上传完成 71 | uploader.on('fileComplete', function (rootFile) { 72 | console.log(rootFile) 73 | }) 74 | // 某个文件上传失败了 75 | uploader.on('fileError', function (rootFile, file, message) { 76 | console.log(rootFile, file, message) 77 | }) 78 | ``` 79 | 80 | ## 服务端如何接受呢? 81 | 82 | 因为在前端做了一些分块啊等处理,所以也需要服务端做一些特殊处理,这个可以参考 `samples/Node.js/` 实现。 83 | 84 | 每一个上传块都会包含如下分块信息: 85 | 86 | * `chunkNumber`: 当前块的次序,第一个块是 1,注意不是从 0 开始的。 87 | * `totalChunks`: 文件被分成块的总数。 88 | * `chunkSize`: 分块大小,根据 `totalSize` 和这个值你就可以计算出总共的块数。注意最后一块的大小可能会比这个要大。 89 | * `currentChunkSize`: 当前块的大小,实际大小。 90 | * `totalSize`: 文件总大小。 91 | * `identifier`: 这个就是每个文件的唯一标示。 92 | * `filename`: 文件名。 93 | * `relativePath`: 文件夹上传的时候文件的相对路径属性。 94 | 95 | 一个分块可以被上传多次,当然这肯定不是标准行为,但是在实际上传过程中是可能发生这种事情的,这种重传也是本库的特性之一。 96 | 97 | 对于每个请求的响应码你都可以根据 `successStatuses`和`permanentErrors` 配置项是否是认为成功或失败的: 98 | 99 | * `200`, `201`, `202`: 当前块上传成功,不需要重传。 100 | * `404`, `415`. 
`500`, `501`: 当前块上传失败,会取消整个文件上传。 101 | * _其他状态码_: 出错了,但是会自动重试上传。 102 | 103 | ## 处理 GET (或者 `test()` 请求) 104 | 105 | 如果说 `testChunks` 配置项是 `true` 的话,就可以实现秒传、或者刷新页面后、或者重启浏览器、甚至是跨浏览器还可以继续上传的效果,所有的上传必备的参数数据都会被一并发出: 106 | 107 | * 如果请求返回了 `successStatuses` 配置的状态码,那么假定此块已经上传成功了。 108 | * 如果返回的是 `permanentErrors` 中的状态码,那么就认为此块上传失败。 109 | * 如果是其他状态吗,那么就认为服务端还没有这个块,需要按照标准模式上传。 110 | 111 | 所以有了以上的支持,服务端就可以根据预先发的这个请求来决定是否需要上传这个块内容,所以也就实现了秒传或者跨浏览器上传特性。 112 | 113 | ## API 文档 114 | 115 | ### Uploader 116 | 117 | #### 配置 118 | 119 | 实例化的时候可以传入配置项: 120 | 121 | ```javascript 122 | var r = new Uploader({ opt1: 'val', ...}) 123 | ``` 124 | 125 | 支持的配置项: 126 | 127 | * `target` 目标上传 URL,可以是字符串也可以是函数,如果是函数的话,则会传入 `Uploader.File` 实例、当前块 `Uploader.Chunk` 以及是否是测试模式,默认值为 `'/'`。 128 | * `singleFile` 单文件上传。覆盖式,如果选择了多个会把之前的取消掉。默认 `false`。 129 | * `chunkSize` 分块时按照该值来分。最后一个上传块的大小是可能是大于等于1倍的这个值但是小于两倍的这个值大小,可见这个 [Issue #51](https://github.com/23/resumable.js/issues/51),默认 `1*1024*1024`。 130 | * `forceChunkSize` 是否强制所有的块都是小于等于 `chunkSize` 的值。默认是 `false`。 131 | * `simultaneousUploads` 并发上传数,默认 `3`。 132 | * `fileParameterName` 上传文件时文件的参数名,默认 `file`。 133 | * `query` 其他额外的参数,这个可以是一个对象或者是一个函数,如果是函数的话,则会传入 `Uploader.File` 实例、当前块 `Uploader.Chunk` 以及是否是测试模式,默认为 `{}`。 134 | * `headers` 额外的一些请求头,如果是函数的话,则会传入 `Uploader.File` 实例、当前块 `Uploader.Chunk` 以及是否是测试模式,默认 `{}`。 135 | * `withCredentials` 标准的 CORS 请求是不会带上 cookie 的,如果想要带的话需要设置 `withCredentials` 为 `true`,默认 `false`。 136 | * `method` 当上传的时候所使用的是方式,可选 `multipart`、`octet`,默认 `multipart`,参考 [multipart vs octet](https://stackoverflow.com/questions/29347234/multipart-form-data-vs-application-octet-stream)。 137 | * `testMethod` 测试的时候使用的 HTTP 方法,可以是字符串或者函数,如果是函数的话,则会传入 `Uploader.File` 实例、当前块 `Uploader.Chunk`,默认 `GET`。 138 | * `uploadMethod` 真正上传的时候使用的 HTTP 方法,可以是字符串或者函数,如果是函数的话,则会传入 `Uploader.File` 实例、当前块 `Uploader.Chunk`,默认 `POST`。 139 | * `allowDuplicateUploads ` 如果说一个文件已经上传过了是否还允许再次上传。默认的话如果已经上传了,除非你移除了否则是不会再次重新上传的,所以也就是默认值为 `false`。 140 | * `prioritizeFirstAndLastChunk` 对于文件而言是否高优先级发送第一个和最后一个块。一般用来发送到服务端,然后判断是否是合法文件;例如图片或者视频的 meta 数据一般放在文件第一部分,这样可以根据第一个块就能知道是否支持;默认 `false`。 141 | * `testChunks` 是否测试每个块是否在服务端已经上传了,主要用来实现秒传、跨浏览器上传等,默认 `true`。 142 | * `preprocess` 可选的函数,每个块在测试以及上传前会被调用,参数就是当前上传块实例 `Uploader.Chunk`,注意在这个函数中你需要调用当前上传块实例的 `preprocessFinished` 方法,默认 `null`。 143 | * `initFileFn` 可选函数用于初始化文件对象,传入的参数就是 `Uploader.File` 实例。 144 | * `readFileFn` 可选的函数用于原始文件的读取操作,传入的参数就是 `Uploader.File` 实例、文件类型、开始字节位置 startByte,结束字节位置 endByte、以及当前块 `Uploader.Chunk` 实例。并且当完成后应该调用当前块实例的`readFinished` 方法,且带参数-已读取的 bytes。 145 | * `checkChunkUploadedByResponse` 可选的函数用于根据 XHR 响应内容检测每个块是否上传成功了,传入的参数是:`Uploader.Chunk` 实例以及请求响应信息。这样就没必要上传(测试)所有的块了,具体细节原因参考 [Issue #1](https://github.com/simple-uploader/Uploader/issues/1),[使用示例](https://github.com/simple-uploader/Uploader/blob/develop/samples/Node.js/public/app.js#L15). 
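  一个 `checkChunkUploadedByResponse` 的简单示意(仅作参考;假设服务端在测试请求的响应中返回已上传块序号的 JSON 数组,字段名 `uploaded_chunks` 只是示例约定):

  ```javascript
  var uploader = new Uploader({
    target: '/upload',
    testChunks: true,
    checkChunkUploadedByResponse: function (chunk, message) {
      // 假设响应形如 { "uploaded_chunks": [1, 2, 3] }
      var res = JSON.parse(message)
      // chunk.offset 从 0 开始,块序号(chunkNumber)从 1 开始
      return (res.uploaded_chunks || []).indexOf(chunk.offset + 1) > -1
    }
  })
  ```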
146 | * `generateUniqueIdentifier` 可覆盖默认的生成文件唯一标示的函数,默认 `null`。 147 | * `maxChunkRetries` 最大自动失败重试上传次数,值可以是任意正整数,如果是 `undefined` 则代表无限次,默认 `0`。 148 | * `chunkRetryInterval` 重试间隔,值可以是任意正整数,如果是 `null` 则代表立即重试,默认 `null`。 149 | * `progressCallbacksInterval` 进度回调间隔,默认是 `500`。 150 | * `speedSmoothingFactor` 主要用于计算平均速度,值就是从 0 到 1,如果是 1 那么上传的平均速度就等于当前上传速度,如果说长时间上传的话,建议设置为 `0.02`,这样剩余时间预估会更精确,这个参数是需要和 `progressCallbacksInterval` 一起调整的,默认是 `0.1`。 151 | * `successStatuses` 认为响应式成功的响应码,默认 `[200, 201, 152 | 202]`。 153 | * `permanentErrors` 认为是出错的响应码,默认 `[404, 415, 500, 501]`。 154 | * `initialPaused` 初始文件 paused 状态,默认 `false`。 155 | * `processResponse` 处理请求结果,默认 `function (response, cb) { cb(null, response) }`。 0.5.2版本后,`processResponse` 会传入更多参数:(response, cb, Uploader.File, Uploader.Chunk)。 156 | * `processParams` 处理请求参数,默认 `function (params) {return params}`,一般用于修改参数名字或者删除参数。0.5.2版本后,`processParams` 会有更多参数:(params, Uploader.File, Uploader.Chunk, isTest)。 157 | 158 | #### 属性 159 | 160 | * `.support` 当前浏览器是否支持 File API 来上传。 161 | * `.supportDirectory` 当前浏览器是否支持文件夹上传。 162 | * `.opts` 实例的配置项对象。 163 | * `.files` 由 `Uploader.File` 文件对象组成的数组,纯文件列表。 164 | * `.fileList` 由 `Uploader.File` 文件、文件夹对象组成的数组,文件和文件夹共存。 165 | 166 | #### 方法 167 | 168 | * `.assignBrowse(domNodes, isDirectory, singleFile, attributes)` 指定 DOM 元素可以选择上传。 169 | * `domNodes` DOM 元素 170 | * `isDirectory` 如果传入的是 `true` 则代表是要选择文件夹上传的,你可以通过判断 `supportDirectory` 来决定是否设置 171 | * `singleFile` 是否只能选择单个文件 172 | * `attributes` 传入的其他属性值,例如你可以传入 `accept` 属性的值为 `image/*`,这样就意味着点选的时候只能选择图片。全部属性列表:https://www.w3.org/wiki/HTML/Elements/input/file 173 | 174 | Note: 避免使用 `a` 或者 `button` 标签作为选择文件按钮。 175 | * `.assignDrop(domNodes)` 指定 DOM 元素作为拖拽上传目标。 176 | * `.unAssignDrop(domNodes)` 取消指定的 DOM 元素作为拖拽上传目标。 177 | * `.on(event, callback)` 监听事件。 178 | * `.off([event, [callback]])`: 179 | * `.off(event)` 移除指定事件的所有事件回调 180 | * `.off(event, callback)` 移除指定事件的指定回调。`callback` 是一个函数 181 | * `.upload()` 开始或者继续上传。 182 | * `.pause()` 暂停上传。 183 | * `.resume()` 继续上传。 184 | * `.cancel()` 取消所有上传文件,文件会被移除掉。 185 | * `.progress()` 返回一个0-1的浮点数,当前上传进度。 186 | * `.isUploading()` 返回一个布尔值标示是否还有文件正在上传中。 187 | * `.addFile(file)` 添加一个原生的文件对象到上传列表中。 188 | * `.removeFile(file)` 从上传列表中移除一个指定的 `Uploader.File` 实例对象。 189 | * `.getFromUniqueIdentifier(uniqueIdentifier)` 根据唯一标识找到 `Uploader.File` 实例。 190 | * `.getSize()` 上传文件的总大小。 191 | * `.sizeUploaded()` 所有已经成功上传文件大小。 192 | * `.timeRemaining()` 剩余时间,单位秒;这个是基于平均上传速度计算出来的,如果说上传速度为 0,那么这个值就是 `Number.POSITIVE_INFINITY`。 193 | 194 | #### 事件 195 | 196 | * `.change(event)` input 的 change 事件。 197 | * `.dragover(event)` 拖拽区域的 dragover 事件。 198 | * `.dragenter(event)` 拖拽区域的 dragenter 事件。 199 | * `.dragleave(event)` 拖拽区域的 dragleave 事件。 200 | * `.fileSuccess(rootFile, file, message, chunk)` 一个文件上传成功事件,第一个参数 `rootFile` 就是成功上传的文件所属的根 `Uploader.File` 对象,它应该包含或者等于成功上传文件;第二个参数 `file` 就是当前成功的 `Uploader.File` 对象本身;第三个参数就是 `message` 就是服务端响应内容,永远都是字符串;第四个参数 `chunk` 就是 `Uploader.Chunk` 实例,它就是该文件的最后一个块实例,如果你想得到请求响应码的话,`chunk.xhr.status` 就是。 201 | * `.fileComplete(rootFile)` 一个根文件(文件夹)成功上传完成。 202 | * `.fileProgress(rootFile, file, chunk)` 一个文件在上传中。 203 | * `.fileAdded(file, event)` 这个事件一般用作文件校验,如果说返回了 `false`,那么这个文件就会被忽略,不会添加到文件上传列表中。 204 | * `.filesAdded(files, fileList, event)` 和 fileAdded 一样,但是一般用作多个文件的校验。 205 | * `.filesSubmitted(files, fileList, event)` 和 filesAdded 类似,但是是文件已经加入到上传列表中,一般用来开始整个的上传。 206 | * `.fileRemoved(file)` 一个文件(文件夹)被移除。 207 | * `.fileRetry(rootFile, file, chunk)` 文件重试上传事件。 208 | * `.fileError(rootFile, file, message, chunk)` 上传过程中出错了。 209 | * 
`.uploadStart()` 已经开始上传了。 210 | * `.complete()` 上传完毕。 211 | * `.catchAll(event, ...)` 所有的事件。 212 | 213 | ### Uploader.File 214 | 215 | #### 属性 216 | 217 | * `.uploader` 对 `Uploader` 实例的引用。 218 | * `.name` 文件(夹)名字。 219 | * `.averageSpeed` 平均速度,单位字节每秒。 220 | * `.currentSpeed` 当前速度,单位字节每秒。 221 | * `.paused` 文件是否是暂停的。 222 | * `.error` 文件上传是否出错了。 223 | * `.isFolder` 是否是文件夹。 224 | 225 | 如果不是文件夹的话,那么还会有如下属性: 226 | 227 | * `.file` 原生 HTML5 `File` 对象。 228 | * `.relativePath` 文件相对路径。 229 | * `.size` 文件大小,单位字节。 230 | * `.uniqueIdentifier` 文件唯一标示。 231 | * `.chunks` 由 `Uploader.Chunk` 实例组成数组,分成的块集合,一般场景下并不需要关心它。 232 | 233 | #### 方法 234 | 235 | * `.getRoot()` 得到当前文件所属的根文件,这个根文件就是包含在 `uploader.fileList` 中的. 236 | * `.progress()` 返回一个 0 到 1 的数字,代表当前上传进度。 237 | * `.pause()` 暂停上传文件。 238 | * `.resume()` 继续上传文件。 239 | * `.cancel()` 取消上传且从文件列表中移除。 240 | * `.retry()` 重新上传文件。 241 | * `.bootstrap()` 重新初始化 `Uploader.File` 对象的状态,包括重新分块,重新创建新的 XMLHttpRequest 实例。 242 | * `.isUploading()` 文件是否仍在上传中。 243 | * `.isComplete()` 文件是否已经上传完成。 244 | * `.sizeUploaded()` 已经上传大小。 245 | * `.timeRemaining()` 剩余时间,基于平均速度的,如果说平均速度为 0,那么值就是 `Number.POSITIVE_INFINITY`。 246 | * `.getExtension()` 得到小写的后缀。 247 | * `.getType()` 得到文件类型。 248 | 249 | ## 源 250 | 251 | simple-uploader.js 是 FORK 的 https://github.com/flowjs/flow.js 的,参考了 https://github.com/23/resumable.js。 252 | -------------------------------------------------------------------------------- /src/file.js: -------------------------------------------------------------------------------- 1 | var utils = require('./utils') 2 | var Chunk = require('./chunk') 3 | 4 | function File (uploader, file, parent) { 5 | utils.defineNonEnumerable(this, 'uploader', uploader) 6 | this.isRoot = this.isFolder = uploader === this 7 | utils.defineNonEnumerable(this, 'parent', parent || null) 8 | utils.defineNonEnumerable(this, 'files', []) 9 | utils.defineNonEnumerable(this, 'fileList', []) 10 | utils.defineNonEnumerable(this, 'chunks', []) 11 | utils.defineNonEnumerable(this, '_errorFiles', []) 12 | utils.defineNonEnumerable(this, 'file', null) 13 | this.id = utils.uid() 14 | 15 | if (this.isRoot || !file) { 16 | this.file = null 17 | } else { 18 | if (utils.isString(file)) { 19 | // folder 20 | this.isFolder = true 21 | this.file = null 22 | this.path = file 23 | if (this.parent.path) { 24 | file = file.substr(this.parent.path.length) 25 | } 26 | this.name = file.charAt(file.length - 1) === '/' ? 
file.substr(0, file.length - 1) : file 27 | } else { 28 | this.file = file 29 | this.fileType = this.file.type 30 | this.name = file.fileName || file.name 31 | this.size = file.size 32 | this.relativePath = file.relativePath || file.webkitRelativePath || this.name 33 | this._parseFile() 34 | } 35 | } 36 | 37 | this.paused = uploader.opts.initialPaused 38 | this.error = false 39 | this.allError = false 40 | this.aborted = false 41 | this.completed = false 42 | this.averageSpeed = 0 43 | this.currentSpeed = 0 44 | this._lastProgressCallback = Date.now() 45 | this._prevUploadedSize = 0 46 | this._prevProgress = 0 47 | 48 | this.bootstrap() 49 | } 50 | 51 | utils.extend(File.prototype, { 52 | 53 | _parseFile: function () { 54 | var ppaths = parsePaths(this.relativePath) 55 | if (ppaths.length) { 56 | var filePaths = this.uploader.filePaths 57 | utils.each(ppaths, function (path, i) { 58 | var folderFile = filePaths[path] 59 | if (!folderFile) { 60 | folderFile = new File(this.uploader, path, this.parent) 61 | filePaths[path] = folderFile 62 | this._updateParentFileList(folderFile) 63 | } 64 | this.parent = folderFile 65 | folderFile.files.push(this) 66 | if (!ppaths[i + 1]) { 67 | folderFile.fileList.push(this) 68 | } 69 | }, this) 70 | } else { 71 | this._updateParentFileList() 72 | } 73 | }, 74 | 75 | _updateParentFileList: function (file) { 76 | if (!file) { 77 | file = this 78 | } 79 | var p = this.parent 80 | if (p) { 81 | p.fileList.push(file) 82 | } 83 | }, 84 | 85 | _eachAccess: function (eachFn, fileFn) { 86 | if (this.isFolder) { 87 | utils.each(this.files, function (f, i) { 88 | return eachFn.call(this, f, i) 89 | }, this) 90 | return 91 | } 92 | fileFn.call(this, this) 93 | }, 94 | 95 | bootstrap: function () { 96 | if (this.isFolder) return 97 | var opts = this.uploader.opts 98 | if (utils.isFunction(opts.initFileFn)) { 99 | opts.initFileFn.call(this, this) 100 | } 101 | 102 | this.abort(true) 103 | this._resetError() 104 | // Rebuild stack of chunks from file 105 | this._prevProgress = 0 106 | var round = opts.forceChunkSize ? 
Math.ceil : Math.floor 107 | var chunks = Math.max(round(this.size / opts.chunkSize), 1) 108 | for (var offset = 0; offset < chunks; offset++) { 109 | this.chunks.push(new Chunk(this.uploader, this, offset)) 110 | } 111 | }, 112 | 113 | _measureSpeed: function () { 114 | var smoothingFactor = this.uploader.opts.speedSmoothingFactor 115 | var timeSpan = Date.now() - this._lastProgressCallback 116 | if (!timeSpan) { 117 | return 118 | } 119 | var uploaded = this.sizeUploaded() 120 | // Prevent negative upload speed after file upload resume 121 | this.currentSpeed = Math.max((uploaded - this._prevUploadedSize) / timeSpan * 1000, 0) 122 | this.averageSpeed = smoothingFactor * this.currentSpeed + (1 - smoothingFactor) * this.averageSpeed 123 | this._prevUploadedSize = uploaded 124 | if (this.parent && this.parent._checkProgress()) { 125 | this.parent._measureSpeed() 126 | } 127 | }, 128 | 129 | _checkProgress: function (file) { 130 | return Date.now() - this._lastProgressCallback >= this.uploader.opts.progressCallbacksInterval 131 | }, 132 | 133 | _chunkEvent: function (chunk, evt, message) { 134 | var uploader = this.uploader 135 | var STATUS = Chunk.STATUS 136 | var that = this 137 | var rootFile = this.getRoot() 138 | var triggerProgress = function () { 139 | that._measureSpeed() 140 | uploader._trigger('fileProgress', rootFile, that, chunk) 141 | that._lastProgressCallback = Date.now() 142 | } 143 | switch (evt) { 144 | case STATUS.PROGRESS: 145 | if (this._checkProgress()) { 146 | triggerProgress() 147 | } 148 | break 149 | case STATUS.ERROR: 150 | this._error() 151 | this.abort(true) 152 | uploader._trigger('fileError', rootFile, this, message, chunk) 153 | break 154 | case STATUS.SUCCESS: 155 | this._updateUploadedChunks(message, chunk) 156 | if (this.error) { 157 | return 158 | } 159 | clearTimeout(this._progeressId) 160 | this._progeressId = 0 161 | var timeDiff = Date.now() - this._lastProgressCallback 162 | if (timeDiff < uploader.opts.progressCallbacksInterval) { 163 | this._progeressId = setTimeout(triggerProgress, uploader.opts.progressCallbacksInterval - timeDiff) 164 | } 165 | if (this.isComplete()) { 166 | clearTimeout(this._progeressId) 167 | triggerProgress() 168 | this.currentSpeed = 0 169 | this.averageSpeed = 0 170 | uploader._trigger('fileSuccess', rootFile, this, message, chunk) 171 | if (rootFile.isComplete()) { 172 | uploader._trigger('fileComplete', rootFile, this) 173 | } 174 | } else if (!this._progeressId) { 175 | triggerProgress() 176 | } 177 | break 178 | case STATUS.RETRY: 179 | uploader._trigger('fileRetry', rootFile, this, chunk) 180 | break 181 | } 182 | }, 183 | 184 | _updateUploadedChunks: function (message, chunk) { 185 | var checkChunkUploaded = this.uploader.opts.checkChunkUploadedByResponse 186 | if (checkChunkUploaded) { 187 | var xhr = chunk.xhr 188 | utils.each(this.chunks, function (_chunk) { 189 | if (!_chunk.tested) { 190 | var uploaded = checkChunkUploaded.call(this, _chunk, message) 191 | if (_chunk === chunk && !uploaded) { 192 | // fix the first chunk xhr status 193 | // treated as success but checkChunkUploaded is false 194 | // so the current chunk should be uploaded again 195 | _chunk.xhr = null 196 | } 197 | if (uploaded) { 198 | // first success and other chunks are uploaded 199 | // then set xhr, so the uploaded chunks 200 | // will be treated as success too 201 | _chunk.xhr = xhr 202 | } 203 | _chunk.tested = true 204 | } 205 | }, this) 206 | if (!this._firstResponse) { 207 | this._firstResponse = true 208 | this.uploader.upload(true) 
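// First response carrying upload-state info: re-enter the upload flow so
// chunks now marked as uploaded are skipped; later responses only need to
// kick off the next pending chunk (the else branch below).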
209 | } else { 210 | this.uploader.uploadNextChunk() 211 | } 212 | } else { 213 | this.uploader.uploadNextChunk() 214 | } 215 | }, 216 | 217 | _error: function () { 218 | this.error = this.allError = true 219 | var parent = this.parent 220 | while (parent && parent !== this.uploader) { 221 | parent._errorFiles.push(this) 222 | parent.error = true 223 | if (parent._errorFiles.length === parent.files.length) { 224 | parent.allError = true 225 | } 226 | parent = parent.parent 227 | } 228 | }, 229 | 230 | _resetError: function () { 231 | this.error = this.allError = false 232 | var parent = this.parent 233 | var index = -1 234 | while (parent && parent !== this.uploader) { 235 | index = parent._errorFiles.indexOf(this) 236 | parent._errorFiles.splice(index, 1) 237 | parent.allError = false 238 | if (!parent._errorFiles.length) { 239 | parent.error = false 240 | } 241 | parent = parent.parent 242 | } 243 | }, 244 | 245 | isComplete: function () { 246 | if (!this.completed) { 247 | var outstanding = false 248 | this._eachAccess(function (file) { 249 | if (!file.isComplete()) { 250 | outstanding = true 251 | return false 252 | } 253 | }, function () { 254 | if (this.error) { 255 | outstanding = true 256 | } else { 257 | var STATUS = Chunk.STATUS 258 | utils.each(this.chunks, function (chunk) { 259 | var status = chunk.status() 260 | if (status === STATUS.ERROR || status === STATUS.PENDING || status === STATUS.UPLOADING || status === STATUS.READING || chunk.preprocessState === 1 || chunk.readState === 1) { 261 | outstanding = true 262 | return false 263 | } 264 | }) 265 | } 266 | }) 267 | this.completed = !outstanding 268 | } 269 | return this.completed 270 | }, 271 | 272 | isUploading: function () { 273 | var uploading = false 274 | this._eachAccess(function (file) { 275 | if (file.isUploading()) { 276 | uploading = true 277 | return false 278 | } 279 | }, function () { 280 | var uploadingStatus = Chunk.STATUS.UPLOADING 281 | utils.each(this.chunks, function (chunk) { 282 | if (chunk.status() === uploadingStatus) { 283 | uploading = true 284 | return false 285 | } 286 | }) 287 | }) 288 | return uploading 289 | }, 290 | 291 | resume: function () { 292 | this._eachAccess(function (f) { 293 | f.resume() 294 | }, function () { 295 | this.paused = false 296 | this.aborted = false 297 | this.uploader.upload() 298 | }) 299 | this.paused = false 300 | this.aborted = false 301 | }, 302 | 303 | pause: function () { 304 | this._eachAccess(function (f) { 305 | f.pause() 306 | }, function () { 307 | this.paused = true 308 | this.abort() 309 | }) 310 | this.paused = true 311 | }, 312 | 313 | cancel: function () { 314 | this.uploader.removeFile(this) 315 | }, 316 | 317 | retry: function (file) { 318 | var fileRetry = function (file) { 319 | if (file.error) { 320 | file.bootstrap() 321 | } 322 | } 323 | if (file) { 324 | file.bootstrap() 325 | } else { 326 | this._eachAccess(fileRetry, function () { 327 | this.bootstrap() 328 | }) 329 | } 330 | this.uploader.upload() 331 | }, 332 | 333 | abort: function (reset) { 334 | if (this.aborted) { 335 | return 336 | } 337 | this.currentSpeed = 0 338 | this.averageSpeed = 0 339 | this.aborted = !reset 340 | var chunks = this.chunks 341 | if (reset) { 342 | this.chunks = [] 343 | } 344 | var uploadingStatus = Chunk.STATUS.UPLOADING 345 | utils.each(chunks, function (c) { 346 | if (c.status() === uploadingStatus) { 347 | c.abort() 348 | this.uploader.uploadNextChunk() 349 | } 350 | }, this) 351 | }, 352 | 353 | progress: function () { 354 | var totalDone = 0 355 | var 
totalSize = 0 356 | var ret = 0 357 | this._eachAccess(function (file, index) { 358 | totalDone += file.progress() * file.size 359 | totalSize += file.size 360 | if (index === this.files.length - 1) { 361 | ret = totalSize > 0 ? totalDone / totalSize : this.isComplete() ? 1 : 0 362 | } 363 | }, function () { 364 | if (this.error) { 365 | ret = 1 366 | return 367 | } 368 | if (this.chunks.length === 1) { 369 | this._prevProgress = Math.max(this._prevProgress, this.chunks[0].progress()) 370 | ret = this._prevProgress 371 | return 372 | } 373 | // Sum up progress across everything 374 | var bytesLoaded = 0 375 | utils.each(this.chunks, function (c) { 376 | // get chunk progress relative to entire file 377 | bytesLoaded += c.progress() * (c.endByte - c.startByte) 378 | }) 379 | var percent = bytesLoaded / this.size 380 | // We don't want to lose percentages when an upload is paused 381 | this._prevProgress = Math.max(this._prevProgress, percent > 0.9999 ? 1 : percent) 382 | ret = this._prevProgress 383 | }) 384 | return ret 385 | }, 386 | 387 | getSize: function () { 388 | var size = 0 389 | this._eachAccess(function (file) { 390 | size += file.size 391 | }, function () { 392 | size += this.size 393 | }) 394 | return size 395 | }, 396 | 397 | getFormatSize: function () { 398 | var size = this.getSize() 399 | return utils.formatSize(size) 400 | }, 401 | 402 | getRoot: function () { 403 | if (this.isRoot) { 404 | return this 405 | } 406 | var parent = this.parent 407 | while (parent) { 408 | if (parent.parent === this.uploader) { 409 | // find it 410 | return parent 411 | } 412 | parent = parent.parent 413 | } 414 | return this 415 | }, 416 | 417 | sizeUploaded: function () { 418 | var size = 0 419 | this._eachAccess(function (file) { 420 | size += file.sizeUploaded() 421 | }, function () { 422 | utils.each(this.chunks, function (chunk) { 423 | size += chunk.sizeUploaded() 424 | }) 425 | }) 426 | return size 427 | }, 428 | 429 | timeRemaining: function () { 430 | var ret = 0 431 | var sizeDelta = 0 432 | var averageSpeed = 0 433 | this._eachAccess(function (file, i) { 434 | if (!file.paused && !file.error) { 435 | sizeDelta += file.size - file.sizeUploaded() 436 | averageSpeed += file.averageSpeed 437 | } 438 | if (i === this.files.length - 1) { 439 | ret = calRet(sizeDelta, averageSpeed) 440 | } 441 | }, function () { 442 | if (this.paused || this.error) { 443 | ret = 0 444 | return 445 | } 446 | var delta = this.size - this.sizeUploaded() 447 | ret = calRet(delta, this.averageSpeed) 448 | }) 449 | return ret 450 | function calRet (delta, averageSpeed) { 451 | if (delta && !averageSpeed) { 452 | return Number.POSITIVE_INFINITY 453 | } 454 | if (!delta && !averageSpeed) { 455 | return 0 456 | } 457 | return Math.floor(delta / averageSpeed) 458 | } 459 | }, 460 | 461 | removeFile: function (file) { 462 | if (file.isFolder) { 463 | while (file.files.length) { 464 | var f = file.files[file.files.length - 1] 465 | this._removeFile(f) 466 | } 467 | } 468 | this._removeFile(file) 469 | }, 470 | 471 | _delFilePath: function (file) { 472 | if (file.path && this.filePaths) { 473 | delete this.filePaths[file.path] 474 | } 475 | utils.each(file.fileList, function (file) { 476 | this._delFilePath(file) 477 | }, this) 478 | }, 479 | 480 | _removeFile: function (file) { 481 | if (!file.isFolder) { 482 | utils.each(this.files, function (f, i) { 483 | if (f === file) { 484 | this.files.splice(i, 1) 485 | return false 486 | } 487 | }, this) 488 | file.abort() 489 | var parent = file.parent 490 | var newParent 
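// Detach the file from every ancestor folder as well; folders that end up
// empty remove themselves from their own parent further below.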
491 | while (parent && parent !== this) { 492 | newParent = parent.parent 493 | parent._removeFile(file) 494 | parent = newParent 495 | } 496 | } 497 | file.parent === this && utils.each(this.fileList, function (f, i) { 498 | if (f === file) { 499 | this.fileList.splice(i, 1) 500 | return false 501 | } 502 | }, this) 503 | if (!this.isRoot && this.isFolder && !this.files.length) { 504 | this.parent._removeFile(this) 505 | this.uploader._delFilePath(this) 506 | } 507 | file.parent = null 508 | }, 509 | 510 | getType: function () { 511 | if (this.isFolder) { 512 | return 'folder' 513 | } 514 | return this.file.type && this.file.type.split('/')[1] 515 | }, 516 | 517 | getExtension: function () { 518 | if (this.isFolder) { 519 | return '' 520 | } 521 | return this.name.substr((~-this.name.lastIndexOf('.') >>> 0) + 2).toLowerCase() 522 | } 523 | 524 | }) 525 | 526 | module.exports = File 527 | 528 | function parsePaths (path) { 529 | var ret = [] 530 | var paths = path.split('/') 531 | var len = paths.length 532 | var i = 1 533 | paths.splice(len - 1, 1) 534 | len-- 535 | if (paths.length) { 536 | while (i <= len) { 537 | ret.push(paths.slice(0, i++).join('/') + '/') 538 | } 539 | } 540 | return ret 541 | } 542 | -------------------------------------------------------------------------------- /src/uploader.js: -------------------------------------------------------------------------------- 1 | var utils = require('./utils') 2 | var event = require('./event') 3 | var File = require('./file') 4 | var Chunk = require('./chunk') 5 | 6 | var version = '__VERSION__' 7 | 8 | var isServer = typeof window === 'undefined' 9 | 10 | // ie10+ 11 | var ie10plus = isServer ? false : window.navigator.msPointerEnabled 12 | var support = (function () { 13 | if (isServer) { 14 | return false 15 | } 16 | var sliceName = 'slice' 17 | var _support = utils.isDefined(window.File) && utils.isDefined(window.Blob) && 18 | utils.isDefined(window.FileList) 19 | var bproto = null 20 | if (_support) { 21 | bproto = window.Blob.prototype 22 | utils.each(['slice', 'webkitSlice', 'mozSlice'], function (n) { 23 | if (bproto[n]) { 24 | sliceName = n 25 | return false 26 | } 27 | }) 28 | _support = !!bproto[sliceName] 29 | } 30 | if (_support) Uploader.sliceName = sliceName 31 | bproto = null 32 | return _support 33 | })() 34 | 35 | var supportDirectory = (function () { 36 | if (isServer) { 37 | return false 38 | } 39 | var input = window.document.createElement('input') 40 | input.type = 'file' 41 | var sd = 'webkitdirectory' in input || 'directory' in input 42 | input = null 43 | return sd 44 | })() 45 | 46 | function Uploader (opts) { 47 | this.support = support 48 | /* istanbul ignore if */ 49 | if (!this.support) { 50 | return 51 | } 52 | this.supportDirectory = supportDirectory 53 | utils.defineNonEnumerable(this, 'filePaths', {}) 54 | this.opts = utils.extend({}, Uploader.defaults, opts || {}) 55 | 56 | this.preventEvent = utils.bind(this._preventEvent, this) 57 | 58 | File.call(this, this) 59 | } 60 | 61 | /** 62 | * Default read function using the webAPI 63 | * 64 | * @function webAPIFileRead(fileObj, fileType, startByte, endByte, chunk) 65 | * 66 | */ 67 | var webAPIFileRead = function (fileObj, fileType, startByte, endByte, chunk) { 68 | chunk.readFinished(fileObj.file[Uploader.sliceName](startByte, endByte, fileType)) 69 | } 70 | 71 | Uploader.version = version 72 | 73 | Uploader.defaults = { 74 | chunkSize: 1024 * 1024, 75 | forceChunkSize: false, 76 | simultaneousUploads: 3, 77 | singleFile: false, 78 | 
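// fileParameterName is the multipart field each chunk is posted under and
// must match what the server reads (the Node.js sample reads the same
// 'file' name via $.fileParameterName).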
fileParameterName: 'file', 79 | progressCallbacksInterval: 500, 80 | speedSmoothingFactor: 0.1, 81 | query: {}, 82 | headers: {}, 83 | withCredentials: false, 84 | preprocess: null, 85 | method: 'multipart', 86 | testMethod: 'GET', 87 | uploadMethod: 'POST', 88 | prioritizeFirstAndLastChunk: false, 89 | allowDuplicateUploads: false, 90 | target: '/', 91 | testChunks: true, 92 | generateUniqueIdentifier: null, 93 | maxChunkRetries: 0, 94 | chunkRetryInterval: null, 95 | permanentErrors: [404, 415, 500, 501], 96 | successStatuses: [200, 201, 202], 97 | onDropStopPropagation: false, 98 | initFileFn: null, 99 | readFileFn: webAPIFileRead, 100 | checkChunkUploadedByResponse: null, 101 | initialPaused: false, 102 | processResponse: function (response, cb) { 103 | cb(null, response) 104 | }, 105 | processParams: function (params) { 106 | return params 107 | } 108 | } 109 | 110 | Uploader.utils = utils 111 | Uploader.event = event 112 | Uploader.File = File 113 | Uploader.Chunk = Chunk 114 | 115 | // inherit file 116 | Uploader.prototype = utils.extend({}, File.prototype) 117 | // inherit event 118 | utils.extend(Uploader.prototype, event) 119 | utils.extend(Uploader.prototype, { 120 | 121 | constructor: Uploader, 122 | 123 | _trigger: function (name) { 124 | var args = utils.toArray(arguments) 125 | var preventDefault = !this.trigger.apply(this, arguments) 126 | if (name !== 'catchAll') { 127 | args.unshift('catchAll') 128 | preventDefault = !this.trigger.apply(this, args) || preventDefault 129 | } 130 | return !preventDefault 131 | }, 132 | 133 | _triggerAsync: function () { 134 | var args = arguments 135 | utils.nextTick(function () { 136 | this._trigger.apply(this, args) 137 | }, this) 138 | }, 139 | 140 | addFiles: function (files, evt) { 141 | var _files = [] 142 | var oldFileListLen = this.fileList.length 143 | utils.each(files, function (file) { 144 | // Uploading empty file IE10/IE11 hangs indefinitely 145 | // Directories have size `0` and name `.` 146 | // Ignore already added files if opts.allowDuplicateUploads is set to false 147 | if ((!ie10plus || ie10plus && file.size > 0) && !(file.size % 4096 === 0 && (file.name === '.' 
|| file.fileName === '.'))) { 148 | var uniqueIdentifier = this.generateUniqueIdentifier(file) 149 | if (this.opts.allowDuplicateUploads || !this.getFromUniqueIdentifier(uniqueIdentifier)) { 150 | var _file = new File(this, file, this) 151 | _file.uniqueIdentifier = uniqueIdentifier 152 | if (this._trigger('fileAdded', _file, evt)) { 153 | _files.push(_file) 154 | } else { 155 | File.prototype.removeFile.call(this, _file) 156 | } 157 | } 158 | } 159 | }, this) 160 | // get new fileList 161 | var newFileList = this.fileList.slice(oldFileListLen) 162 | if (this._trigger('filesAdded', _files, newFileList, evt)) { 163 | utils.each(_files, function (file) { 164 | if (this.opts.singleFile && this.files.length > 0) { 165 | this.removeFile(this.files[0]) 166 | } 167 | this.files.push(file) 168 | }, this) 169 | this._trigger('filesSubmitted', _files, newFileList, evt) 170 | } else { 171 | utils.each(newFileList, function (file) { 172 | File.prototype.removeFile.call(this, file) 173 | }, this) 174 | } 175 | }, 176 | 177 | addFile: function (file, evt) { 178 | this.addFiles([file], evt) 179 | }, 180 | 181 | cancel: function () { 182 | for (var i = this.fileList.length - 1; i >= 0; i--) { 183 | this.fileList[i].cancel() 184 | } 185 | }, 186 | 187 | removeFile: function (file) { 188 | File.prototype.removeFile.call(this, file) 189 | this._trigger('fileRemoved', file) 190 | }, 191 | 192 | generateUniqueIdentifier: function (file) { 193 | var custom = this.opts.generateUniqueIdentifier 194 | if (utils.isFunction(custom)) { 195 | return custom(file) 196 | } 197 | /* istanbul ignore next */ 198 | // Some confusion in different versions of Firefox 199 | var relativePath = file.relativePath || file.webkitRelativePath || file.fileName || file.name 200 | /* istanbul ignore next */ 201 | return file.size + '-' + relativePath.replace(/[^0-9a-zA-Z_-]/img, '') 202 | }, 203 | 204 | getFromUniqueIdentifier: function (uniqueIdentifier) { 205 | var ret = false 206 | utils.each(this.files, function (file) { 207 | if (file.uniqueIdentifier === uniqueIdentifier) { 208 | ret = file 209 | return false 210 | } 211 | }) 212 | return ret 213 | }, 214 | 215 | uploadNextChunk: function (preventEvents) { 216 | var found = false 217 | var pendingStatus = Chunk.STATUS.PENDING 218 | var checkChunkUploaded = this.uploader.opts.checkChunkUploadedByResponse 219 | if (this.opts.prioritizeFirstAndLastChunk) { 220 | utils.each(this.files, function (file) { 221 | if (file.paused) { 222 | return 223 | } 224 | if (checkChunkUploaded && !file._firstResponse && file.isUploading()) { 225 | // waiting for current file's first chunk response 226 | return 227 | } 228 | if (file.chunks.length && file.chunks[0].status() === pendingStatus) { 229 | file.chunks[0].send() 230 | found = true 231 | return false 232 | } 233 | if (file.chunks.length > 1 && file.chunks[file.chunks.length - 1].status() === pendingStatus) { 234 | file.chunks[file.chunks.length - 1].send() 235 | found = true 236 | return false 237 | } 238 | }) 239 | if (found) { 240 | return found 241 | } 242 | } 243 | 244 | // Now, simply look for the next, best thing to upload 245 | utils.each(this.files, function (file) { 246 | if (!file.paused) { 247 | if (checkChunkUploaded && !file._firstResponse && file.isUploading()) { 248 | // waiting for current file's first chunk response 249 | return 250 | } 251 | utils.each(file.chunks, function (chunk) { 252 | if (chunk.status() === pendingStatus) { 253 | chunk.send() 254 | found = true 255 | return false 256 | } 257 | }) 258 | } 259 | if (found) 
{ 260 | return false 261 | } 262 | }) 263 | if (found) { 264 | return true 265 | } 266 | 267 | // The are no more outstanding chunks to upload, check is everything is done 268 | var outstanding = false 269 | utils.each(this.files, function (file) { 270 | if (!file.isComplete()) { 271 | outstanding = true 272 | return false 273 | } 274 | }) 275 | // should check files now 276 | // if now files in list 277 | // should not trigger complete event 278 | if (!outstanding && !preventEvents && this.files.length) { 279 | // All chunks have been uploaded, complete 280 | this._triggerAsync('complete') 281 | } 282 | return outstanding 283 | }, 284 | 285 | upload: function (preventEvents) { 286 | // Make sure we don't start too many uploads at once 287 | var ret = this._shouldUploadNext() 288 | if (ret === false) { 289 | return 290 | } 291 | !preventEvents && this._trigger('uploadStart') 292 | var started = false 293 | for (var num = 1; num <= this.opts.simultaneousUploads - ret; num++) { 294 | started = this.uploadNextChunk(!preventEvents) || started 295 | if (!started && preventEvents) { 296 | // completed 297 | break 298 | } 299 | } 300 | if (!started && !preventEvents) { 301 | this._triggerAsync('complete') 302 | } 303 | }, 304 | 305 | /** 306 | * should upload next chunk 307 | * @function 308 | * @returns {Boolean|Number} 309 | */ 310 | _shouldUploadNext: function () { 311 | var num = 0 312 | var should = true 313 | var simultaneousUploads = this.opts.simultaneousUploads 314 | var uploadingStatus = Chunk.STATUS.UPLOADING 315 | utils.each(this.files, function (file) { 316 | utils.each(file.chunks, function (chunk) { 317 | if (chunk.status() === uploadingStatus) { 318 | num++ 319 | if (num >= simultaneousUploads) { 320 | should = false 321 | return false 322 | } 323 | } 324 | }) 325 | return should 326 | }) 327 | // if should is true then return uploading chunks's length 328 | return should && num 329 | }, 330 | 331 | /** 332 | * Assign a browse action to one or more DOM nodes. 333 | * @function 334 | * @param {Element|Array.} domNodes 335 | * @param {boolean} isDirectory Pass in true to allow directories to 336 | * @param {boolean} singleFile prevent multi file upload 337 | * @param {Object} attributes set custom attributes: 338 | * http://www.w3.org/TR/html-markup/input.file.html#input.file-attributes 339 | * eg: accept: 'image/*' 340 | * be selected (Chrome only). 
341 | */ 342 | assignBrowse: function (domNodes, isDirectory, singleFile, attributes) { 343 | if (typeof domNodes.length === 'undefined') { 344 | domNodes = [domNodes] 345 | } 346 | 347 | utils.each(domNodes, function (domNode) { 348 | var input 349 | if (domNode.tagName === 'INPUT' && domNode.type === 'file') { 350 | input = domNode 351 | } else { 352 | input = document.createElement('input') 353 | input.setAttribute('type', 'file') 354 | // display:none - not working in opera 12 355 | utils.extend(input.style, { 356 | visibility: 'hidden', 357 | position: 'absolute', 358 | width: '1px', 359 | height: '1px' 360 | }) 361 | // for opera 12 browser, input must be assigned to a document 362 | domNode.appendChild(input) 363 | // https://developer.mozilla.org/en/using_files_from_web_applications) 364 | // event listener is executed two times 365 | // first one - original mouse click event 366 | // second - input.click(), input is inside domNode 367 | domNode.addEventListener('click', function (e) { 368 | if (domNode.tagName.toLowerCase() === 'label') { 369 | return 370 | } 371 | input.click() 372 | }, false) 373 | } 374 | if (!this.opts.singleFile && !singleFile) { 375 | input.setAttribute('multiple', 'multiple') 376 | } 377 | if (isDirectory) { 378 | input.setAttribute('webkitdirectory', 'webkitdirectory') 379 | } 380 | attributes && utils.each(attributes, function (value, key) { 381 | input.setAttribute(key, value) 382 | }) 383 | // When new files are added, simply append them to the overall list 384 | var that = this 385 | input.addEventListener('change', function (e) { 386 | that._trigger(e.type, e) 387 | if (e.target.value) { 388 | that.addFiles(e.target.files, e) 389 | e.target.value = '' 390 | } 391 | }, false) 392 | }, this) 393 | }, 394 | 395 | onDrop: function (evt) { 396 | this._trigger(evt.type, evt) 397 | if (this.opts.onDropStopPropagation) { 398 | evt.stopPropagation() 399 | } 400 | evt.preventDefault() 401 | this._parseDataTransfer(evt.dataTransfer, evt) 402 | }, 403 | 404 | _parseDataTransfer: function (dataTransfer, evt) { 405 | if (dataTransfer.items && dataTransfer.items[0] && 406 | dataTransfer.items[0].webkitGetAsEntry) { 407 | this.webkitReadDataTransfer(dataTransfer, evt) 408 | } else { 409 | this.addFiles(dataTransfer.files, evt) 410 | } 411 | }, 412 | 413 | webkitReadDataTransfer: function (dataTransfer, evt) { 414 | var self = this 415 | var queue = dataTransfer.items.length 416 | var files = [] 417 | utils.each(dataTransfer.items, function (item) { 418 | var entry = item.webkitGetAsEntry() 419 | if (!entry) { 420 | decrement() 421 | return 422 | } 423 | if (entry.isFile) { 424 | // due to a bug in Chrome's File System API impl - #149735 425 | fileReadSuccess(item.getAsFile(), entry.fullPath) 426 | } else { 427 | readDirectory(entry.createReader()) 428 | } 429 | }) 430 | function readDirectory (reader) { 431 | reader.readEntries(function (entries) { 432 | if (entries.length) { 433 | queue += entries.length 434 | utils.each(entries, function (entry) { 435 | if (entry.isFile) { 436 | var fullPath = entry.fullPath 437 | entry.file(function (file) { 438 | fileReadSuccess(file, fullPath) 439 | }, readError) 440 | } else if (entry.isDirectory) { 441 | readDirectory(entry.createReader()) 442 | } 443 | }) 444 | readDirectory(reader) 445 | } else { 446 | decrement() 447 | } 448 | }, readError) 449 | } 450 | function fileReadSuccess (file, fullPath) { 451 | // relative path should not start with "/" 452 | file.relativePath = fullPath.substring(1) 453 | files.push(file) 454 | 
decrement() 455 | } 456 | function readError (fileError) { 457 | throw fileError 458 | } 459 | function decrement () { 460 | if (--queue === 0) { 461 | self.addFiles(files, evt) 462 | } 463 | } 464 | }, 465 | 466 | _assignHelper: function (domNodes, handles, remove) { 467 | if (typeof domNodes.length === 'undefined') { 468 | domNodes = [domNodes] 469 | } 470 | var evtMethod = remove ? 'removeEventListener' : 'addEventListener' 471 | utils.each(domNodes, function (domNode) { 472 | utils.each(handles, function (handler, name) { 473 | domNode[evtMethod](name, handler, false) 474 | }, this) 475 | }, this) 476 | }, 477 | 478 | _preventEvent: function (e) { 479 | utils.preventEvent(e) 480 | this._trigger(e.type, e) 481 | }, 482 | 483 | /** 484 | * Assign one or more DOM nodes as a drop target. 485 | * @function 486 | * @param {Element|Array.} domNodes 487 | */ 488 | assignDrop: function (domNodes) { 489 | this._onDrop = utils.bind(this.onDrop, this) 490 | this._assignHelper(domNodes, { 491 | dragover: this.preventEvent, 492 | dragenter: this.preventEvent, 493 | dragleave: this.preventEvent, 494 | drop: this._onDrop 495 | }) 496 | }, 497 | 498 | /** 499 | * Un-assign drop event from DOM nodes 500 | * @function 501 | * @param domNodes 502 | */ 503 | unAssignDrop: function (domNodes) { 504 | this._assignHelper(domNodes, { 505 | dragover: this.preventEvent, 506 | dragenter: this.preventEvent, 507 | dragleave: this.preventEvent, 508 | drop: this._onDrop 509 | }, true) 510 | this._onDrop = null 511 | } 512 | }) 513 | 514 | module.exports = Uploader 515 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # simple-uploader.js [![Build Status](https://travis-ci.org/simple-uploader/Uploader.svg?branch=master)](https://travis-ci.org/simple-uploader/Uploader?branch=master) [![codecov.io](http://codecov.io/github/simple-uploader/Uploader/coverage.svg?branch=master)](http://codecov.io/github/simple-uploader/Uploader?branch=master) [![Build Status](https://saucelabs.com/buildstatus/uploader)](https://saucelabs.com/u/uploader) 2 | 3 | [![Sauce Test Status](https://saucelabs.com/browser-matrix/uploader.svg)](https://saucelabs.com/u/uploader) 4 | 5 | [中文](./README_zh-CN.md) 6 | 7 | ![QQ](https://github.com/simple-uploader/Uploader/blob/develop/assets/simple-uploader-QQ-3.png?raw=true) 8 | 9 | A JavaScript library providing multiple simultaneous, stable, fault-tolerant and resumable/restartable file uploads via the HTML5 File API. 10 | 11 | Forked [flow.js](https://github.com/flowjs/flow.js) but refactor it. 12 | 13 | The library is designed to introduce fault-tolerance into the upload of large files through HTTP. This is done by splitting each file into small chunks. Then, whenever the upload of a chunk fails, uploading is retried until the procedure completes. This allows uploads to automatically resume uploading after a network connection is lost either locally or to the server. Additionally, it allows for users to pause, resume and even recover uploads without losing state because only the currently uploading chunks will be aborted, not the entire upload. 14 | 15 | Uploader (simple-uploader.js) does not have any external dependencies other than the `HTML5 File API`. This is relied on for the ability to chunk files into smaller pieces. Currently, this means that support is limited to Firefox 4+, Chrome 11+, Safari 6+ and Internet Explorer 10+. 
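The chunking and retry behaviour described above is driven almost entirely by configuration. Below is a minimal, illustrative sketch; the target URL and the retry values are placeholders, and every option shown is documented in the configuration section further down:

```javascript
// A minimal sketch: options most relevant to chunking and fault tolerance.
// Values are illustrative only, not recommendations.
var uploader = new Uploader({
  target: '/upload',            // placeholder upload endpoint
  chunkSize: 1 * 1024 * 1024,   // split files into 1 MB chunks
  simultaneousUploads: 3,       // upload up to three chunks in parallel
  testChunks: true,             // ask the server which chunks it already has
  maxChunkRetries: 3,           // retry a failed chunk a few times before giving up
  chunkRetryInterval: 500       // wait 500 ms between retries
})

uploader.on('fileAdded', function (file) {
  // Pausing/resuming only aborts the chunks currently in flight,
  // not the chunks that have already been uploaded.
  // file.pause() / file.resume() can be called at any time.
})
```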
16 | 17 | Samples and examples are available in the `samples/` folder. Please push your own as Markdown to help document the project. 18 | 19 | ## New Features 20 | 21 | * Treat `Folder` and `File` as `Uploader.File` 22 | 23 | * Treat `Uploader` as a root `Folder` 24 | 25 | * New `fileList` property which contains files and folders 26 | 27 | ## How can I install it? 28 | 29 | Download the latest build from https://github.com/simple-uploader/Uploader/releases/ 30 | it contains development and minified production files in the `dist/` folder. 31 | 32 | or use npm: 33 | 34 | ```console 35 | npm install simple-uploader.js 36 | ``` 37 | 38 | or use git clone: 39 | 40 | ```console 41 | git clone https://github.com/simple-uploader/Uploader 42 | ``` 43 | 44 | ## How can I use it? 45 | 46 | A new `Uploader` object is created with information about what to post and where to post it: 47 | 48 | ```javascript 49 | var uploader = new Uploader({ 50 | target: '/api/photo/redeem-upload-token', 51 | query: { upload_token: 'my_token' } 52 | }) 53 | // Uploader isn't supported, fall back on a different method 54 | if (!uploader.support) location.href = '/some-old-crappy-uploader' 55 | ``` 56 | 57 | To allow files to be both selected and drag-and-dropped, assign a drop target and a DOM node to be clicked for browsing: 58 | 59 | ```javascript 60 | uploader.assignBrowse(document.getElementById('browseButton')) 61 | uploader.assignDrop(document.getElementById('dropTarget')) 62 | ``` 63 | 64 | After this, interaction with Uploader.js is done by listening to events: 65 | 66 | ```javascript 67 | uploader.on('fileAdded', function (file, event) { 68 | console.log(file, event) 69 | }) 70 | uploader.on('fileSuccess', function (rootFile, file, message) { 71 | console.log(rootFile, file, message) 72 | }) 73 | uploader.on('fileComplete', function (rootFile) { 74 | console.log(rootFile) 75 | }) 76 | uploader.on('fileError', function (rootFile, file, message) { 77 | console.log(rootFile, file, message) 78 | }) 79 | ``` 80 | 81 | ## How do I set it up with my server? 82 | 83 | Most of the magic for Uploader.js happens in the user's browser, but files still need to be reassembled from chunks on the server side. This should be a fairly simple task and can be achieved in any web framework or language that is able to receive file uploads. 84 | 85 | To handle the state of upload chunks, a number of extra parameters are sent along with all requests: 86 | 87 | * `chunkNumber`: The index of the chunk in the current upload. The first chunk is `1` (no base-0 counting here). 88 | * `totalChunks`: The total number of chunks. 89 | * `chunkSize`: The general chunk size. Using this value and `totalSize` you can calculate the total number of chunks. Please note that the size of the data received in the HTTP request may be larger than `chunkSize` for the last chunk of a file. 90 | * `totalSize`: The total file size. 91 | * `identifier`: A unique identifier for the file contained in the request. 92 | * `filename`: The original file name (since a bug in Firefox results in the file name not being transmitted in chunk multipart posts). 93 | * `relativePath`: The file's relative path when selecting a directory (defaults to the file name in all browsers except Chrome). 94 | 95 | You should allow for the same chunk to be uploaded more than once; this isn't standard behaviour, but in an unstable network environment it could happen, and this case is exactly what Uploader.js is designed for.
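For illustration only, here is a minimal server-side sketch of the above. It is not the bundled Node.js sample: it assumes an Express app with the `multer` middleware for multipart parsing, and the endpoint path, directory layout and port are placeholders. It stores each incoming chunk under a name derived from `identifier` and `chunkNumber`, and answers the test requests described in the next section:

```javascript
// A minimal sketch, not the bundled sample: Express and multer are assumed dependencies.
var express = require('express')
var multer = require('multer')
var fs = require('fs')
var os = require('os')
var path = require('path')

var app = express()
var upload = multer({ dest: os.tmpdir() })   // multipart parser; temp files go to the OS temp dir
var chunkDir = path.join(__dirname, 'chunks')
fs.mkdirSync(chunkDir, { recursive: true })

function chunkPath (identifier, chunkNumber) {
  // one file per chunk, grouped by the upload's identifier
  return path.join(chunkDir, identifier + '-' + chunkNumber)
}

// Receive a chunk: the blob arrives under `fileParameterName` (default "file"),
// the other parameters arrive as ordinary multipart form fields.
app.post('/upload', upload.single('file'), function (req, res) {
  fs.renameSync(req.file.path, chunkPath(req.body.identifier, req.body.chunkNumber))
  // Once chunks 1..totalChunks are all present they can be concatenated
  // in order to rebuild the original file (omitted here).
  res.sendStatus(200)
})

// Test request (see "Handling GET" below): 200 means "this chunk is already here".
app.get('/upload', function (req, res) {
  var exists = fs.existsSync(chunkPath(req.query.identifier, req.query.chunkNumber))
  res.sendStatus(exists ? 200 : 204)
})

app.listen(3000)
```

Returning a status outside both `successStatuses` and `permanentErrors` for the test request (here `204`) simply makes the client upload that chunk in the standard fashion.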
96 | 97 | For every request, you can confirm reception via the HTTP status code (the relevant codes can be changed through the `successStatuses` and `permanentErrors` options): 98 | 99 | * `200`, `201`, `202`: The chunk was accepted and correct. No need to re-upload. 100 | * `404`, `415`, `500`, `501`: The file for which the chunk was uploaded is not supported; cancel the entire upload. 101 | * _Anything else_: Something went wrong, but try reuploading the file. 102 | 103 | ## Handling GET (or `test()` requests) 104 | 105 | Enabling the `testChunks` option will allow uploads to be resumed after browser restarts and even across browsers (in theory you could even run the same file upload across multiple tabs or different browsers). Handling the `POST` data requests listed above is required for Uploader.js to work, but you can extend support by implementing a corresponding `GET` request with the same parameters: 106 | 107 | * If this request returns a `200`, `201` or `202` HTTP code, the chunk is assumed to have been completed. 108 | * If the request returns a permanent error status, the upload is stopped. 109 | * If the request returns anything else, the chunk will be uploaded in the standard fashion. 110 | 111 | After this is done and `testChunks` is enabled, an upload can quickly catch up even after a browser restart by simply verifying already uploaded chunks that do not need to be uploaded again. 112 | 113 | ## Full documentation 114 | 115 | ### Uploader 116 | 117 | #### Configuration 118 | 119 | The object is loaded with a hash of configuration options: 120 | ```javascript 121 | var r = new Uploader({ opt1: 'val', ...}) 122 | ``` 123 | Available configuration options are: 124 | 125 | * `target` The target URL for the multipart POST request. This can be a string or a function. If a 126 | function, it will be passed an Uploader.File, an Uploader.Chunk and an isTest boolean. (Default: `/`) 127 | * `singleFile` Enable single-file upload. Once one file is in the list, adding a second file will replace the existing one, and the first upload will be canceled. (Default: `false`) 128 | * `chunkSize` The size in bytes of each uploaded chunk of data. The last uploaded chunk will be at least this size and up to twice this size, see [Issue #51](https://github.com/23/resumable.js/issues/51) for details and reasons. (Default: `1*1024*1024`) 129 | * `forceChunkSize` Force all chunks to be less than or equal to `chunkSize`. Otherwise, the last chunk will be greater than or equal to `chunkSize`. (Default: `false`) 130 | * `simultaneousUploads` Number of simultaneous uploads. (Default: `3`) 131 | * `fileParameterName` The name of the multipart POST parameter to use for the file chunk. (Default: `file`) 132 | * `query` Extra parameters to include in the multipart POST with data. This can be an object or a 133 | function. If a function, it will be passed an Uploader.File, an Uploader.Chunk object and an isTest boolean. 134 | (Default: `{}`) 135 | * `headers` Extra headers to include in the multipart POST with data. If a function, it will be passed an Uploader.File, an Uploader.Chunk object and an isTest boolean. (Default: `{}`) 136 | * `withCredentials` Standard CORS requests do not send or set any cookies by default. In order to 137 | include cookies as part of the request, you need to set the `withCredentials` property to true. 138 | (Default: `false`) 139 | * `method` Method to use when POSTing chunks to the server (`multipart` or `octet`). (Default: `multipart`) 140 | * `testMethod` HTTP method to use when chunks are being tested. If set to a function, it will be passed Uploader.File and Uploader.Chunk arguments.
(Default: `GET`) 141 | * `uploadMethod` HTTP method to use when chunks are being uploaded. If set to a function, it will be passed an Uploader.File argument. (Default: `POST`) 142 | * `allowDuplicateUploads` Once a file is uploaded, allow re-upload of the same file. By default, if a file is already uploaded, it will be skipped unless the file is removed from the existing Uploader object. (Default: `false`) 143 | * `prioritizeFirstAndLastChunk` Prioritize the first and last chunks of all files. This can be handy if you can determine whether a file is valid for your service from only the first or last chunk. For example, photo or video metadata is usually located in the first part of a file, making it easy to test support from only the first chunk. (Default: `false`) 144 | * `testChunks` Make a GET request to the server for each chunk to see if it already exists. If implemented on the server side, this will allow uploads to resume even after a browser crash or a computer restart. (Default: `true`) 145 | * `preprocess` Optional function to process each chunk before testing & sending. The function will be passed the chunk as a parameter, and should call the `preprocessFinished` method on the chunk when finished. (Default: `null`) 146 | * `initFileFn` Optional function to initialize the fileObject. The function will be passed an Uploader.File argument. 147 | * `readFileFn` Optional function wrapping the read operation on the original file. The function will be passed the Uploader.File, the startByte and endByte, the fileType and the Uploader.Chunk, and should call the `readFinished` method on the chunk with the read bytes when finished. 148 | * `checkChunkUploadedByResponse` Optional function to check whether a chunk was already uploaded, based on the XHR response. The function will be passed the Uploader.Chunk and the response message. With this you do not need to upload (test) every chunk; see [Issue #1](https://github.com/simple-uploader/Uploader/issues/1) for details and reasons, and the [sample](https://github.com/simple-uploader/Uploader/blob/develop/samples/Node.js/public/app.js#L15). 149 | * `generateUniqueIdentifier` Override the function that generates unique identifiers for each file. (Default: `null`) 150 | * `maxChunkRetries` The maximum number of retries for a chunk before the upload is marked as failed. Valid values are any positive integer and `undefined` for no limit. (Default: `0`) 151 | * `chunkRetryInterval` The number of milliseconds to wait before retrying a chunk on a non-permanent error. Valid values are any positive integer and `undefined` for immediate retry. (Default: `undefined`) 152 | * `progressCallbacksInterval` The time interval in milliseconds between progress reports. Set it 153 | to 0 to handle each progress callback. (Default: `500`) 154 | * `speedSmoothingFactor` Used for calculating the average upload speed. A number from 1 to 0. Set to 1 155 | and the average upload speed will be equal to the current upload speed. For longer file uploads it is 156 | better to set this number to 0.02, because the time-remaining estimation will be more accurate. This 157 | parameter must be adjusted together with the `progressCallbacksInterval` parameter. (Default: `0.1`) 158 | * `successStatuses` The response is treated as a success if its status is in this list. (Default: `[200, 201, 159 | 202]`) 160 | * `permanentErrors` The response is treated as a permanent error if its status is in this list. (Default: `[404, 415, 500, 501]`) 161 | * `initialPaused` Initial paused state. (Default: `false`)
162 | * `processResponse` Process xhr response, default `function (response, cb) { cb(null, response) }`. After 0.5.2, `processResponse` will be called with arguments: (response, cb, Uploader.File, Uploader.Chunk). 163 | * `processParams` Process xhr params, default `function (params) {return params}`. After 0.5.2, `processParams` will be called with arguments: (params, Uploader.File, Uploader.Chunk, isTest). 164 | 165 | #### Properties 166 | 167 | * `.support` A boolean value indicator whether or not Uploader.js is supported by the current browser. 168 | * `.supportDirectory` A boolean value, which indicates if browser supports directory uploads. 169 | * `.opts` A hash object of the configuration of the Uploader.js instance. 170 | * `.files` An array of `Uploader.File` file objects added by the user (see full docs for this object type below). 171 | * `.fileList` An array of `Uploader.File` file(folder) objects added by the user (see full docs for this object type below), but it treated Folder as a `Uploader.File` Object. 172 | 173 | #### Methods 174 | 175 | * `.assignBrowse(domNodes, isDirectory, singleFile, attributes)` Assign a browse action to one or more DOM nodes. 176 | * `domNodes` array of dom nodes or a single node. 177 | * `isDirectory` Pass in `true` to allow directories to be selected (Chrome only, support can be checked with `supportDirectory` property). 178 | * `singleFile` To prevent multiple file uploads set this to true. Also look at config parameter `singleFile`. 179 | * `attributes` Pass object of keys and values to set custom attributes on input fields. 180 | For example, you can set `accept` attribute to `image/*`. This means that user will be able to select only images. 181 | Full list of attributes: https://www.w3.org/wiki/HTML/Elements/input/file 182 | 183 | Note: avoid using `a` and `button` tags as file upload buttons, use span instead. 184 | * `.assignDrop(domNodes)` Assign one or more DOM nodes as a drop target. 185 | * `.unAssignDrop(domNodes)` Unassign one or more DOM nodes as a drop target. 186 | * `.on(event, callback)` Listen for event from Uploader.js (see below) 187 | * `.off([event, [callback]])`: 188 | * `.off(event)` Remove all callbacks of specific event. 189 | * `.off(event, callback)` Remove specific callback of event. `callback` should be a `Function`. 190 | * `.upload()` Start or resume uploading. 191 | * `.pause()` Pause uploading. 192 | * `.resume()` Resume uploading. 193 | * `.cancel()` Cancel upload of all `Uploader.File` objects and remove them from the list. 194 | * `.progress()` Returns a float between 0 and 1 indicating the current upload progress of all files. 195 | * `.isUploading()` Returns a boolean indicating whether or not the instance is currently uploading anything. 196 | * `.addFile(file)` Add a HTML5 File object to the list of files. 197 | * `.removeFile(file)` Cancel upload of a specific `Uploader.File` object on the list from the list. 198 | * `.getFromUniqueIdentifier(uniqueIdentifier)` Look up a `Uploader.File` object by its unique identifier. 199 | * `.getSize()` Returns the total size of the upload in bytes. 200 | * `.sizeUploaded()` Returns the total size uploaded of all files in bytes. 201 | * `.timeRemaining()` Returns remaining time to upload all files in seconds. Accuracy is based on average speed. If speed is zero, time remaining will be equal to positive infinity `Number.POSITIVE_INFINITY` 202 | 203 | #### Events 204 | 205 | * `.change(event)` File input change event. 
206 | * `.dragover(event)` Drop area dragover event. 207 | * `.dragenter(event)` Drop area dragenter event. 208 | * `.dragleave(event)` Drop area dragleave event. 209 | * `.fileSuccess(rootFile, file, message, chunk)` A specific file was completed. First argument `rootFile` is the root `Uploader.File` instance which contains or equal the completed file, second argument `file` argument is instance of `Uploader.File` too, it's the current completed file object, third argument `message` contains server response. Response is always a string. 210 | Fourth argument `chunk` is instance of `Uploader.Chunk`. You can get response status by accessing xhr 211 | object `chunk.xhr.status`. 212 | * `.fileComplete(rootFile)` A root file(Folder) was completed. 213 | * `.fileProgress(rootFile, file, chunk)` Uploading progressed for a specific file. 214 | * `.fileAdded(file, event)` This event is used for file validation. To reject this file return false. 215 | This event is also called before file is added to upload queue, 216 | this means that calling `uploader.upload()` function will not start current file upload. 217 | Optionally, you can use the browser `event` object from when the file was 218 | added. 219 | * `.filesAdded(files, fileList, event)` Same as fileAdded, but used for multiple file validation. 220 | * `.filesSubmitted(files, fileList, event)` Same as filesAdded, but happens after the file is added to upload queue. Can be used to start upload of currently added files. 221 | * `.fileRemoved(file)` The specific file was removed from the upload queue. Combined with filesSubmitted, can be used to notify UI to update its state to match the upload queue. 222 | * `.fileRetry(rootFile, file, chunk)` Something went wrong during upload of a specific file, uploading is being 223 | retried. 224 | * `.fileError(rootFile, file, message, chunk)` An error occurred during upload of a specific file. 225 | * `.uploadStart()` Upload has been started. 226 | * `.complete()` Uploading completed. 227 | * `.catchAll(event, ...)` Listen to all the events listed above with the same callback function. 228 | 229 | ### Uploader.File 230 | 231 | #### Properties 232 | 233 | * `.uploader` A back-reference to the parent `Uploader` object. 234 | * `.name` The name of the file(folder). 235 | * `.averageSpeed` Average upload speed, bytes per second. 236 | * `.currentSpeed` Current upload speed, bytes per second. 237 | * `.paused` Indicated if file(folder) is paused. 238 | * `.error` Indicated if file(folder) has encountered an error. 239 | * `.isFolder` Indicated if file(folder) is an `Directory`. 240 | 241 | If `.isFolder` is `false` then these properties will be added: 242 | 243 | * `.file` The correlating HTML5 `File` object. 244 | * `.relativePath` The relative path to the file (defaults to file name if relative path doesn't exist). 245 | * `.size` Size in bytes of the file. 246 | * `.uniqueIdentifier` A unique identifier assigned to this file object. This value is included in uploads to the server for reference, but can also be used in CSS classes etc when building your upload UI. 247 | * `.chunks` An array of `Uploader.Chunk` items. You shouldn't need to dig into these. 248 | 249 | #### Methods 250 | 251 | * `.getRoot()` Returns the file's root Uploader.File instance in `uploader.fileList`. 252 | * `.progress()` Returns a float between 0 and 1 indicating the current upload progress of the file. 253 | * `.pause()` Pause uploading the file. 254 | * `.resume()` Resume uploading the file. 
255 | * `.cancel()` Abort uploading the file and delete it from the list of files to upload. 256 | * `.retry()` Retry uploading the file. 257 | * `.bootstrap()` Rebuild the state of a `Uploader.File` object, including reassigning chunks and XMLHttpRequest instances. 258 | * `.isUploading()` Returns a boolean indicating whether file chunks is uploading. 259 | * `.isComplete()` Returns a boolean indicating whether the file has completed uploading and received a server response. 260 | * `.sizeUploaded()` Returns size uploaded in bytes. 261 | * `.timeRemaining()` Returns remaining time to finish upload file in seconds. Accuracy is based on average speed. If speed is zero, time remaining will be equal to positive infinity `Number.POSITIVE_INFINITY` 262 | * `.getExtension()` Returns file extension in lowercase. 263 | * `.getType()` Returns file type. 264 | 265 | ## Origin 266 | 267 | Uploader.js was inspired by and evolved from https://github.com/flowjs/flow.js and https://github.com/23/resumable.js. 268 | -------------------------------------------------------------------------------- /dist/uploader.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Uploader - Uploader library implements html5 file upload and provides multiple simultaneous, stable, fault tolerant and resumable uploads 3 | * @version v0.6.0 4 | * @author dolymood 5 | * @link https://github.com/simple-uploader/Uploader 6 | * @license MIT 7 | */ 8 | !function(e){if("object"==typeof exports)module.exports=e();else if("function"==typeof define&&define.amd)define(e);else{var t;"undefined"!=typeof window?t=window:"undefined"!=typeof global?t=global:"undefined"!=typeof self&&(t=self),t.Uploader=e()}}(function(){return function r(n,o,a){function h(i,e){if(!o[i]){if(!n[i]){var t="function"==typeof require&&require;if(!e&&t)return t(i,!0);if(u)return u(i,!0);throw new Error("Cannot find module '"+i+"'")}var s=o[i]={exports:{}};n[i][0].call(s.exports,function(e){var t=n[i][1][e];return h(t||e)},s,s.exports,r,n,o,a)}return o[i].exports}for(var u="function"==typeof require&&require,e=0;e=this.uploader.opts.maxChunkRetries?t=o.ERROR:(this.abort(),t=o.PENDING);var i=this.processedState;return i&&i.err&&(t=o.ERROR),t}return o.PENDING},message:function(){return this.xhr?this.xhr.responseText:""},progress:function(){if(this.pendingRetry)return 0;var e=this.status();return e===o.SUCCESS||e===o.ERROR?1:e===o.PENDING?0:0=this.uploader.opts.progressCallbacksInterval},_chunkEvent:function(e,t,i){var s=this.uploader,r=u.STATUS,n=this,o=this.getRoot(),a=function(){n._measureSpeed(),s._trigger("fileProgress",o,n,e),n._lastProgressCallback=Date.now()};switch(t){case r.PROGRESS:this._checkProgress()&&a();break;case r.ERROR:this._error(),this.abort(!0),s._trigger("fileError",o,this,i,e);break;case r.SUCCESS:if(this._updateUploadedChunks(i,e),this.error)return;clearTimeout(this._progeressId),this._progeressId=0;var h=Date.now()-this._lastProgressCallback;h>>0)).toLowerCase()}}),t.exports=n},{"./chunk":1,"./utils":5}],5:[function(e,t,i){var s=Object.prototype,r=Array.prototype,n=s.toString,l=function(e){return"[object Function]"===n.call(e)},p=Array.isArray||function(e){return"[object Array]"===n.call(e)},c=function(e){return"[object Object]"===n.call(e)&&Object.getPrototypeOf(e)===s},o=0,f={uid:function(){return++o},noop:function(){},bind:function(e,t){return function(){return 
e.apply(t,arguments)}},preventEvent:function(e){e.preventDefault()},stop:function(e){e.preventDefault(),e.stopPropagation()},nextTick:function(e,t){setTimeout(f.bind(e,t),0)},toArray:function(e,t,i){return void 0===t&&(t=0),void 0===i&&(i=e.length),r.slice.call(e,t,i)},isPlainObject:c,isFunction:l,isArray:p,isObject:function(e){return Object(e)===e},isString:function(e){return"string"==typeof e},isUndefined:function(e){return void 0===e},isDefined:function(e){return void 0!==e},each:function(e,t,i){if(f.isDefined(e.length))for(var s=0,r=e.length;s= 1.5.0 < 2", 298 | "toidentifier": "1.0.0" 299 | }, 300 | "dependencies": { 301 | "inherits": { 302 | "version": "2.0.3", 303 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", 304 | "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" 305 | } 306 | } 307 | }, 308 | "iconv-lite": { 309 | "version": "0.4.24", 310 | "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", 311 | "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", 312 | "requires": { 313 | "safer-buffer": ">= 2.1.2 < 3" 314 | } 315 | }, 316 | "inherits": { 317 | "version": "2.0.4", 318 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", 319 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" 320 | }, 321 | "ipaddr.js": { 322 | "version": "1.9.1", 323 | "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", 324 | "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" 325 | }, 326 | "media-typer": { 327 | "version": "0.3.0", 328 | "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", 329 | "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" 330 | }, 331 | "merge-descriptors": { 332 | "version": "1.0.1", 333 | "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", 334 | "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" 335 | }, 336 | "methods": { 337 | "version": "1.1.2", 338 | "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", 339 | "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" 340 | }, 341 | "mime": { 342 | "version": "1.6.0", 343 | "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", 344 | "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" 345 | }, 346 | "mime-db": { 347 | "version": "1.45.0", 348 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.45.0.tgz", 349 | "integrity": "sha512-CkqLUxUk15hofLoLyljJSrukZi8mAtgd+yE5uO4tqRZsdsAJKv0O+rFMhVDRJgozy+yG6md5KwuXhD4ocIoP+w==" 350 | }, 351 | "mime-types": { 352 | "version": "2.1.28", 353 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.28.tgz", 354 | "integrity": "sha512-0TO2yJ5YHYr7M2zzT7gDU1tbwHxEUWBCLt0lscSNpcdAfFyJOVEpRYNS7EXVcTLNj/25QO8gulHC5JtTzSE2UQ==", 355 | "requires": { 356 | "mime-db": "1.45.0" 357 | } 358 | }, 359 | "ms": { 360 | "version": "2.0.0", 361 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", 362 | "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" 363 | }, 364 | "multiparty": { 365 | "version": "4.2.2", 366 | "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-4.2.2.tgz", 367 | "integrity": "sha512-NtZLjlvsjcoGrzojtwQwn/Tm90aWJ6XXtPppYF4WmOk/6ncdwMMKggFY2NlRRN9yiCEIVxpOfPWahVEG2HAG8Q==", 368 | "requires": { 369 | "http-errors": "~1.8.0", 370 | "safe-buffer": 
"5.2.1", 371 | "uid-safe": "2.1.5" 372 | }, 373 | "dependencies": { 374 | "http-errors": { 375 | "version": "1.8.0", 376 | "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.0.tgz", 377 | "integrity": "sha512-4I8r0C5JDhT5VkvI47QktDW75rNlGVsUf/8hzjCC/wkWI/jdTRmBb9aI7erSG82r1bjKY3F6k28WnsVxB1C73A==", 378 | "requires": { 379 | "depd": "~1.1.2", 380 | "inherits": "2.0.4", 381 | "setprototypeof": "1.2.0", 382 | "statuses": ">= 1.5.0 < 2", 383 | "toidentifier": "1.0.0" 384 | } 385 | }, 386 | "safe-buffer": { 387 | "version": "5.2.1", 388 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", 389 | "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" 390 | }, 391 | "setprototypeof": { 392 | "version": "1.2.0", 393 | "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", 394 | "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" 395 | } 396 | } 397 | }, 398 | "negotiator": { 399 | "version": "0.6.2", 400 | "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", 401 | "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" 402 | }, 403 | "on-finished": { 404 | "version": "2.3.0", 405 | "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", 406 | "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", 407 | "requires": { 408 | "ee-first": "1.1.1" 409 | } 410 | }, 411 | "parseurl": { 412 | "version": "1.3.3", 413 | "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", 414 | "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" 415 | }, 416 | "path-to-regexp": { 417 | "version": "0.1.7", 418 | "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", 419 | "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" 420 | }, 421 | "proxy-addr": { 422 | "version": "2.0.6", 423 | "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz", 424 | "integrity": "sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==", 425 | "requires": { 426 | "forwarded": "~0.1.2", 427 | "ipaddr.js": "1.9.1" 428 | } 429 | }, 430 | "qs": { 431 | "version": "6.5.2", 432 | "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", 433 | "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" 434 | }, 435 | "random-bytes": { 436 | "version": "1.0.0", 437 | "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", 438 | "integrity": "sha1-T2ih3Arli9P7lYSMMDJNt11kNgs=" 439 | }, 440 | "range-parser": { 441 | "version": "1.2.1", 442 | "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", 443 | "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" 444 | }, 445 | "raw-body": { 446 | "version": "2.4.0", 447 | "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", 448 | "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", 449 | "requires": { 450 | "bytes": "3.1.0", 451 | "http-errors": "1.7.2", 452 | "iconv-lite": "0.4.24", 453 | "unpipe": "1.0.0" 454 | } 455 | }, 456 | "safe-buffer": { 457 | "version": "5.1.2", 458 | "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", 459 | "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" 460 | }, 461 | "safer-buffer": { 462 | "version": "2.1.2", 463 | "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", 464 | "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" 465 | }, 466 | "send": { 467 | "version": "0.17.1", 468 | "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz", 469 | "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==", 470 | "requires": { 471 | "debug": "2.6.9", 472 | "depd": "~1.1.2", 473 | "destroy": "~1.0.4", 474 | "encodeurl": "~1.0.2", 475 | "escape-html": "~1.0.3", 476 | "etag": "~1.8.1", 477 | "fresh": "0.5.2", 478 | "http-errors": "~1.7.2", 479 | "mime": "1.6.0", 480 | "ms": "2.1.1", 481 | "on-finished": "~2.3.0", 482 | "range-parser": "~1.2.1", 483 | "statuses": "~1.5.0" 484 | }, 485 | "dependencies": { 486 | "ee-first": { 487 | "version": "1.1.1", 488 | "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", 489 | "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" 490 | }, 491 | "ms": { 492 | "version": "2.1.1", 493 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", 494 | "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" 495 | }, 496 | "on-finished": { 497 | "version": "2.3.0", 498 | "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", 499 | "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", 500 | "requires": { 501 | "ee-first": "1.1.1" 502 | } 503 | } 504 | } 505 | }, 506 | "serve-static": { 507 | "version": "1.14.1", 508 | "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", 509 | "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==", 510 | "requires": { 511 | "encodeurl": "~1.0.2", 512 | "escape-html": "~1.0.3", 513 | "parseurl": "~1.3.3", 514 | "send": "0.17.1" 515 | } 516 | }, 517 | "setprototypeof": { 518 | "version": "1.1.1", 519 | "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", 520 | "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" 521 | }, 522 | "statuses": { 523 | "version": "1.5.0", 524 | "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", 525 | "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" 526 | }, 527 | "toidentifier": { 528 | "version": "1.0.0", 529 | "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", 530 | "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" 531 | }, 532 | "type-is": { 533 | "version": "1.6.18", 534 | "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", 535 | "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", 536 | "requires": { 537 | "media-typer": "0.3.0", 538 | "mime-types": "~2.1.24" 539 | } 540 | }, 541 | "uid-safe": { 542 | "version": "2.1.5", 543 | "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", 544 | "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", 545 | "requires": { 546 | "random-bytes": "~1.0.0" 547 | } 548 | }, 549 | 
"unpipe": { 550 | "version": "1.0.0", 551 | "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", 552 | "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" 553 | }, 554 | "utils-merge": { 555 | "version": "1.0.1", 556 | "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", 557 | "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" 558 | }, 559 | "vary": { 560 | "version": "1.1.2", 561 | "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", 562 | "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" 563 | } 564 | } 565 | } 566 | -------------------------------------------------------------------------------- /test/unit/specs/upload.js: -------------------------------------------------------------------------------- 1 | var Uploader = require('../../../src/uploader') 2 | 3 | describe('upload file', function () { 4 | var uploader 5 | var xhr 6 | var requests = [] 7 | 8 | beforeEach(function () { 9 | jasmine.clock().install() 10 | 11 | uploader = new Uploader({ 12 | progressCallbacksInterval: 0, 13 | generateUniqueIdentifier: function (file) { 14 | return file.size + '__' + file.name 15 | } 16 | }) 17 | 18 | requests = [] 19 | xhr = sinon.useFakeXMLHttpRequest() 20 | xhr.onCreate = function (xhr) { 21 | requests.push(xhr) 22 | } 23 | }) 24 | 25 | afterEach(function () { 26 | jasmine.clock().uninstall() 27 | 28 | xhr.restore() 29 | }) 30 | 31 | it('should pass query params', function () { 32 | uploader.opts.query = {} 33 | uploader.opts.target = 'file' 34 | uploader.addFile(new File(['123'], 'file')) 35 | uploader.upload() 36 | expect(requests.length).toBe(1) 37 | expect(requests[0].url).toContain('file') 38 | 39 | uploader.opts.query = {a: 1} 40 | uploader.files[0].retry() 41 | expect(requests.length).toBe(2) 42 | expect(requests[1].url).toContain('file') 43 | expect(requests[1].url).toContain('a=1') 44 | 45 | uploader.opts.query = function (file, chunk) { 46 | expect(file).toBe(uploader.files[0]) 47 | expect(chunk).toBe(uploader.files[0].chunks[0]) 48 | return { 49 | b: 2 50 | } 51 | } 52 | uploader.files[0].retry() 53 | expect(requests.length).toBe(3) 54 | expect(requests[2].url).toContain('file') 55 | expect(requests[2].url).toContain('b=2') 56 | expect(requests[2].url).not.toContain('a=1') 57 | 58 | uploader.opts.target = 'file?w=w' 59 | uploader.opts.query = {} 60 | uploader.files[0].retry() 61 | expect(requests.length).toBe(4) 62 | expect(requests[3].url).toContain('file?w=w&') 63 | expect(requests[3].url).not.toContain('a=1') 64 | expect(requests[3].url).not.toContain('b=2') 65 | }) 66 | 67 | it('should track file upload status with lots of chunks', function () { 68 | uploader.opts.chunkSize = 1 69 | uploader.addFile(new File(['IIIIIIIIII'], 'file2')) 70 | var file = uploader.files[0] 71 | expect(file.chunks.length).toBe(10) 72 | uploader.upload() 73 | expect(file.progress()).toBe(0) 74 | for (var i = 0; i < 9; i++) { 75 | expect(requests[i]).toBeDefined() 76 | expect(file.isComplete()).toBeFalsy() 77 | expect(file.isUploading()).toBeTruthy() 78 | requests[i].respond(200) 79 | expect(file.progress()).toBe((i+1) / 10) 80 | expect(file.isComplete()).toBeFalsy() 81 | expect(file.isUploading()).toBeTruthy() 82 | } 83 | expect(requests[9]).toBeDefined() 84 | expect(file.isComplete()).toBeFalsy() 85 | expect(file.isUploading()).toBeTruthy() 86 | expect(file.progress()).toBe(0.9) 87 | requests[i].respond(200) 88 | expect(file.isComplete()).toBeTruthy() 89 | expect(file.isUploading()).toBeFalsy() 90 | expect(file.progress()).toBe(1) 91 | 
expect(uploader.progress()).toBe(1) 92 | }) 93 | 94 | it('should throw expected events', function () { 95 | var events = [] 96 | uploader.on('catchAll', function (event) { 97 | events.push(event) 98 | }) 99 | uploader.opts.chunkSize = 1 100 | uploader.opts.progressCallbacksInterval = 0 101 | uploader.addFile(new File(['12'], 'file3')) 102 | var file = uploader.files[0] 103 | expect(file.chunks.length).toBe(2) 104 | uploader.upload() 105 | // Sync events 106 | expect(events.length).toBe(4) 107 | expect(events[0]).toBe('fileAdded') 108 | expect(events[1]).toBe('filesAdded') 109 | expect(events[2]).toBe('filesSubmitted') 110 | expect(events[3]).toBe('uploadStart') 111 | // Async 112 | requests[0].respond(200) 113 | expect(events.length).toBe(5) 114 | expect(events[4]).toBe('fileProgress') 115 | requests[1].respond(400) 116 | expect(events.length).toBe(5) 117 | requests[2].progress(5, 10, true) 118 | expect(events.length).toBe(6) 119 | expect(events[5]).toBe('fileProgress') 120 | requests[2].respond(200) 121 | expect(events.length).toBe(9) 122 | expect(events[6]).toBe('fileProgress') 123 | expect(events[7]).toBe('fileSuccess') 124 | expect(events[8]).toBe('fileComplete') 125 | 126 | jasmine.clock().tick(1) 127 | expect(events.length).toBe(10) 128 | expect(events[9]).toBe('complete') 129 | 130 | uploader.upload() 131 | expect(events.length).toBe(11) 132 | expect(events[10]).toBe('uploadStart') 133 | 134 | // complete event is always asynchronous 135 | jasmine.clock().tick(1) 136 | expect(events.length).toBe(12) 137 | expect(events[11]).toBe('complete') 138 | }) 139 | 140 | it('should pause and resume file', function () { 141 | uploader.opts.chunkSize = 1 142 | uploader.opts.simultaneousUploads = 2 143 | uploader.addFile(new File(['1234'], 'file4')) 144 | uploader.addFile(new File(['56'], 'file5')) 145 | var files = uploader.files 146 | expect(files[0].chunks.length).toBe(4) 147 | expect(files[1].chunks.length).toBe(2) 148 | uploader.upload() 149 | expect(files[0].isUploading()).toBeTruthy() 150 | expect(requests.length).toBe(2) 151 | expect(requests[0].aborted).toBeUndefined() 152 | expect(requests[1].aborted).toBeUndefined() 153 | // should start upload second file 154 | files[0].pause() 155 | expect(files[0].isUploading()).toBeFalsy() 156 | expect(files[1].isUploading()).toBeTruthy() 157 | expect(requests.length).toBe(4) 158 | expect(requests[0].aborted).toBeTruthy() 159 | expect(requests[1].aborted).toBeTruthy() 160 | expect(requests[2].aborted).toBeUndefined() 161 | expect(requests[3].aborted).toBeUndefined() 162 | // Should resume file after second file chunks is uploaded 163 | files[0].resume() 164 | expect(files[0].isUploading()).toBeFalsy() 165 | expect(requests.length).toBe(4) 166 | requests[2].respond(200)// second file chunk 167 | expect(files[0].isUploading()).toBeTruthy() 168 | expect(files[1].isUploading()).toBeTruthy() 169 | expect(requests.length).toBe(5) 170 | requests[3].respond(200) // second file chunk 171 | expect(requests.length).toBe(6) 172 | expect(files[0].isUploading()).toBeTruthy() 173 | expect(files[1].isUploading()).toBeFalsy() 174 | expect(files[1].isComplete()).toBeTruthy() 175 | requests[4].respond(200) 176 | expect(requests.length).toBe(7) 177 | requests[5].respond(200) 178 | expect(requests.length).toBe(8) 179 | requests[6].respond(200) 180 | expect(requests.length).toBe(8) 181 | requests[7].respond(200) 182 | expect(requests.length).toBe(8) 183 | // Upload finished 184 | expect(files[0].isUploading()).toBeFalsy() 185 | 
expect(files[0].isComplete()).toBeTruthy()
186 |     expect(files[0].progress()).toBe(1)
187 |     expect(files[1].isUploading()).toBeFalsy()
188 |     expect(files[1].isComplete()).toBeTruthy()
189 |     expect(files[1].progress()).toBe(1)
190 |     expect(uploader.progress()).toBe(1)
191 |   })
192 | 
193 |   it('should retry file', function () {
194 |     uploader.opts.testChunks = false
195 |     uploader.opts.chunkSize = 1
196 |     uploader.opts.simultaneousUploads = 1
197 |     uploader.opts.maxChunkRetries = 1
198 |     uploader.opts.permanentErrors = [500]
199 |     var error = jasmine.createSpy('error')
200 |     var progress = jasmine.createSpy('progress')
201 |     var success = jasmine.createSpy('success')
202 |     var retry = jasmine.createSpy('retry')
203 |     uploader.on('fileError', error)
204 |     uploader.on('fileProgress', progress)
205 |     uploader.on('fileSuccess', success)
206 |     uploader.on('fileRetry', retry)
207 | 
208 |     uploader.addFile(new File(['12'], 'testfile'))
209 |     var file = uploader.files[0]
210 |     expect(file.chunks.length).toBe(2)
211 |     var firstChunk = file.chunks[0]
212 |     var secondChunk = file.chunks[1]
213 |     expect(firstChunk.status()).toBe('pending')
214 |     expect(secondChunk.status()).toBe('pending')
215 | 
216 |     uploader.upload()
217 |     expect(requests.length).toBe(1)
218 |     expect(firstChunk.status()).toBe('uploading')
219 |     expect(secondChunk.status()).toBe('pending')
220 | 
221 |     expect(error).not.toHaveBeenCalled()
222 |     expect(progress).not.toHaveBeenCalled()
223 |     expect(success).not.toHaveBeenCalled()
224 |     expect(retry).not.toHaveBeenCalled()
225 | 
226 |     requests[0].respond(400)
227 |     expect(requests.length).toBe(2)
228 |     expect(firstChunk.status()).toBe('uploading')
229 |     expect(secondChunk.status()).toBe('pending')
230 | 
231 |     expect(error).not.toHaveBeenCalled()
232 |     expect(progress).not.toHaveBeenCalled()
233 |     expect(success).not.toHaveBeenCalled()
234 |     expect(retry).toHaveBeenCalled()
235 | 
236 |     requests[1].respond(200)
237 |     expect(requests.length).toBe(3)
238 |     expect(firstChunk.status()).toBe('success')
239 |     expect(secondChunk.status()).toBe('uploading')
240 | 
241 |     expect(error).not.toHaveBeenCalled()
242 |     expect(progress.calls.count()).toBe(1)
243 |     expect(success).not.toHaveBeenCalled()
244 |     expect(retry.calls.count()).toBe(1)
245 | 
246 |     requests[2].respond(400)
247 |     expect(requests.length).toBe(4)
248 |     expect(firstChunk.status()).toBe('success')
249 |     expect(secondChunk.status()).toBe('uploading')
250 | 
251 |     expect(error).not.toHaveBeenCalled()
252 |     expect(progress.calls.count()).toBe(1)
253 |     expect(success).not.toHaveBeenCalled()
254 |     expect(retry.calls.count()).toBe(2)
255 | 
256 |     requests[3].respond(400, {}, 'Err')
257 |     expect(requests.length).toBe(4)
258 |     expect(file.chunks.length).toBe(0)
259 | 
260 |     expect(error.calls.count()).toBe(1)
261 |     expect(error).toHaveBeenCalledWith(file.getRoot(), file, 'Err', secondChunk)
262 |     expect(progress.calls.count()).toBe(1)
263 |     expect(success).not.toHaveBeenCalled()
264 |     expect(retry.calls.count()).toBe(2)
265 | 
266 |     expect(file.error).toBeTruthy()
267 |     expect(file.isComplete()).toBeFalsy()
268 |     expect(file.isUploading()).toBeFalsy()
269 |     expect(file.progress()).toBe(1)
270 |   })
271 | 
272 |   it('should retry file with timeout', function () {
273 |     uploader.opts.testChunks = false
274 |     uploader.opts.maxChunkRetries = 1
275 |     uploader.opts.chunkRetryInterval = 100
276 | 
277 |     var error = jasmine.createSpy('error')
278 |     var success = jasmine.createSpy('success')
279 |     var retry = jasmine.createSpy('retry')
280 |     uploader.on('fileError', error)
281 |     uploader.on('fileSuccess', success)
282 |     uploader.on('fileRetry', retry)
283 | 
284 |     uploader.addFile(new File(['12'], 'lalal'))
285 |     var file = uploader.files[0]
286 |     uploader.upload()
287 |     expect(requests.length).toBe(1)
288 | 
289 |     requests[0].respond(400)
290 |     expect(requests.length).toBe(1)
291 |     expect(error).not.toHaveBeenCalled()
292 |     expect(success).not.toHaveBeenCalled()
293 |     expect(retry).toHaveBeenCalled()
294 |     expect(file.chunks[0].status()).toBe('uploading')
295 | 
296 |     jasmine.clock().tick(100)
297 |     expect(requests.length).toBe(2)
298 |     requests[1].respond(200)
299 |     expect(error).not.toHaveBeenCalled()
300 |     expect(success).toHaveBeenCalled()
301 |     expect(retry).toHaveBeenCalled()
302 |   })
303 | 
304 |   it('should fail on permanent error', function () {
305 |     uploader.opts.testChunks = false
306 |     uploader.opts.chunkSize = 1
307 |     uploader.opts.simultaneousUploads = 2
308 |     uploader.opts.maxChunkRetries = 1
309 |     uploader.opts.permanentErrors = [500]
310 | 
311 |     var error = jasmine.createSpy('error')
312 |     var success = jasmine.createSpy('success')
313 |     var retry = jasmine.createSpy('retry')
314 |     uploader.on('fileError', error)
315 |     uploader.on('fileSuccess', success)
316 |     uploader.on('fileRetry', retry)
317 | 
318 |     uploader.addFile(new File(['abc'], 'asdfs'))
319 |     var file = uploader.files[0]
320 |     expect(file.chunks.length).toBe(3)
321 |     uploader.upload()
322 |     expect(requests.length).toBe(2)
323 |     requests[0].respond(500)
324 |     expect(requests.length).toBe(2)
325 |     expect(error).toHaveBeenCalled()
326 |     expect(retry).not.toHaveBeenCalled()
327 |     expect(success).not.toHaveBeenCalled()
328 |   })
329 | 
330 |   it('should fail on permanent test error', function () {
331 |     uploader.opts.testChunks = true
332 |     uploader.opts.chunkSize = 1
333 |     uploader.opts.simultaneousUploads = 2
334 |     uploader.opts.maxChunkRetries = 1
335 |     uploader.opts.permanentErrors = [500]
336 | 
337 |     var error = jasmine.createSpy('error')
338 |     var success = jasmine.createSpy('success')
339 |     var retry = jasmine.createSpy('retry')
340 |     uploader.on('fileError', error)
341 |     uploader.on('fileSuccess', success)
342 |     uploader.on('fileRetry', retry)
343 | 
344 |     uploader.addFile(new File(['abc'], 'filedd'))
345 |     uploader.upload()
346 |     expect(requests.length).toBe(2)
347 |     requests[0].respond(500)
348 |     expect(requests.length).toBe(2)
349 |     expect(error).toHaveBeenCalled()
350 |     expect(retry).not.toHaveBeenCalled()
351 |     expect(success).not.toHaveBeenCalled()
352 |   })
353 | 
354 |   it('should upload empty file', function () {
355 |     var error = jasmine.createSpy('error')
356 |     var success = jasmine.createSpy('success')
357 |     uploader.on('fileError', error)
358 |     uploader.on('fileSuccess', success)
359 | 
360 |     uploader.addFile(new File([], 'ls'))
361 | 
362 |     // https://github.com/flowjs/flow.js/issues/55
363 |     if (window.navigator.msPointerEnabled) {
364 |       expect(uploader.files.length).toBe(0)
365 |     } else {
366 |       expect(uploader.files.length).toBe(1)
367 |       var file = uploader.files[0]
368 |       uploader.upload()
369 |       expect(requests.length).toBe(1)
370 |       expect(file.progress()).toBe(0)
371 |       requests[0].respond(200)
372 |       expect(requests.length).toBe(1)
373 |       expect(error).not.toHaveBeenCalled()
374 |       expect(success).toHaveBeenCalled()
375 |       expect(file.progress()).toBe(1)
376 |       expect(file.isUploading()).toBe(false)
377 |       expect(file.isComplete()).toBe(true)
378 |     }
379 |   })
380 | 
381 |   it('should not upload folder', function () {
382 |     // http://stackoverflow.com/questions/8856628/detecting-folders-directories-in-javascript-filelist-objects
383 |     uploader.addFile({
384 |       name: '.',
385 |       size: 0
386 |     })
387 |     expect(uploader.files.length).toBe(0)
388 |     uploader.addFile({
389 |       name: '.',
390 |       size: 4096
391 |     })
392 |     expect(uploader.files.length).toBe(0)
393 |     uploader.addFile({
394 |       name: '.',
395 |       size: 4096 * 2
396 |     })
397 |     expect(uploader.files.length).toBe(0)
398 |   })
399 | 
400 |   it('should preprocess chunks', function () {
401 |     var preprocess = jasmine.createSpy('preprocess')
402 |     var error = jasmine.createSpy('error')
403 |     var success = jasmine.createSpy('success')
404 |     uploader.on('fileError', error)
405 |     uploader.on('fileSuccess', success)
406 |     uploader.opts.preprocess = preprocess
407 |     uploader.addFile(new File(['abc'], 'abc'))
408 |     var file = uploader.files[0]
409 |     uploader.upload()
410 |     expect(requests.length).toBe(0)
411 |     expect(preprocess).toHaveBeenCalledWith(file.chunks[0])
412 |     expect(file.chunks[0].preprocessState).toBe(1)
413 |     file.chunks[0].preprocessFinished()
414 |     expect(requests.length).toBe(1)
415 |     requests[0].respond(200, [], 'response')
416 |     expect(success).toHaveBeenCalledWith(file.getRoot(), file, 'response', file.chunks[0])
417 |     expect(error).not.toHaveBeenCalled()
418 |   })
419 | 
420 |   it('should preprocess chunks and wait for preprocess to finish', function () {
421 |     uploader.opts.simultaneousUploads = 1
422 |     var preprocess = jasmine.createSpy('preprocess')
423 |     uploader.opts.preprocess = preprocess
424 |     uploader.addFile(new File(['abc'], 'abc'))
425 |     uploader.addFile(new File(['abca'], 'abca'))
426 |     var file = uploader.files[0]
427 |     var secondFile = uploader.files[1]
428 |     uploader.upload()
429 |     expect(requests.length).toBe(0)
430 |     expect(preprocess).toHaveBeenCalledWith(file.chunks[0])
431 |     expect(preprocess).not.toHaveBeenCalledWith(secondFile.chunks[0])
432 | 
433 |     uploader.upload()
434 |     expect(preprocess).not.toHaveBeenCalledWith(secondFile.chunks[0])
435 |   })
436 | 
437 |   it('should resume preprocess chunks after pause', function () {
438 |     uploader.opts.chunkSize = 1
439 |     uploader.opts.simultaneousUploads = 1
440 |     uploader.opts.testChunks = false
441 |     var preprocess = jasmine.createSpy('preprocess')
442 |     var error = jasmine.createSpy('error')
443 |     var success = jasmine.createSpy('success')
444 |     uploader.on('fileError', error)
445 |     uploader.on('fileSuccess', success)
446 |     uploader.opts.preprocess = preprocess
447 |     uploader.addFile(new File(['abc'], 'abcfile'))
448 |     var file = uploader.files[0]
449 |     uploader.upload()
450 |     for (var i = 0; i < file.chunks.length; i++) {
451 |       expect(preprocess).toHaveBeenCalledWith(file.chunks[i])
452 |       file.chunks[i].preprocessFinished()
453 |       file.pause()
454 |       file.resume()
455 |       requests[requests.length-1].respond(200, [], 'response')
456 |     }
457 |     expect(success).toHaveBeenCalledWith(file.getRoot(), file, 'response', file.chunks[file.chunks.length-1])
458 |     expect(error).not.toHaveBeenCalled()
459 |   })
460 | 
461 |   it('should set chunk as a third event parameter', function () {
462 |     var success = jasmine.createSpy('success')
463 |     uploader.on('fileSuccess', success)
464 |     uploader.addFile(new File(['abc'], 'abccc'))
465 |     var file = uploader.files[0]
466 |     uploader.upload()
467 |     requests[0].respond(200, [], 'response')
468 |     expect(success).toHaveBeenCalledWith(file.getRoot(), file, 'response', file.chunks[0])
469 |   })
470 | 
471 |   it('should have upload speed', function () {
472 |     var clock = sinon.useFakeTimers()
473 |     uploader.opts.testChunks = false
474 |     uploader.opts.speedSmoothingFactor = 0.5
475 |     uploader.opts.simultaneousUploads = 1
476 |     var fileProgress = jasmine.createSpy('fileProgress')
477 |     uploader.on('fileProgress', fileProgress)
478 |     uploader.addFile(new File(['0123456789'], 'adsfsdfs'))
479 |     uploader.addFile(new File(['12345'], 'asdfdf'))
480 |     var fileFirst = uploader.files[0]
481 |     var fileSecond = uploader.files[1]
482 |     expect(fileFirst.currentSpeed).toBe(0)
483 |     expect(fileFirst.averageSpeed).toBe(0)
484 |     expect(fileFirst.sizeUploaded()).toBe(0)
485 |     expect(fileFirst.timeRemaining()).toBe(Number.POSITIVE_INFINITY)
486 |     expect(uploader.sizeUploaded()).toBe(0)
487 |     expect(uploader.timeRemaining()).toBe(Number.POSITIVE_INFINITY)
488 |     uploader.upload()
489 | 
490 |     clock.tick(1000)
491 |     requests[0].progress(50, 100, true)
492 |     expect(fileProgress).toHaveBeenCalled()
493 |     expect(fileFirst.currentSpeed).toBe(5)
494 |     expect(fileFirst.averageSpeed).toBe(2.5)
495 |     expect(fileFirst.sizeUploaded()).toBe(5)
496 |     expect(fileFirst.timeRemaining()).toBe(2)
497 | 
498 |     expect(uploader.sizeUploaded()).toBe(5)
499 |     expect(uploader.timeRemaining()).toBe(4)
500 | 
501 |     clock.tick(1000)
502 |     requests[0].progress(10, 10, true)
503 |     expect(fileFirst.currentSpeed).toBe(5)
504 |     expect(fileFirst.averageSpeed).toBe(3.75)
505 | 
506 |     requests[0].respond(200, [], 'response')
507 |     expect(fileFirst.currentSpeed).toBe(0)
508 |     expect(fileFirst.averageSpeed).toBe(0)
509 | 
510 |     requests[1].respond(200, [], 'response')
511 |     expect(fileFirst.sizeUploaded()).toBe(10)
512 |     expect(fileFirst.timeRemaining()).toBe(0)
513 |     expect(fileSecond.sizeUploaded()).toBe(5)
514 |     expect(fileSecond.timeRemaining()).toBe(0)
515 |     expect(uploader.sizeUploaded()).toBe(15)
516 |     expect(uploader.timeRemaining()).toBe(0)
517 | 
518 |     // paused and resumed
519 |     uploader.addFile(new File(['012345678901234'], 'sdfasdf'))
520 |     var fileThird = uploader.files[2]
521 |     expect(fileThird.timeRemaining()).toBe(Number.POSITIVE_INFINITY)
522 |     uploader.upload()
523 |     clock.tick(1000)
524 |     requests[2].progress(10, 15, true)
525 |     expect(fileThird.timeRemaining()).toBe(1)
526 |     expect(uploader.timeRemaining()).toBe(1)
527 |     fileThird.pause()
528 |     expect(fileThird.timeRemaining()).toBe(0)
529 |     expect(uploader.timeRemaining()).toBe(0)
530 |     fileThird.resume()
531 |     expect(fileThird.timeRemaining()).toBe(Number.POSITIVE_INFINITY)
532 |     expect(uploader.timeRemaining()).toBe(Number.POSITIVE_INFINITY)
533 |     clock.tick(1000)
534 |     requests[3].progress(11, 15, true)
535 |     expect(fileThird.timeRemaining()).toBe(8)
536 |     expect(uploader.timeRemaining()).toBe(8)
537 |     clock.tick(1000)
538 |     requests[3].progress(12, 15, true)
539 |     expect(fileThird.timeRemaining()).toBe(4)
540 |     expect(uploader.timeRemaining()).toBe(4)
541 | 
542 |     requests[3].respond(500)
543 |     expect(fileThird.currentSpeed).toBe(0)
544 |     expect(fileThird.averageSpeed).toBe(0)
545 |     expect(fileThird.timeRemaining()).toBe(0)
546 |     expect(uploader.timeRemaining()).toBe(0)
547 |   })
548 | 
549 |   it('should allow to hook initFileFn and readFileFn', function () {
550 |     var error = jasmine.createSpy('error')
551 |     var success = jasmine.createSpy('success')
552 |     uploader.on('fileError', error)
553 |     uploader.on('fileSuccess', success)
554 | 
555 |     uploader.opts.chunkSize = 1
556 | 
557 |     uploader.opts.simultaneousUploads = 10
558 | 
559 |     uploader.opts.initFileFn = function(uploaderObj) {
560 |       // emulate a compressor that, starting from a payload of 10 characters,
561 |       // will output 6 characters.
562 |       var fakeFile = {
563 |         size: 6
564 |       }
565 | 
566 |       uploaderObj.file = fakeFile
567 |       uploaderObj.size = uploaderObj.file.size
568 |     }
569 | 
570 |     uploader.opts.readFileFn = function(fileObj, startByte, endByte, fileType, chunk) {
571 |       chunk.readFinished('X')
572 |     }
573 | 
574 |     uploader.addFile(new File(['0123456789'], 'ldlldl'))
575 | 
576 |     uploader.upload()
577 | 
578 |     expect(requests.length).toBe(6)
579 | 
580 |     for (var i = 0; i < requests.length; i++) {
581 |       requests[i].respond(200)
582 |     }
583 | 
584 |     var file = uploader.files[0]
585 |     expect(file.progress()).toBe(1)
586 |     expect(file.isUploading()).toBe(false)
587 |     expect(file.isComplete()).toBe(true)
588 | 
589 |     expect(requests.length).toBe(6)
590 |   })
591 | 
592 |   it('should skip upload chunks by response - checkChunkUploadedByResponse', function () {
593 |     uploader.opts.testChunks = true
594 |     uploader.opts.chunkSize = 1
595 |     uploader.opts.simultaneousUploads = 3
596 |     uploader.opts.checkChunkUploadedByResponse = function(chunk, message) {
597 |       var objMessage = {}
598 |       try {
599 |         objMessage = JSON.parse(message)
600 |       } catch (e) {}
601 |       return objMessage.uploaded_chunks.indexOf(chunk.offset + 1) >= 0
602 |     }
603 | 
604 |     uploader.addFile(new File(['0123456789'], 'ldlldl'))
605 | 
606 |     uploader.upload()
607 | 
608 |     expect(requests.length).toBe(1)
609 |     expect(requests[0].method).toBe('GET')
610 |     requests[0].respond(200, [], '{"uploaded_chunks": [2, 3, 4, 5, 9]}')
611 | 
612 |     expect(requests.length).toBe(1 + 3)
613 |     expect(requests[1].method).toBe('POST')
614 |     expect(requests[3].method).toBe('POST')
615 |     for (var i = 1; i < requests.length; i++) {
616 |       requests[i].respond(200)
617 |     }
618 |     expect(requests.length).toBe(6)
619 |     requests[4].respond(200)
620 |     requests[5].respond(200)
621 |     var file = uploader.files[0]
622 |     expect(file.progress()).toBe(1)
623 |     expect(file.isUploading()).toBe(false)
624 |     expect(file.isComplete()).toBe(true)
625 |   })
626 | 
627 |   it('should resume one file when initialPaused is true', function () {
628 |     uploader.opts.initialPaused = true
629 |     uploader.addFile(new File(['IIIIIIIIII'], 'file2'))
630 |     uploader.addFile(new File(['IIIIIIIIII'], 'file3'))
631 | 
632 |     uploader.upload()
633 | 
634 |     expect(uploader.isUploading()).toBe(false)
635 |     expect(uploader.files[0].paused).toBe(true)
636 |     expect(uploader.files[1].paused).toBe(true)
637 |     uploader.files[0].resume()
638 |     expect(uploader.isUploading()).toBe(true)
639 |     expect(uploader.files[0].paused).toBe(false)
640 |     expect(uploader.files[1].paused).toBe(true)
641 |   })
642 | })
643 | 
--------------------------------------------------------------------------------