├── .browserslistrc
├── public
├── favicon.ico
├── index.html
├── hash.js
└── spark-md5.min.js
├── src
├── assets
│ └── logo.png
├── main.js
├── components
│ └── HelloWorld.vue
└── App.vue
├── babel.config.js
├── .gitignore
├── package.json
├── server
├── index.js
└── controller.js
└── README.md
/.browserslistrc:
--------------------------------------------------------------------------------
1 | > 1%
2 | last 2 versions
3 | not dead
4 |
--------------------------------------------------------------------------------
/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/miracle90/big-file-upload/HEAD/public/favicon.ico
--------------------------------------------------------------------------------
/src/assets/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/miracle90/big-file-upload/HEAD/src/assets/logo.png
--------------------------------------------------------------------------------
/babel.config.js:
--------------------------------------------------------------------------------
// Babel configuration: delegate all transform decisions to the Vue CLI
// preset (it reads browser targets from .browserslistrc).
module.exports = {
  presets: [
    '@vue/cli-plugin-babel/preset'
  ]
}
6 |
--------------------------------------------------------------------------------
/src/main.js:
--------------------------------------------------------------------------------
import Vue from "vue";
import App from "./App.vue";
import ElementUI from "element-ui";
import "element-ui/lib/theme-chalk/index.css";

// Register all Element UI components globally.
Vue.use(ElementUI)

// Suppress the "running in development mode" console hint.
Vue.config.productionTip = false;

// Bootstrap the root Vue instance and mount it on the #app element.
new Vue({
  render: (h) => h(App),
}).$mount("#app");
13 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | node_modules
3 | /dist
4 |
5 |
6 | # local env files
7 | .env.local
8 | .env.*.local
9 |
10 | # Log files
11 | npm-debug.log*
12 | yarn-debug.log*
13 | yarn-error.log*
14 | pnpm-debug.log*
15 |
16 | # Editor directories and files
17 | .idea
18 | .vscode
19 | *.suo
20 | *.ntvs*
21 | *.njsproj
22 | *.sln
23 | *.sw?
24 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "big-file-upload",
3 | "version": "0.1.0",
4 | "private": true,
5 | "scripts": {
6 | "serve": "vue-cli-service serve",
7 | "build": "vue-cli-service build"
8 | },
9 | "dependencies": {
10 | "core-js": "^3.6.5",
11 | "element-ui": "^2.15.6",
12 | "multiparty": "^4.2.3",
13 | "vue": "^2.6.11"
14 | },
15 | "devDependencies": {
16 | "@vue/cli-plugin-babel": "~4.5.13",
17 | "@vue/cli-service": "~4.5.13",
18 | "vue-template-compiler": "^2.6.11"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | <%= htmlWebpackPlugin.options.title %>
9 |
10 |
11 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/server/index.js:
--------------------------------------------------------------------------------
const Controller = require("./controller");
const http = require("http");
const server = http.createServer();

const controller = new Controller();

// Minimal router for the chunked-upload API. CORS is wide open because the
// Vue dev server runs on a different port during development.
server.on("request", async (req, res) => {
  res.setHeader("Access-Control-Allow-Origin", "*");
  res.setHeader("Access-Control-Allow-Headers", "*");
  // Answer CORS preflight requests immediately.
  if (req.method === "OPTIONS") {
    // Fix: http.ServerResponse exposes `statusCode`; the original assigned a
    // non-existent `status` property (it only worked because 200 is the default).
    res.statusCode = 200;
    res.end();
    return;
  }
  // /verify — does the file need uploading / which chunks already exist?
  if (req.url === "/verify") {
    await controller.handleVerifyUpload(req, res);
    return;
  }

  // /merge — all chunks uploaded; assemble the final file.
  if (req.url === "/merge") {
    await controller.handleMerge(req, res);
    return;
  }

  // / — receive a single file chunk (multipart form data).
  if (req.url === "/") {
    await controller.handleFormData(req, res);
  }
});

server.listen(3000, () => console.log("正在监听 3000 端口"));
--------------------------------------------------------------------------------
/public/hash.js:
--------------------------------------------------------------------------------
// Web Worker: computes the whole-file MD5 off the main thread so hashing a
// large file does not block the UI.
self.importScripts("/spark-md5.min.js"); // load SparkMD5 into the worker scope

// Message in:  { fileChunkList: [{ file: Blob }, ...] }
// Messages out: { percentage } progress updates, then a final
//               { percentage: 100, hash } once every chunk is hashed.
self.onmessage = (e) => {
  const { fileChunkList } = e.data;
  const spark = new self.SparkMD5.ArrayBuffer();
  let percentage = 0;
  let count = 0;
  // Read chunk `index` and fold it into the incremental hash. Chunks are
  // processed strictly one at a time (the next read is kicked off inside
  // onload), so only one chunk's bytes are in memory at once.
  const loadNext = (index) => {
    const reader = new FileReader();
    reader.readAsArrayBuffer(fileChunkList[index].file);
    reader.onload = (e) => {
      count++;
      spark.append(e.target.result);
      if (count === fileChunkList.length) {
        // All chunks hashed: report completion together with the digest.
        self.postMessage({
          percentage: 100,
          hash: spark.end(),
        });
        self.close(); // single-use worker; shut it down
      } else {
        percentage += 100 / fileChunkList.length;
        self.postMessage({
          percentage,
        });
        loadNext(count);
      }
    };
  };
  loadNext(0);
};
32 |
--------------------------------------------------------------------------------
/src/components/HelloWorld.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
{{ msg }}
4 |
5 | For a guide and recipes on how to configure / customize this project,
6 | check out the
7 | vue-cli documentation.
8 |
9 |
Installed CLI Plugins
10 |
13 |
Essential Links
14 |
21 |
Ecosystem
22 |
29 |
30 |
31 |
32 |
40 |
41 |
42 |
58 |
--------------------------------------------------------------------------------
/server/controller.js:
--------------------------------------------------------------------------------
1 | const multiparty = require("multiparty");
2 | const path = require("path");
3 | const fse = require("fs-extra");
4 |
// Extract the file extension including the dot, e.g. "a.tar.gz" -> ".gz".
// Fix: when the filename has no dot, lastIndexOf returns -1 and the original
// `slice(-1)` returned the LAST CHARACTER of the name; return "" instead.
const extractExt = (filename) => {
  const dotIndex = filename.lastIndexOf(".");
  return dotIndex === -1 ? "" : filename.slice(dotIndex);
};
const UPLOAD_DIR = path.resolve(__dirname, "..", "target"); // root directory for uploaded chunks and merged files
8 |
// Stream one chunk file into `writeStream`, then delete the chunk.
// Resolves when the chunk has been fully read.
// Fix: the original never listened for read errors, so a failed read left
// the promise pending forever and the chunk file on disk; now it rejects.
const pipeStream = (chunkPath, writeStream) =>
  new Promise((resolve, reject) => {
    const readStream = fse.createReadStream(chunkPath);
    readStream.on("end", () => {
      fse.unlinkSync(chunkPath); // chunk has been merged; drop the temp file
      resolve();
    });
    readStream.on("error", reject);
    readStream.pipe(writeStream);
  });
18 |
// Merge every chunk in target/<fileHash>/ into `filePath`.
// `size` is the chunk size the client used, so chunk i begins at byte i * size.
const mergeFileChunk = async (filePath, fileHash, size) => {
  const chunkDir = path.resolve(UPLOAD_DIR, fileHash);
  const chunkPaths = await fse.readdir(chunkDir);
  // Sort by the numeric chunk index (names look like "<fileHash>-<index>");
  // readdir order is not guaranteed to be the upload order.
  chunkPaths.sort((a, b) => Number(a.split("-")[1]) - Number(b.split("-")[1]));
  await Promise.all(
    chunkPaths.map((chunkPath, index) =>
      pipeStream(
        path.resolve(chunkDir, chunkPath),
        // Each chunk gets a write stream at its own byte offset, so all
        // chunks can be written concurrently. (The original also passed an
        // `end` option, which createWriteStream does not support — removed.)
        fse.createWriteStream(filePath, {
          start: index * size,
        })
      )
    )
  );
  fse.rmdirSync(chunkDir); // all chunks consumed above; remove the now-empty directory
};
40 |
// Collect the request body and parse it as JSON.
// Fix: the original concatenated Buffer chunks with `+=`, which decodes each
// chunk independently — a multi-byte UTF-8 character split across a chunk
// boundary turns into replacement characters. Accumulate raw Buffers and
// decode exactly once. Also rejects on stream errors or invalid JSON instead
// of throwing asynchronously / hanging.
const resolvePost = (req) =>
  new Promise((resolve, reject) => {
    const chunks = [];
    req.on("data", (data) => {
      chunks.push(Buffer.from(data));
    });
    req.on("end", () => {
      try {
        resolve(JSON.parse(Buffer.concat(chunks).toString("utf8")));
      } catch (err) {
        reject(err);
      }
    });
    req.on("error", reject);
  });
51 |
// List the chunk names already uploaded for this file hash; an empty array
// when no chunk directory exists yet (nothing uploaded so far).
const createUploadedList = async (fileHash) => {
  const chunkDir = path.resolve(UPLOAD_DIR, fileHash);
  if (!fse.existsSync(chunkDir)) {
    return [];
  }
  return fse.readdir(chunkDir);
};
57 |
58 | module.exports = class {
59 | // 合并切片
60 | async handleMerge(req, res) {
61 | const data = await resolvePost(req);
62 | const { fileHash, filename, size } = data;
63 | const ext = extractExt(filename);
64 | const filePath = path.resolve(UPLOAD_DIR, `${fileHash}${ext}`);
65 | await mergeFileChunk(filePath, fileHash, size);
66 | res.end(
67 | JSON.stringify({
68 | code: 0,
69 | message: "file merged success",
70 | })
71 | );
72 | }
73 | // 处理切片
74 | async handleFormData(req, res) {
75 | const multipart = new multiparty.Form();
76 |
77 | multipart.parse(req, async (err, fields, files) => {
78 | if (err) {
79 | console.error(err);
80 | res.status = 500;
81 | res.end("process file chunk failed");
82 | return;
83 | }
84 | const [chunk] = files.chunk;
85 | const [hash] = fields.hash;
86 | const [fileHash] = fields.fileHash;
87 | const [filename] = fields.filename;
88 | const filePath = path.resolve(
89 | UPLOAD_DIR,
90 | `${fileHash}${extractExt(filename)}`
91 | );
92 | const chunkDir = path.resolve(UPLOAD_DIR, fileHash);
93 |
94 | // 文件存在直接返回
95 | if (fse.existsSync(filePath)) {
96 | res.end("file exist");
97 | return;
98 | }
99 |
100 | // 切片目录不存在,创建切片目录
101 | if (!fse.existsSync(chunkDir)) {
102 | await fse.mkdirs(chunkDir);
103 | }
104 | // fs-extra 专用方法,类似 fs.rename 并且跨平台
105 | // fs-extra 的 rename 方法 windows 平台会有权限问题
106 | // https://github.com/meteor/meteor/issues/7852#issuecomment-255767835
107 | await fse.move(chunk.path, path.resolve(chunkDir, hash));
108 | res.end("received file chunk");
109 | });
110 | }
111 | // 验证是否已上传/已上传切片下标
112 | async handleVerifyUpload(req, res) {
113 | const data = await resolvePost(req);
114 | const { fileHash, filename } = data;
115 | const ext = extractExt(filename);
116 | const filePath = path.resolve(UPLOAD_DIR, `${fileHash}${ext}`);
117 | if (fse.existsSync(filePath)) {
118 | res.end(
119 | JSON.stringify({
120 | shouldUpload: false,
121 | })
122 | );
123 | } else {
124 | res.end(
125 | JSON.stringify({
126 | shouldUpload: true,
127 | uploadedList: await createUploadedList(fileHash),
128 | })
129 | );
130 | }
131 | }
132 | };
133 |
--------------------------------------------------------------------------------
/src/App.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
9 | 上传
12 | 恢复
15 | 暂停
21 |
22 |
23 |
计算文件 hash
24 |
25 |
总进度
26 |
27 |
28 |
29 |
34 |
35 |
36 | {{ row.size | transformByte }}
37 |
38 |
39 |
40 |
41 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
277 |
--------------------------------------------------------------------------------
/public/spark-md5.min.js:
--------------------------------------------------------------------------------
1 | (function (factory) {
2 | if (typeof exports === "object") {
3 | module.exports = factory();
4 | } else if (typeof define === "function" && define.amd) {
5 | define(factory);
6 | } else {
7 | var glob;
8 | try {
9 | glob = window;
10 | } catch (e) {
11 | glob = self;
12 | }
13 | glob.SparkMD5 = factory();
14 | }
15 | })(function (undefined) {
16 | "use strict";
17 | var add32 = function (a, b) {
18 | return (a + b) & 4294967295;
19 | },
20 | hex_chr = [
21 | "0",
22 | "1",
23 | "2",
24 | "3",
25 | "4",
26 | "5",
27 | "6",
28 | "7",
29 | "8",
30 | "9",
31 | "a",
32 | "b",
33 | "c",
34 | "d",
35 | "e",
36 | "f",
37 | ];
38 | function cmn(q, a, b, x, s, t) {
39 | a = add32(add32(a, q), add32(x, t));
40 | return add32((a << s) | (a >>> (32 - s)), b);
41 | }
42 | function md5cycle(x, k) {
43 | var a = x[0],
44 | b = x[1],
45 | c = x[2],
46 | d = x[3];
47 | a += (((b & c) | (~b & d)) + k[0] - 680876936) | 0;
48 | a = (((a << 7) | (a >>> 25)) + b) | 0;
49 | d += (((a & b) | (~a & c)) + k[1] - 389564586) | 0;
50 | d = (((d << 12) | (d >>> 20)) + a) | 0;
51 | c += (((d & a) | (~d & b)) + k[2] + 606105819) | 0;
52 | c = (((c << 17) | (c >>> 15)) + d) | 0;
53 | b += (((c & d) | (~c & a)) + k[3] - 1044525330) | 0;
54 | b = (((b << 22) | (b >>> 10)) + c) | 0;
55 | a += (((b & c) | (~b & d)) + k[4] - 176418897) | 0;
56 | a = (((a << 7) | (a >>> 25)) + b) | 0;
57 | d += (((a & b) | (~a & c)) + k[5] + 1200080426) | 0;
58 | d = (((d << 12) | (d >>> 20)) + a) | 0;
59 | c += (((d & a) | (~d & b)) + k[6] - 1473231341) | 0;
60 | c = (((c << 17) | (c >>> 15)) + d) | 0;
61 | b += (((c & d) | (~c & a)) + k[7] - 45705983) | 0;
62 | b = (((b << 22) | (b >>> 10)) + c) | 0;
63 | a += (((b & c) | (~b & d)) + k[8] + 1770035416) | 0;
64 | a = (((a << 7) | (a >>> 25)) + b) | 0;
65 | d += (((a & b) | (~a & c)) + k[9] - 1958414417) | 0;
66 | d = (((d << 12) | (d >>> 20)) + a) | 0;
67 | c += (((d & a) | (~d & b)) + k[10] - 42063) | 0;
68 | c = (((c << 17) | (c >>> 15)) + d) | 0;
69 | b += (((c & d) | (~c & a)) + k[11] - 1990404162) | 0;
70 | b = (((b << 22) | (b >>> 10)) + c) | 0;
71 | a += (((b & c) | (~b & d)) + k[12] + 1804603682) | 0;
72 | a = (((a << 7) | (a >>> 25)) + b) | 0;
73 | d += (((a & b) | (~a & c)) + k[13] - 40341101) | 0;
74 | d = (((d << 12) | (d >>> 20)) + a) | 0;
75 | c += (((d & a) | (~d & b)) + k[14] - 1502002290) | 0;
76 | c = (((c << 17) | (c >>> 15)) + d) | 0;
77 | b += (((c & d) | (~c & a)) + k[15] + 1236535329) | 0;
78 | b = (((b << 22) | (b >>> 10)) + c) | 0;
79 | a += (((b & d) | (c & ~d)) + k[1] - 165796510) | 0;
80 | a = (((a << 5) | (a >>> 27)) + b) | 0;
81 | d += (((a & c) | (b & ~c)) + k[6] - 1069501632) | 0;
82 | d = (((d << 9) | (d >>> 23)) + a) | 0;
83 | c += (((d & b) | (a & ~b)) + k[11] + 643717713) | 0;
84 | c = (((c << 14) | (c >>> 18)) + d) | 0;
85 | b += (((c & a) | (d & ~a)) + k[0] - 373897302) | 0;
86 | b = (((b << 20) | (b >>> 12)) + c) | 0;
87 | a += (((b & d) | (c & ~d)) + k[5] - 701558691) | 0;
88 | a = (((a << 5) | (a >>> 27)) + b) | 0;
89 | d += (((a & c) | (b & ~c)) + k[10] + 38016083) | 0;
90 | d = (((d << 9) | (d >>> 23)) + a) | 0;
91 | c += (((d & b) | (a & ~b)) + k[15] - 660478335) | 0;
92 | c = (((c << 14) | (c >>> 18)) + d) | 0;
93 | b += (((c & a) | (d & ~a)) + k[4] - 405537848) | 0;
94 | b = (((b << 20) | (b >>> 12)) + c) | 0;
95 | a += (((b & d) | (c & ~d)) + k[9] + 568446438) | 0;
96 | a = (((a << 5) | (a >>> 27)) + b) | 0;
97 | d += (((a & c) | (b & ~c)) + k[14] - 1019803690) | 0;
98 | d = (((d << 9) | (d >>> 23)) + a) | 0;
99 | c += (((d & b) | (a & ~b)) + k[3] - 187363961) | 0;
100 | c = (((c << 14) | (c >>> 18)) + d) | 0;
101 | b += (((c & a) | (d & ~a)) + k[8] + 1163531501) | 0;
102 | b = (((b << 20) | (b >>> 12)) + c) | 0;
103 | a += (((b & d) | (c & ~d)) + k[13] - 1444681467) | 0;
104 | a = (((a << 5) | (a >>> 27)) + b) | 0;
105 | d += (((a & c) | (b & ~c)) + k[2] - 51403784) | 0;
106 | d = (((d << 9) | (d >>> 23)) + a) | 0;
107 | c += (((d & b) | (a & ~b)) + k[7] + 1735328473) | 0;
108 | c = (((c << 14) | (c >>> 18)) + d) | 0;
109 | b += (((c & a) | (d & ~a)) + k[12] - 1926607734) | 0;
110 | b = (((b << 20) | (b >>> 12)) + c) | 0;
111 | a += ((b ^ c ^ d) + k[5] - 378558) | 0;
112 | a = (((a << 4) | (a >>> 28)) + b) | 0;
113 | d += ((a ^ b ^ c) + k[8] - 2022574463) | 0;
114 | d = (((d << 11) | (d >>> 21)) + a) | 0;
115 | c += ((d ^ a ^ b) + k[11] + 1839030562) | 0;
116 | c = (((c << 16) | (c >>> 16)) + d) | 0;
117 | b += ((c ^ d ^ a) + k[14] - 35309556) | 0;
118 | b = (((b << 23) | (b >>> 9)) + c) | 0;
119 | a += ((b ^ c ^ d) + k[1] - 1530992060) | 0;
120 | a = (((a << 4) | (a >>> 28)) + b) | 0;
121 | d += ((a ^ b ^ c) + k[4] + 1272893353) | 0;
122 | d = (((d << 11) | (d >>> 21)) + a) | 0;
123 | c += ((d ^ a ^ b) + k[7] - 155497632) | 0;
124 | c = (((c << 16) | (c >>> 16)) + d) | 0;
125 | b += ((c ^ d ^ a) + k[10] - 1094730640) | 0;
126 | b = (((b << 23) | (b >>> 9)) + c) | 0;
127 | a += ((b ^ c ^ d) + k[13] + 681279174) | 0;
128 | a = (((a << 4) | (a >>> 28)) + b) | 0;
129 | d += ((a ^ b ^ c) + k[0] - 358537222) | 0;
130 | d = (((d << 11) | (d >>> 21)) + a) | 0;
131 | c += ((d ^ a ^ b) + k[3] - 722521979) | 0;
132 | c = (((c << 16) | (c >>> 16)) + d) | 0;
133 | b += ((c ^ d ^ a) + k[6] + 76029189) | 0;
134 | b = (((b << 23) | (b >>> 9)) + c) | 0;
135 | a += ((b ^ c ^ d) + k[9] - 640364487) | 0;
136 | a = (((a << 4) | (a >>> 28)) + b) | 0;
137 | d += ((a ^ b ^ c) + k[12] - 421815835) | 0;
138 | d = (((d << 11) | (d >>> 21)) + a) | 0;
139 | c += ((d ^ a ^ b) + k[15] + 530742520) | 0;
140 | c = (((c << 16) | (c >>> 16)) + d) | 0;
141 | b += ((c ^ d ^ a) + k[2] - 995338651) | 0;
142 | b = (((b << 23) | (b >>> 9)) + c) | 0;
143 | a += ((c ^ (b | ~d)) + k[0] - 198630844) | 0;
144 | a = (((a << 6) | (a >>> 26)) + b) | 0;
145 | d += ((b ^ (a | ~c)) + k[7] + 1126891415) | 0;
146 | d = (((d << 10) | (d >>> 22)) + a) | 0;
147 | c += ((a ^ (d | ~b)) + k[14] - 1416354905) | 0;
148 | c = (((c << 15) | (c >>> 17)) + d) | 0;
149 | b += ((d ^ (c | ~a)) + k[5] - 57434055) | 0;
150 | b = (((b << 21) | (b >>> 11)) + c) | 0;
151 | a += ((c ^ (b | ~d)) + k[12] + 1700485571) | 0;
152 | a = (((a << 6) | (a >>> 26)) + b) | 0;
153 | d += ((b ^ (a | ~c)) + k[3] - 1894986606) | 0;
154 | d = (((d << 10) | (d >>> 22)) + a) | 0;
155 | c += ((a ^ (d | ~b)) + k[10] - 1051523) | 0;
156 | c = (((c << 15) | (c >>> 17)) + d) | 0;
157 | b += ((d ^ (c | ~a)) + k[1] - 2054922799) | 0;
158 | b = (((b << 21) | (b >>> 11)) + c) | 0;
159 | a += ((c ^ (b | ~d)) + k[8] + 1873313359) | 0;
160 | a = (((a << 6) | (a >>> 26)) + b) | 0;
161 | d += ((b ^ (a | ~c)) + k[15] - 30611744) | 0;
162 | d = (((d << 10) | (d >>> 22)) + a) | 0;
163 | c += ((a ^ (d | ~b)) + k[6] - 1560198380) | 0;
164 | c = (((c << 15) | (c >>> 17)) + d) | 0;
165 | b += ((d ^ (c | ~a)) + k[13] + 1309151649) | 0;
166 | b = (((b << 21) | (b >>> 11)) + c) | 0;
167 | a += ((c ^ (b | ~d)) + k[4] - 145523070) | 0;
168 | a = (((a << 6) | (a >>> 26)) + b) | 0;
169 | d += ((b ^ (a | ~c)) + k[11] - 1120210379) | 0;
170 | d = (((d << 10) | (d >>> 22)) + a) | 0;
171 | c += ((a ^ (d | ~b)) + k[2] + 718787259) | 0;
172 | c = (((c << 15) | (c >>> 17)) + d) | 0;
173 | b += ((d ^ (c | ~a)) + k[9] - 343485551) | 0;
174 | b = (((b << 21) | (b >>> 11)) + c) | 0;
175 | x[0] = (a + x[0]) | 0;
176 | x[1] = (b + x[1]) | 0;
177 | x[2] = (c + x[2]) | 0;
178 | x[3] = (d + x[3]) | 0;
179 | }
180 | function md5blk(s) {
181 | var md5blks = [],
182 | i;
183 | for (i = 0; i < 64; i += 4) {
184 | md5blks[i >> 2] =
185 | s.charCodeAt(i) +
186 | (s.charCodeAt(i + 1) << 8) +
187 | (s.charCodeAt(i + 2) << 16) +
188 | (s.charCodeAt(i + 3) << 24);
189 | }
190 | return md5blks;
191 | }
192 | function md5blk_array(a) {
193 | var md5blks = [],
194 | i;
195 | for (i = 0; i < 64; i += 4) {
196 | md5blks[i >> 2] =
197 | a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24);
198 | }
199 | return md5blks;
200 | }
201 | function md51(s) {
202 | var n = s.length,
203 | state = [1732584193, -271733879, -1732584194, 271733878],
204 | i,
205 | length,
206 | tail,
207 | tmp,
208 | lo,
209 | hi;
210 | for (i = 64; i <= n; i += 64) {
211 | md5cycle(state, md5blk(s.substring(i - 64, i)));
212 | }
213 | s = s.substring(i - 64);
214 | length = s.length;
215 | tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
216 | for (i = 0; i < length; i += 1) {
217 | tail[i >> 2] |= s.charCodeAt(i) << (i % 4 << 3);
218 | }
219 | tail[i >> 2] |= 128 << (i % 4 << 3);
220 | if (i > 55) {
221 | md5cycle(state, tail);
222 | for (i = 0; i < 16; i += 1) {
223 | tail[i] = 0;
224 | }
225 | }
226 | tmp = n * 8;
227 | tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
228 | lo = parseInt(tmp[2], 16);
229 | hi = parseInt(tmp[1], 16) || 0;
230 | tail[14] = lo;
231 | tail[15] = hi;
232 | md5cycle(state, tail);
233 | return state;
234 | }
235 | function md51_array(a) {
236 | var n = a.length,
237 | state = [1732584193, -271733879, -1732584194, 271733878],
238 | i,
239 | length,
240 | tail,
241 | tmp,
242 | lo,
243 | hi;
244 | for (i = 64; i <= n; i += 64) {
245 | md5cycle(state, md5blk_array(a.subarray(i - 64, i)));
246 | }
247 | a = i - 64 < n ? a.subarray(i - 64) : new Uint8Array(0);
248 | length = a.length;
249 | tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
250 | for (i = 0; i < length; i += 1) {
251 | tail[i >> 2] |= a[i] << (i % 4 << 3);
252 | }
253 | tail[i >> 2] |= 128 << (i % 4 << 3);
254 | if (i > 55) {
255 | md5cycle(state, tail);
256 | for (i = 0; i < 16; i += 1) {
257 | tail[i] = 0;
258 | }
259 | }
260 | tmp = n * 8;
261 | tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
262 | lo = parseInt(tmp[2], 16);
263 | hi = parseInt(tmp[1], 16) || 0;
264 | tail[14] = lo;
265 | tail[15] = hi;
266 | md5cycle(state, tail);
267 | return state;
268 | }
// Render one 32-bit word of the MD5 state as 8 hex chars, least-significant
// byte first (MD5 digests are little-endian).
function rhex(n) {
  var s = "",
    j;
  for (j = 0; j < 4; j += 1) {
    s += hex_chr[(n >> (j * 8 + 4)) & 15] + hex_chr[(n >> (j * 8)) & 15];
  }
  return s;
}
// Convert the 4-word MD5 state to the 32-char hex digest.
// NOTE: mutates `x` in place (each word is replaced by its hex string)
// before joining — callers must not reuse the state array afterwards.
function hex(x) {
  var i;
  for (i = 0; i < x.length; i += 1) {
    x[i] = rhex(x[i]);
  }
  return x.join("");
}
284 | if (hex(md51("hello")) !== "5d41402abc4b2a76b9719d911017c592") {
285 | add32 = function (x, y) {
286 | var lsw = (x & 65535) + (y & 65535),
287 | msw = (x >> 16) + (y >> 16) + (lsw >> 16);
288 | return (msw << 16) | (lsw & 65535);
289 | };
290 | }
291 | if (typeof ArrayBuffer !== "undefined" && !ArrayBuffer.prototype.slice) {
292 | (function () {
293 | function clamp(val, length) {
294 | val = val | 0 || 0;
295 | if (val < 0) {
296 | return Math.max(val + length, 0);
297 | }
298 | return Math.min(val, length);
299 | }
300 | ArrayBuffer.prototype.slice = function (from, to) {
301 | var length = this.byteLength,
302 | begin = clamp(from, length),
303 | end = length,
304 | num,
305 | target,
306 | targetArray,
307 | sourceArray;
308 | if (to !== undefined) {
309 | end = clamp(to, length);
310 | }
311 | if (begin > end) {
312 | return new ArrayBuffer(0);
313 | }
314 | num = end - begin;
315 | target = new ArrayBuffer(num);
316 | targetArray = new Uint8Array(target);
317 | sourceArray = new Uint8Array(this, begin, num);
318 | targetArray.set(sourceArray);
319 | return target;
320 | };
321 | })();
322 | }
// Re-encode a JS string containing non-ASCII characters as a "binary string"
// of UTF-8 bytes (one char per byte), via the classic
// unescape(encodeURIComponent(...)) trick; ASCII-only input is returned as-is.
function toUtf8(str) {
  if (/[\u0080-\uFFFF]/.test(str)) {
    str = unescape(encodeURIComponent(str));
  }
  return str;
}
329 | function utf8Str2ArrayBuffer(str, returnUInt8Array) {
330 | var length = str.length,
331 | buff = new ArrayBuffer(length),
332 | arr = new Uint8Array(buff),
333 | i;
334 | for (i = 0; i < length; i += 1) {
335 | arr[i] = str.charCodeAt(i);
336 | }
337 | return returnUInt8Array ? arr : buff;
338 | }
339 | function arrayBuffer2Utf8Str(buff) {
340 | return String.fromCharCode.apply(null, new Uint8Array(buff));
341 | }
342 | function concatenateArrayBuffers(first, second, returnUInt8Array) {
343 | var result = new Uint8Array(first.byteLength + second.byteLength);
344 | result.set(new Uint8Array(first));
345 | result.set(new Uint8Array(second), first.byteLength);
346 | return returnUInt8Array ? result : result.buffer;
347 | }
// Convert a hex digest into a raw "binary string": every pair of hex digits
// becomes one character whose code is that byte value (used for raw output).
function hexToBinaryString(hex) {
  var bytes = [],
    length = hex.length,
    x;
  for (x = 0; x < length - 1; x += 2) {
    bytes.push(parseInt(hex.substr(x, 2), 16));
  }
  return String.fromCharCode.apply(String, bytes);
}
357 | function SparkMD5() {
358 | this.reset();
359 | }
360 | SparkMD5.prototype.append = function (str) {
361 | this.appendBinary(toUtf8(str));
362 | return this;
363 | };
364 | SparkMD5.prototype.appendBinary = function (contents) {
365 | this._buff += contents;
366 | this._length += contents.length;
367 | var length = this._buff.length,
368 | i;
369 | for (i = 64; i <= length; i += 64) {
370 | md5cycle(this._hash, md5blk(this._buff.substring(i - 64, i)));
371 | }
372 | this._buff = this._buff.substring(i - 64);
373 | return this;
374 | };
375 | SparkMD5.prototype.end = function (raw) {
376 | var buff = this._buff,
377 | length = buff.length,
378 | i,
379 | tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
380 | ret;
381 | for (i = 0; i < length; i += 1) {
382 | tail[i >> 2] |= buff.charCodeAt(i) << (i % 4 << 3);
383 | }
384 | this._finish(tail, length);
385 | ret = hex(this._hash);
386 | if (raw) {
387 | ret = hexToBinaryString(ret);
388 | }
389 | this.reset();
390 | return ret;
391 | };
392 | SparkMD5.prototype.reset = function () {
393 | this._buff = "";
394 | this._length = 0;
395 | this._hash = [1732584193, -271733879, -1732584194, 271733878];
396 | return this;
397 | };
398 | SparkMD5.prototype.getState = function () {
399 | return { buff: this._buff, length: this._length, hash: this._hash };
400 | };
401 | SparkMD5.prototype.setState = function (state) {
402 | this._buff = state.buff;
403 | this._length = state.length;
404 | this._hash = state.hash;
405 | return this;
406 | };
407 | SparkMD5.prototype.destroy = function () {
408 | delete this._hash;
409 | delete this._buff;
410 | delete this._length;
411 | };
412 | SparkMD5.prototype._finish = function (tail, length) {
413 | var i = length,
414 | tmp,
415 | lo,
416 | hi;
417 | tail[i >> 2] |= 128 << (i % 4 << 3);
418 | if (i > 55) {
419 | md5cycle(this._hash, tail);
420 | for (i = 0; i < 16; i += 1) {
421 | tail[i] = 0;
422 | }
423 | }
424 | tmp = this._length * 8;
425 | tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
426 | lo = parseInt(tmp[2], 16);
427 | hi = parseInt(tmp[1], 16) || 0;
428 | tail[14] = lo;
429 | tail[15] = hi;
430 | md5cycle(this._hash, tail);
431 | };
432 | SparkMD5.hash = function (str, raw) {
433 | return SparkMD5.hashBinary(toUtf8(str), raw);
434 | };
435 | SparkMD5.hashBinary = function (content, raw) {
436 | var hash = md51(content),
437 | ret = hex(hash);
438 | return raw ? hexToBinaryString(ret) : ret;
439 | };
440 | SparkMD5.ArrayBuffer = function () {
441 | this.reset();
442 | };
443 | SparkMD5.ArrayBuffer.prototype.append = function (arr) {
444 | var buff = concatenateArrayBuffers(this._buff.buffer, arr, true),
445 | length = buff.length,
446 | i;
447 | this._length += arr.byteLength;
448 | for (i = 64; i <= length; i += 64) {
449 | md5cycle(this._hash, md5blk_array(buff.subarray(i - 64, i)));
450 | }
451 | this._buff =
452 | i - 64 < length
453 | ? new Uint8Array(buff.buffer.slice(i - 64))
454 | : new Uint8Array(0);
455 | return this;
456 | };
457 | SparkMD5.ArrayBuffer.prototype.end = function (raw) {
458 | var buff = this._buff,
459 | length = buff.length,
460 | tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
461 | i,
462 | ret;
463 | for (i = 0; i < length; i += 1) {
464 | tail[i >> 2] |= buff[i] << (i % 4 << 3);
465 | }
466 | this._finish(tail, length);
467 | ret = hex(this._hash);
468 | if (raw) {
469 | ret = hexToBinaryString(ret);
470 | }
471 | this.reset();
472 | return ret;
473 | };
474 | SparkMD5.ArrayBuffer.prototype.reset = function () {
475 | this._buff = new Uint8Array(0);
476 | this._length = 0;
477 | this._hash = [1732584193, -271733879, -1732584194, 271733878];
478 | return this;
479 | };
480 | SparkMD5.ArrayBuffer.prototype.getState = function () {
481 | var state = SparkMD5.prototype.getState.call(this);
482 | state.buff = arrayBuffer2Utf8Str(state.buff);
483 | return state;
484 | };
485 | SparkMD5.ArrayBuffer.prototype.setState = function (state) {
486 | state.buff = utf8Str2ArrayBuffer(state.buff, true);
487 | return SparkMD5.prototype.setState.call(this, state);
488 | };
489 | SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy;
490 | SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish;
491 | SparkMD5.ArrayBuffer.hash = function (arr, raw) {
492 | var hash = md51_array(new Uint8Array(arr)),
493 | ret = hex(hash);
494 | return raw ? hexToBinaryString(ret) : ret;
495 | };
496 | return SparkMD5;
497 | });
498 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # 大文件的分片上传、断点续传及其相关拓展
2 |
3 | ### 大文件分片上传核心方法
4 |
5 |
6 | * 在JavaScript中,文件File对象是Blob对象的子类,Blob对象包含一个重要的方法slice,通过这个方法,我们就可以对二进制文件进行拆分
7 | * 使用 FormData 格式进行上传
8 | * 服务端接口接受到数据,通过 multiparty 库对数据进行处理
9 | * 区分 files 和 fields,通过 fse.move 将上传的文件移动到目标路径下
10 | * 客户端使用 Promise.all 方法,当监听到所有切片已上传完,调用 merge 接口,通知服务端进行切片的合并
11 | * 使用 Stream 对切片边读边写,设置可写流的 start
12 | * Promise.all判断所有切片是否写入完毕
13 |
14 | ### 进度条
15 |
16 | * 使用浏览器 XMLHttpRequest 的 onprogress 的方法对进度进行监听
17 |
18 | ```js
19 | // 作为request的入参
20 | const xhr = new XMLHttpRequest();
21 | xhr.upload.onprogress = onProgress;
22 | // 回调方法
23 | onProgress: this.createProgressHandler(this.data[index])
24 | // 接受回调,通过 e.loaded 和 e.total 获取进度
25 | createProgressHandler(item) {
26 | return (e) => {
27 | item.percentage = parseInt(String((e.loaded / e.total) * 100));
28 | };
29 | },
30 | ```
31 |
32 | ### 断点续传核心方法
33 |
34 | #### 通过xhr的 abort 方法,主动放弃当前请求
35 |
36 | ```js
37 | this.requestList.forEach((xhr) => xhr?.abort());
38 | ```
39 |
40 | #### 拓展:断点续传服务端做法
41 |
42 | * 当用户在听一首歌的时候,如果听到一半(网络下载了一半),网络断掉了,用户需要继续听的时候,文件服务器不支持断点的话,则用户需要重新下载这个文件。而Range支持的话,客户端应该记录了之前已经读取的文件范围,网络恢复之后,则向服务器发送读取剩余Range的请求,服务端只需要发送客户端请求的那部分内容,而不用整个文件发送回客户端,以此节省网络带宽。
43 |
44 | * 如果Server支持Range,首先就要告诉客户端,咱支持Range,之后客户端才可能发起带Range的请求。这里套用唐僧的一句话,你不说我怎么知道呢。response.setHeader('Accept-Ranges', 'bytes');
45 |
46 | * Server通过请求头中的Range: bytes=0-xxx来判断是否是做Range请求,如果这个值存在而且有效,则只发回请求的那部分文件内容,响应的状态码变成206,表示Partial Content,并设置Content-Range。如果无效,则返回416状态码,表明Request Range Not Satisfiable(http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4.17 )。如果不包含Range的请求头,则继续通过常规的方式响应。
47 |
48 | ```js
49 | getStream(req, res, filepath, fileStat) {
50 | res.setHeader('Accept-Ranges', 'bytes'); //告诉客户端服务器支持Range
51 | let range = req.headers['range'];
52 | let start = 0;
53 | let end = fileStat.size;
54 | if (range) {
55 | let reg = /bytes=(\d*)-(\d*)/;
56 | let result = range.match(reg);
57 | if (result) {
58 | start = isNaN(result[1]) ? 0 : parseInt(result[1]);
59 | end = isNaN(result[2]) ? 0 : parseInt(result[2]);
60 | }
61 | };
62 | debug(`start=${start},end=${end}`);
63 | return fs.createReadStream(filepath, {
64 | start,
65 | end
66 | });
67 | }
68 | ```
69 |
70 | ### 提高篇
71 |
72 | 1. `时间切片计算文件hash:`计算hash耗时的问题,不仅可以通过web-workder,还可以参考React的Fiber架构,通过requestIdleCallback来利用浏览器的空闲时间计算,也不会卡死主线程
73 | 1. `抽样hash:`文件hash的计算,是为了判断文件是否存在,进而实现秒传的功能,所以我们可以参考布隆过滤器的理念, 牺牲一点点的识别率来换取时间,比如我们可以抽样算hash
74 | 1. `根据文件名 + 文件修改时间 + size 生成hash`
75 | 1. `网络请求并发控制:`大文件由于切片过多,过多的HTTP链接过去,也会把浏览器打挂, 我们可以通过控制异步请求的并发数来解决,这也是头条的一个面试题
76 | 1. `慢启动策略:`由于文件大小不一,我们每个切片的大小设置成固定的也有点略显笨拙,我们可以参考TCP协议的`慢启动`策略, 设置一个初始大小,根据上传任务完成的时候,来动态调整下一个切片的大小, 确保文件切片的大小和当前网速匹配
77 | 1. `并发重试+报错:`并发上传中,报错如何重试,比如每个切片我们允许重试两次,三次再终止
78 | 1. `文件碎片清理`
79 |
80 | #### 1、时间切片计算文件hash
81 |
82 | 其实就是`time-slice`概念,React中Fiber架构的核心理念,利用浏览器的空闲时间,计算大的diff过程,中途又任何的高优先级任务,比如动画和输入,都会中断diff任务, 虽然整个计算量没有减小,但是大大提高了用户的交互体验
83 |
84 | ##### requestIdleCallback
85 |
86 | 
87 |
88 | ```js
89 | requestIdleCallback(myNonEssentialWork);
90 |
91 | function myNonEssentialWork (deadline) {
92 | // deadline.timeRemaining()可以获取到当前帧剩余时间
93 | // 当前帧还有时间 并且任务队列不为空
94 | while (deadline.timeRemaining() > 0 && tasks.length > 0) {
95 | doWorkIfNeeded();
96 | }
97 | if (tasks.length > 0){
98 | requestIdleCallback(myNonEssentialWork);
99 | }
100 | }
101 | ```
102 |
103 | #### 2、抽样hash
104 |
105 | 计算文件md5值的作用,无非就是为了判定文件是否存在,我们可以考虑设计一个抽样的hash,牺牲一些命中率的同时,提升效率,设计思路如下
106 |
107 | 1. 文件切成大小为 XXX Mb的切片
108 | 1. 第一个和最后一个切片全部内容,其他切片的取 首中尾三个地方各2个字节
109 | 1. 合并后的内容,计算md5,称之为影分身Hash
110 | 1. 这个hash的结果,就是文件存在,有小概率误判,但是如果不存在,是100%准的,和`布隆过滤器`的思路有些相似, 可以考虑两个hash配合使用
111 | 1. 我在自己电脑上试了下1.5G的文件,全量大概要20秒,抽样大概1秒还是很不错的, 可以先用来判断文件是不是不存在
112 |
113 | 
114 |
115 | #### 3、根据文件名 + 文件修改时间 + size 生成hash
116 |
117 | 可根据File的lastModified、name、size生成hash,避免通过spark-md5对大文件进行hash计算,大大的节省时间
118 |
119 | ```
120 | lastModified: 1633436262311
121 | lastModifiedDate: Tue Oct 05 2021 20:17:42 GMT+0800 (中国标准时间) {}
122 | name: "2021.docx"
123 | size: 1696681
124 | type: "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
125 | ```
126 |
127 | #### 4、网络请求并发控制
128 |
129 | 大文件hash计算后,一次发几百个http请求,计算哈希没卡,结果TCP建立的过程就把浏览器弄死了
130 |
131 | 思路其实也不难,就是我们把异步请求放在一个队列里,比如并发数是3,就先同时发起3个请求,然后有请求结束了,再发起下一个请求即可
132 |
133 | 我们通过并发数max来管理并发数,发起一个请求max--,结束一个请求max++即可
134 |
135 | ```js
136 | async sendRequest(forms, max=4) {
137 | return new Promise(resolve => {
138 | const len = forms.length;
139 | let idx = 0;
140 | let counter = 0;
141 | const start = async ()=> {
142 | // 有请求,有通道
143 | while (idx < len && max > 0) {
144 | max--; // 占用通道
145 | console.log(idx, "start");
146 | const form = forms[idx].form;
147 | const index = forms[idx].index;
148 | idx++
149 | request({
150 | url: '/upload',
151 | data: form,
152 | onProgress: this.createProgresshandler(this.chunks[index]),
153 | requestList: this.requestList
154 | }).then(() => {
155 | max++; // 释放通道
156 | counter++;
157 | if (counter === len) {
158 | resolve();
159 | } else {
160 | start();
161 | }
162 | });
163 | }
164 | }
165 | start();
166 | });
167 | }
168 | ```
169 |
170 | #### 5、慢启动策略实现
171 |
172 | 1. chunk中带上size值,不过进度条数量不确定了,修改createFileChunk, 请求加上时间统计
173 | 1. 比如我们理想是30秒传递一个
174 | 1. 初始大小定为1M,如果上传花了10秒,那下一个区块大小变成3M
175 | 1. 如果上传花了60秒,那下一个区块大小变成500KB 以此类推
176 |
177 | #### 6、并发重试+报错
178 |
179 | 1. 请求出错.catch 把任务重新放在队列中
180 | 1. 出错后progress设置为-1 进度条显示红色
181 | 1. 数组存储每个文件hash请求的重试次数,做累加 比如[1,0,2],就是第0个文件切片报错1次,第2个报错2次
182 | 1. 超过3次的直接reject
183 |
184 | #### 7、服务器碎片文件清理
185 |
186 | 如果很多人传了一半就离开了,这些切片存在就没意义了,可以考虑定期清理
187 |
188 | 我们可以使用 `node-schedule` 来管理定时任务,比如我们每天扫一次存放文件的目录,如果文件的修改时间是一个月以前,就直接删除吧
189 |
190 | ```js
191 | // 为了方便测试,我改成每5秒扫一次,过期1分钟的删除做演示
192 | const fse = require('fs-extra')
193 | const path = require('path')
194 | const schedule = require('node-schedule')
195 |
196 |
197 | // 删除过期的碎片文件
198 | function remove(file,stats){
199 | const now = new Date().getTime()
200 | const offset = now - stats.ctimeMs
201 | if(offset>1000*60){
202 | // 大于60秒的碎片
203 | console.log(file,'过期了,浪费空间的玩意,删除')
204 | fse.unlinkSync(file)
205 | }
206 | }
207 |
208 | async function scan(dir,callback){
209 | const files = fse.readdirSync(dir)
210 | files.forEach(filename=>{
211 | const fileDir = path.resolve(dir,filename)
212 | const stats = fse.statSync(fileDir)
213 | if(stats.isDirectory()){
214 | return scan(fileDir,remove)
215 | }
216 | if(callback){
217 | callback(fileDir,stats)
218 | }
219 | })
220 | }
221 | // * * * * * *
222 | // ┬ ┬ ┬ ┬ ┬ ┬
223 | // │ │ │ │ │ │
224 | // │ │ │ │ │ └ day of week (0 - 7) (0 or 7 is Sun)
225 | // │ │ │ │ └───── month (1 - 12)
226 | // │ │ │ └────────── day of month (1 - 31)
227 | // │ │ └─────────────── hour (0 - 23)
228 | // │ └──────────────────── minute (0 - 59)
229 | // └───────────────────────── second (0 - 59, OPTIONAL)
230 | let start = function(UPLOAD_DIR){
231 | // 每5秒
232 | schedule.scheduleJob("*/5 * * * * *",function(){
233 | console.log('开始扫描')
234 | scan(UPLOAD_DIR)
235 | })
236 | }
237 | exports.start = start
238 | ```
239 |
240 | ### 客户端核心代码
241 |
242 | ```html
243 |
244 |
245 |
246 |
251 | 上传
254 | 恢复
257 | 暂停
263 |
264 |
265 |
计算文件 hash
266 |
267 |
总进度
268 |
269 |
270 |
271 |
276 |
277 |
278 | {{ row.size | transformByte }}
279 |
280 |
281 |
282 |
283 |
287 |
288 |
289 |
290 |
291 |
292 |
293 |
519 | ```
520 |
521 | ### 服务端核心代码
522 |
523 | index.js
524 |
525 | ```js
526 | const Controller = require("./controller");
527 | const http = require("http");
528 | const server = http.createServer();
529 |
530 | const controller = new Controller();
531 |
532 | server.on("request", async (req, res) => {
533 | res.setHeader("Access-Control-Allow-Origin", "*");
534 | res.setHeader("Access-Control-Allow-Headers", "*");
535 | if (req.method === "OPTIONS") {
536 | res.statusCode = 200;
537 | res.end();
538 | return;
539 | }
540 | if (req.url === "/verify") {
541 | await controller.handleVerifyUpload(req, res);
542 | return;
543 | }
544 |
545 | if (req.url === "/merge") {
546 | await controller.handleMerge(req, res);
547 | return;
548 | }
549 |
550 | if (req.url === "/") {
551 | await controller.handleFormData(req, res);
552 | }
553 | });
554 |
555 | server.listen(3000, () => console.log("正在监听 3000 端口"));
556 | ```
557 |
558 | controller.js
559 |
560 | ```js
561 | const multiparty = require("multiparty");
562 | const path = require("path");
563 | const fse = require("fs-extra");
564 |
565 | const extractExt = (filename) =>
566 | filename.slice(filename.lastIndexOf("."), filename.length); // 提取后缀名
567 | const UPLOAD_DIR = path.resolve(__dirname, "..", "target"); // 大文件存储目录
568 |
569 | const pipeStream = (path, writeStream) =>
570 | new Promise((resolve) => {
571 | const readStream = fse.createReadStream(path);
572 | readStream.on("end", () => {
573 | fse.unlinkSync(path);
574 | resolve();
575 | });
576 | readStream.pipe(writeStream);
577 | });
578 |
579 | // 合并切片
580 | const mergeFileChunk = async (filePath, fileHash, size) => {
581 | const chunkDir = path.resolve(UPLOAD_DIR, fileHash);
582 | const chunkPaths = await fse.readdir(chunkDir);
583 | // 根据切片下标进行排序
584 | // 否则直接读取目录的获得的顺序可能会错乱
585 | chunkPaths.sort((a, b) => a.split("-")[1] - b.split("-")[1]);
586 | await Promise.all(
587 | chunkPaths.map((chunkPath, index) =>
588 | pipeStream(
589 | path.resolve(chunkDir, chunkPath),
590 | // 指定位置创建可写流
591 | fse.createWriteStream(filePath, {
592 | start: index * size,
593 | end: (index + 1) * size,
594 | })
595 | )
596 | )
597 | );
598 | fse.rmdirSync(chunkDir); // 合并后删除保存切片的目录
599 | };
600 |
601 | const resolvePost = (req) =>
602 | new Promise((resolve) => {
603 | let chunk = "";
604 | req.on("data", (data) => {
605 | chunk += data;
606 | });
607 | req.on("end", () => {
608 | resolve(JSON.parse(chunk));
609 | });
610 | });
611 |
612 | // 返回已经上传切片名
613 | const createUploadedList = async (fileHash) =>
614 | fse.existsSync(path.resolve(UPLOAD_DIR, fileHash))
615 | ? await fse.readdir(path.resolve(UPLOAD_DIR, fileHash))
616 | : [];
617 |
618 | module.exports = class {
619 | // 合并切片
620 | async handleMerge(req, res) {
621 | const data = await resolvePost(req);
622 | const { fileHash, filename, size } = data;
623 | const ext = extractExt(filename);
624 | const filePath = path.resolve(UPLOAD_DIR, `${fileHash}${ext}`);
625 | await mergeFileChunk(filePath, fileHash, size);
626 | res.end(
627 | JSON.stringify({
628 | code: 0,
629 | message: "file merged success",
630 | })
631 | );
632 | }
633 | // 处理切片
634 | async handleFormData(req, res) {
635 | const multipart = new multiparty.Form();
636 |
637 | multipart.parse(req, async (err, fields, files) => {
638 | if (err) {
639 | console.error(err);
640 | res.statusCode = 500;
641 | res.end("process file chunk failed");
642 | return;
643 | }
644 | const [chunk] = files.chunk;
645 | const [hash] = fields.hash;
646 | const [fileHash] = fields.fileHash;
647 | const [filename] = fields.filename;
648 | const filePath = path.resolve(
649 | UPLOAD_DIR,
650 | `${fileHash}${extractExt(filename)}`
651 | );
652 | const chunkDir = path.resolve(UPLOAD_DIR, fileHash);
653 |
654 | // 文件存在直接返回
655 | if (fse.existsSync(filePath)) {
656 | res.end("file exist");
657 | return;
658 | }
659 |
660 | // 切片目录不存在,创建切片目录
661 | if (!fse.existsSync(chunkDir)) {
662 | await fse.mkdirs(chunkDir);
663 | }
664 | // fs-extra 专用方法,类似 fs.rename 并且跨平台
665 | // fs-extra 的 rename 方法 windows 平台会有权限问题
666 | // https://github.com/meteor/meteor/issues/7852#issuecomment-255767835
667 | await fse.move(chunk.path, path.resolve(chunkDir, hash));
668 | res.end("received file chunk");
669 | });
670 | }
671 | // 验证是否已上传/已上传切片下标
672 | async handleVerifyUpload(req, res) {
673 | const data = await resolvePost(req);
674 | const { fileHash, filename } = data;
675 | const ext = extractExt(filename);
676 | const filePath = path.resolve(UPLOAD_DIR, `${fileHash}${ext}`);
677 | if (fse.existsSync(filePath)) {
678 | res.end(
679 | JSON.stringify({
680 | shouldUpload: false,
681 | })
682 | );
683 | } else {
684 | res.end(
685 | JSON.stringify({
686 | shouldUpload: true,
687 | uploadedList: await createUploadedList(fileHash),
688 | })
689 | );
690 | }
691 | }
692 | };
693 | ```
694 |
695 | ### 完整代码
696 |
697 | [https://github.com/miracle90/big-file-upload](https://github.com/miracle90/big-file-upload)
698 |
699 | ### 参考链接
700 |
701 | * [字节跳动面试官:请你实现一个大文件上传和断点续传](https://juejin.cn/post/6844904046436843527)
702 | * [字节跳动面试官,我也实现了大文件上传和断点续传](https://juejin.cn/post/6844904055819468808#heading-4)
703 | * [前端上传大文件怎么处理](https://juejin.cn/post/7053658552472174605#heading-7)
704 |
705 |
706 |
--------------------------------------------------------------------------------