Skip to content

Commit a3a4d24

Browse files
committed
fix(upload): 文件合并后损坏的问题
1 parent 6121cf2 commit a3a4d24

File tree

3 files changed

+31
-26
lines changed

3 files changed

+31
-26
lines changed

file-upload/public/generate-hash.js

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,11 @@
11
self.importScripts('/spark-md5.min.js');
22

33
self.onmessage = e => {
4-
const { fileChunkList, fileSize } = e.data;
4+
const { fileChunkList, fileSize, chunkSize } = e.data;
55
const spark = new self.SparkMD5.ArrayBuffer();
66
let count = 0;
77
let percentage = 0;
88
let fileReader = new FileReader();
9-
let size = fileChunkList[0].file.size;
109
fileReader.onload = e => {
1110
spark.append(e.target.result); // Append array buffer
1211
count++;
@@ -22,9 +21,9 @@ self.onmessage = e => {
2221
};
2322

2423
fileReader.onprogress = e => {
25-
// console.log('计算文件hash进度', (e.loaded + size * count) / fileSize * 100);
24+
// console.log('计算文件hash进度', (e.loaded + chunkSize * count) / fileSize * 100);
2625
self.postMessage({
27-
percentage: (e.loaded + size * count) / fileSize * 100
26+
percentage: (e.loaded + chunkSize * count) / fileSize * 100
2827
});
2928
};
3029

file-upload/server/controller.js

Lines changed: 22 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -17,28 +17,34 @@ const resolvePost = req =>
1717
});
1818
});
1919

20-
const pipStream = (path, writeStream) =>
21-
new Promise(resolve => {
22-
const readStream = fse.createReadStream(path);
23-
writeStream.on('finish', () => {
24-
writeStream.end();
25-
fse.unlinkSync(path);
20+
const pipeStream = (path, writeStream, chunkSize) =>
21+
new Promise((resolve, reject) => {
22+
const readStream = fse.createReadStream(path, {
23+
highWaterMark: chunkSize
24+
});
25+
readStream.on('end', () => {
26+
// fse.unlinkSync(path);
27+
readStream.unpipe();
2628
resolve();
2729
});
28-
readStream.pipe(writeStream);
30+
readStream.pipe(writeStream, { end: false });
31+
readStream.on('error', reject);
2932
});
3033

31-
const mergeFileChunk = async (fileHash, fileName, size) => {
34+
const mergeFileChunk = async (fileHash, fileName, chunkSize) => {
3235
const filePath = path.resolve(UPLOAD_DIR, `${fileHash}${extractExt(fileName)}`);
3336
const chunkDir = path.resolve(UPLOAD_DIR, fileHash);
3437
const chunkPaths = await fse.readdir(chunkDir);
3538
chunkPaths.sort((a, b) => a.slice(a.lastIndexOf('-') + 1) - b.slice(b.lastIndexOf('-') + 1));
36-
await Promise.all(chunkPaths.map((chunkPath, index) =>
37-
pipStream(path.resolve(chunkDir, chunkPath), fse.createWriteStream(filePath, {
38-
start: index * size
39-
}))
40-
));
41-
fse.rmdirSync(chunkDir); // 合并完成后删除切片目录
39+
const writeStream = fse.createWriteStream(filePath);
40+
await Promise.all(chunkPaths.map((chunkPath, index) =>
41+
pipeStream(path.resolve(chunkDir, chunkPath), writeStream, chunkSize)
42+
)).then(() => {
43+
// close the stream to prevent memory leaks
44+
writeStream.close();
45+
return Promise.resolve(filePath);
46+
})
47+
// fse.rmdirSync(chunkDir); // 合并完成后删除切片目录
4248
};
4349

4450
module.exports = {
@@ -47,8 +53,8 @@ module.exports = {
4753
},
4854
async handleMerge (req, res) {
4955
const data = await resolvePost(req);
50-
const { fileHash, fileName, size } = data;
51-
await mergeFileChunk(fileHash, fileName, size);
56+
const { fileHash, fileName, chunkSize } = data;
57+
await mergeFileChunk(fileHash, fileName, chunkSize);
5258
res.end(JSON.stringify({
5359
code: 0,
5460
msg: 'file merged success'

file-upload/src/views/Home.vue

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,8 @@
1515
<script>
1616
import { Button } from 'element-ui';
1717
18-
// const SIZE = 10 * 1024 * 1024; // 切片大小
19-
const SIZE = 10 * 1024;
18+
// const CHUNK_SIZE = 10 * 1024 * 1024; // 切片大小
19+
const CHUNK_SIZE = 10 * 1024;
2020
2121
export default {
2222
data: () => ({
@@ -55,9 +55,9 @@ export default {
5555
let cur = 0;
5656
while (cur < file.size) {
5757
fileChunkList.push({
58-
file: file.slice(cur, cur + SIZE),
58+
file: file.slice(cur, cur + CHUNK_SIZE),
5959
});
60-
cur += SIZE;
60+
cur += CHUNK_SIZE;
6161
}
6262
return fileChunkList;
6363
},
@@ -95,7 +95,7 @@ export default {
9595
'Content-Type': 'application/json'
9696
},
9797
data: JSON.stringify({
98-
size: SIZE,
98+
chunkSize: CHUNK_SIZE,
9999
fileHash: this.container.hash,
100100
fileName: this.container.file.name
101101
})
@@ -105,7 +105,7 @@ export default {
105105
calculateHash (fileChunkList) {
106106
return new Promise (resolve => {
107107
const worker = new Worker('/generate-hash.js');
108-
worker.postMessage({ fileChunkList, fileSize: this.container.file.size });
108+
worker.postMessage({ fileChunkList, fileSize: this.container.file.size, chunkSize: CHUNK_SIZE });
109109
worker.onmessage = e => {
110110
const { percentage, hash } = e.data;
111111
this.hashPercentage = percentage;

0 commit comments

Comments (0)