Resumable upload, chunked upload, instant upload, using a Web Worker
import { useState } from "react";
import request from "./request";
import WorkerBuilder from "./worker-build";
import hashWorker from "./webWorker";
const [fileName, setFileName] = useState("");
const [fileHash, setFileHash] = useState("");
const [chunkList, setChunkList] = useState([]);
const [hashPercentage, setHashPercentage] = useState(0);
// Get the file extension
const getFileSuffix = (filename) => {
  const arr = filename.split(".");
  if (arr.length > 0) {
    return arr[arr.length - 1];
  }
  return "";
};
// Split the file into fixed-size chunks (default 1 MB)
const splitFile = (file, size = 1 * 1024 * 1024) => {
  const fileChunkList = [];
  let index = 0;
  while (index < file.size) {
    const chunk = file.slice(index, index + size);
    fileChunkList.push({ chunk });
    index += size;
  }
  return fileChunkList;
};
// File selection handler
const handleFileChange = (e) => {
  const { files } = e.target;
  if (files.length === 0) return;
  // Save the file name
  setFileName(files[0].name);
  // Split the file into chunks
  setChunkList(splitFile(files[0]));
};
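The original snippet never shows the markup that wires these handlers up; here is a minimal sketch, assuming they live in an ordinary React function component (handleUpload is defined just below):
return (
  <div>
    <input type="file" onChange={handleFileChange} />
    <button onClick={handleUpload}>Upload</button>
    <span>Hashing progress: {hashPercentage}%</span>
  </div>
);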
// Upload the file
const handleUpload = async () => {
  if (!fileName) {
    console.log("Please select a file first");
    return;
  }
  if (chunkList.length === 0) {
    console.log("Splitting the file, please wait…");
    return;
  }
  // Compute the file hash in a web worker
  const hash = await calculateHash(chunkList);
  console.log("File hash:", hash);
  setFileHash(hash);
  // Instant upload: ask the server whether the file (or some of its chunks) already exists
  const { shouldUpload, uploadedChunkList } = await verifyFileIsExist(hash, getFileSuffix(fileName));
  console.log(shouldUpload);
  if (!shouldUpload) {
    console.log("The file already exists on the server, no need to upload it again");
    return;
  }
  let uploadedChunkIndexList = [];
  if (uploadedChunkList && uploadedChunkList.length > 0) {
    // Each uploaded chunk is named "<fileHash>-<index>"; extract the indexes
    uploadedChunkIndexList = uploadedChunkList.map(item => {
      const arr = item.split("-");
      return parseInt(arr[arr.length - 1]);
    });
    console.log("Chunk indexes already uploaded:", uploadedChunkIndexList.toString());
  }
  // Resumable upload: skip the chunks that were already uploaded
  const chunksData = chunkList.map(({ chunk }, index) => ({
    chunk,
    hash: hash + "-" + index,
    progress: 0
  })).filter(item => {
    const arr = item.hash.split("-");
    return uploadedChunkIndexList.indexOf(parseInt(arr[arr.length - 1])) === -1;
  });
  console.log(chunksData);
  // Save the chunk data
  setChunkList(chunksData);
  // Start uploading the chunks
  uploadChunks(chunksData, hash);
};
// Instant upload: check whether the file already exists on the server
const verifyFileIsExist = async (fileHash, suffix) => {
  const { data } = await request({
    // Verification endpoint defined by the server code below
    url: "http://localhost:3001/verFileIsExist",
    headers: {
      "content-type": "application/json"
    },
    data: JSON.stringify({
      fileHash: fileHash,
      suffix: suffix
    })
  });
  return JSON.parse(data);
};
// Compute the file hash in a web worker
const calculateHash = (chunkList) => {
  return new Promise(resolve => {
    const worker = new WorkerBuilder(hashWorker);
    worker.postMessage({ chunkList: chunkList });
    worker.onmessage = e => {
      const { percentage, hash } = e.data;
      setHashPercentage(percentage);
      if (hash) {
        // Resolve once the hash has been fully computed
        resolve(hash);
      }
    };
  });
};
// Upload the chunks
const uploadChunks = async (chunksData, hash) => {
  const formDataList = chunksData.map(({ chunk, hash }) => {
    const formData = new FormData();
    formData.append("chunk", chunk);
    formData.append("hash", hash);
    formData.append("suffix", getFileSuffix(fileName));
    return { formData };
  });
  const requestList = formDataList.map(({ formData }, index) => {
    return request({
      // Upload endpoint defined by the server code below
      url: "http://localhost:3001/upload",
      data: formData,
      onprogress: e => {
        // Track per-chunk upload progress
        const list = [...chunksData];
        list[index].progress = parseInt(String((e.loaded / e.total) * 100));
        setChunkList(list);
      }
    });
  });
  // Once every chunk is uploaded, ask the server to merge them
  Promise.all(requestList).then(() => {
    mergeRequest(hash);
  });
};
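The flow above ends by calling mergeRequest, which the original snippet does not show. Here is a minimal sketch, assuming it simply posts the file hash, suffix, and chunk size to the server's /merge endpoint defined below; CHUNK_SIZE is a hypothetical constant that must match the default size used in splitFile:
// Hypothetical helper: ask the server to merge the uploaded chunks
const CHUNK_SIZE = 1 * 1024 * 1024; // assumed to equal the splitFile default
const mergeRequest = async (hash) => {
  await request({
    url: "http://localhost:3001/merge",
    headers: {
      "content-type": "application/json"
    },
    data: JSON.stringify({
      fileHash: hash,
      suffix: getFileSuffix(fileName),
      size: CHUNK_SIZE
    })
  });
  console.log("Upload finished, chunks merged on the server");
};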
/// The above is the client-side logic; next comes the web worker
webWorker.js
const hashWorker = () => {
  // spark-md5.min.js is assumed to be served at this URL by the front-end dev server
  self.importScripts("http://localhost:3000/spark-md5.min.js");
  self.onmessage = (e) => {
    const { chunkList } = e.data;
    const spark = new self.SparkMD5.ArrayBuffer();
    let percentage = 0, count = 0;
    const loadNext = index => {
      const reader = new FileReader();
      reader.readAsArrayBuffer(chunkList[index].chunk);
      reader.onload = event => {
        count++;
        spark.append(event.target.result);
        if (count === chunkList.length) {
          // All chunks hashed: report 100% plus the final hash, then close the worker
          self.postMessage({
            percentage: 100,
            hash: spark.end()
          });
          self.close();
        } else {
          percentage += 100 / chunkList.length;
          self.postMessage({
            percentage
          });
          // Read the next chunk
          loadNext(count);
        }
      };
    };
    loadNext(count);
  };
};
export default hashWorker;
worker-build.js
export default class WorkerBuilder extends Worker {
  constructor(worker) {
    // Stringify the worker function, wrap it in an IIFE, and load it from a Blob URL
    const code = worker.toString();
    const blob = new Blob([`(${code})()`]);
    return new Worker(URL.createObjectURL(blob));
  }
}
request.js
const request = ({
  url,
  method = "post",
  data,
  headers = {},
  onprogress
}) => {
  return new Promise(resolve => {
    const xhr = new XMLHttpRequest();
    xhr.open(method, url);
    Object.keys(headers).forEach(key => {
      xhr.setRequestHeader(key, headers[key]);
    });
    xhr.upload.onprogress = onprogress;
    xhr.send(data);
    xhr.onload = e => {
      resolve({
        data: e.target.response
      });
    };
  });
};
export default request;
/// ———— End of the client-side code
Server side
import express from "express";
import path from "path";
import fse from "fs-extra";
import multiparty from "multiparty";
import bodyParser from "body-parser";

const app = express();
const dirname = path.resolve(path.dirname(""));
// Directory where chunks and merged files are stored
const UPLOAD_FILES_DIR = path.resolve(dirname, "./filelist");
// JSON body parser for the verification and merge endpoints
const jsonParser = bodyParser.json({ extended: false });
// CORS headers
app.use(function (req, res, next) {
  res.setHeader("Access-Control-Allow-Origin", "*");
  res.setHeader("Access-Control-Allow-Headers", "*");
  next();
});
// Get the list of chunks already uploaded for a given file hash
const getUploadedChunkList = async (fileHash) => {
const isExist = fse.existsSync(path.resolve(UPLOAD_FILES_DIR, fileHash))
if (isExist) {
return await fse.readdir(path.resolve(UPLOAD_FILES_DIR, fileHash))
}
return []
}
// Instant upload / resume check: does the merged file or any of its chunks already exist?
app.post("/verFileIsExist", jsonParser, async (req, res) => {
  const { fileHash, suffix } = req.body;
  const filePath = path.resolve(UPLOAD_FILES_DIR, fileHash + "." + suffix);
  if (fse.existsSync(filePath)) {
    // The complete file is already on the server: no upload needed
    res.send({ code: 200, shouldUpload: false });
    return;
  }
  const list = await getUploadedChunkList(fileHash);
  if (list.length > 0) {
    // Some chunks were uploaded before: return them so the client can resume
    res.send({ code: 200, shouldUpload: true, uploadedChunkList: list });
    return;
  }
  res.send({ code: 200, shouldUpload: true, uploadedChunkList: [] });
});
// Receive a single chunk
app.post("/upload", async (req, res) => {
  const multipart = new multiparty.Form();
  multipart.parse(req, async (err, fields, files) => {
    if (err) return;
    const [chunk] = files.chunk;
    const [hash] = fields.hash;
    const [suffix] = fields.suffix;
    // hash is "<fileHash>-<chunkIndex>", so split it to recover the file hash
    const chunksDir = path.resolve(UPLOAD_FILES_DIR, hash.split("-")[0]);
    if (!fse.existsSync(chunksDir)) {
      await fse.mkdirs(chunksDir);
    }
    await fse.move(chunk.path, chunksDir + "/" + hash);
  });
  res.status(200).send("received file chunk");
});
// Pipe one chunk into the target write stream, then delete the chunk file
const pipeStream = (path, writeStream) => new Promise(resolve => {
  const readStream = fse.createReadStream(path);
  readStream.on("end", () => { fse.unlinkSync(path); resolve(); });
  readStream.pipe(writeStream);
});
// Merge the chunks into the final file
const mergeFileChunk = async (filePath, fileHash, size) => {
  const chunksDir = path.resolve(UPLOAD_FILES_DIR, fileHash);
  const chunkPaths = await fse.readdir(chunksDir);
  // Sort the chunks by their index ("<fileHash>-<index>")
  chunkPaths.sort((a, b) => a.split("-")[1] - b.split("-")[1]);
  console.log("Creating write streams at chunk offsets in", filePath);
  await Promise.all(
    chunkPaths.map((chunkPath, index) =>
      pipeStream(
        path.resolve(chunksDir, chunkPath),
        // Write each chunk at its own offset in the target file
        fse.createWriteStream(filePath, { start: index * size, end: (index + 1) * size })
      )
    )
  );
  // Remove the chunk directory once the merge is done
  fse.rmdirSync(chunksDir);
};
// Merge endpoint: called by the client after all chunks have been uploaded
app.post("/merge", jsonParser, async (req, res) => {
  const { fileHash, suffix, size } = req.body;
  const filePath = path.resolve(UPLOAD_FILES_DIR, fileHash + "." + suffix);
  await mergeFileChunk(filePath, fileHash, size);
  res.send({ code: 200, message: "success" });
});
app.listen(3001, () => { console.log("listen:3001") });
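For reference, this is the on-disk layout the server code above implies (illustrative; actual names depend on the file hash and suffix):
// filelist/
//   <fileHash>/          chunk directory created by /upload
//     <fileHash>-0       one file per chunk, named "<fileHash>-<index>"
//     <fileHash>-1
//     ...
//   <fileHash>.<suffix>  final file written by /merge; the chunk directory is removed afterwards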