
upload-big-file

Large file upload

npm install

npm install upload-big-file

After installing, copy hash.js and spark-md5.min.js from the package folder into your project root, at the same level as index.html.

Then import it in the page where you need it:

import UploadFile from "upload-big-file";

A simple example:

<template>
  <div>
    <div>
      <input
        type="file"
        :disabled="uploadData && uploadData.status !== 'wait'"
        @change="handleFileChange"
      />
      <el-button
        v-if="uploadData && uploadData.container && uploadData.container.file"
        @click="handleUpload"
        :disabled="!uploadData.container.file || ['pause', 'uploading'].includes(uploadData.status)"
      >
        Upload
      </el-button>
      <el-button @click="handleResume" v-if="uploadData && uploadData.status === 'pause'">
        Resume
      </el-button>
      <!-- Pause cannot be clicked while the status is not uploading or the hash has not been generated yet -->
      <el-button
        v-if="uploadData && uploadData.status !== 'pause'"
        :disabled="uploadData.status !== 'uploading' || !uploadData.container.hash"
        @click="handlePause"
      >
        Pause
      </el-button>
    </div>
    <div v-if="uploadData && uploadData.uploadStatus" style="font-size: 30px;color: #f97249;">
      {{ uploadData.uploadStatus === "success" ? "Upload succeeded!" : "" }}
      {{ uploadData.uploadStatus === "start" ? "Upload started!" : "" }}
    </div>
    <div v-if="uploadData && uploadData.container && uploadData.container.file">
      <div>Calculating file hash</div>
      <el-progress :percentage="uploadData.hashPercentage"></el-progress>
      <div>Overall progress</div>
      <el-progress :percentage="uploadData.fakeUploadPercentage"></el-progress>
    </div>
    <el-table
      v-if="uploadData && uploadData.container && uploadData.container.file"
      :data="uploadData.data"
    >
      <el-table-column prop="hash" label="切片hash" align="center"></el-table-column>
      <el-table-column label="大小(KB)" align="center" width="120">
        <template v-slot="{ row }">
          {{ row.size | transformByte }}
        </template>
      </el-table-column>
      <el-table-column label="进度" align="center">
        <template v-slot="{ row }">
          <el-progress :percentage="row.percentage" color="#909399"></el-progress>
        </template>
      </el-table-column>
    </el-table>
  </div>
</template>

<script>
import UploadFile from "upload-big-file";

export default {
  name: "upload",
  filters: {
    transformByte(val) {
      return Number((val / 1024).toFixed(0));
    }
  },
  data: () => ({
    uploadFile: null,
    uploadData: null
  }),
  methods: {
    // Pause the upload
    handlePause() {
      this.uploadFile.pause();
    },
    // Resume button: resume the upload
    handleResume() {
      this.uploadFile.resume();
    },
    // File selection
    handleFileChange(e) {
      this.uploadFile = new UploadFile(e, {
        upload: "http://localhost:3000", // 文件上传接口
        verify: "http://localhost:3000/verify", // 获取文件上传信息接口
        merge: "http://localhost:3000/merge" // 通知服务端合并切片接口
      });
      this.uploadData = this.uploadFile.uploadData;
    },
    // Upload button: start uploading
    handleUpload() {
      this.uploadFile.upload();
    }
  }
};
</script>
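
A quick sketch of the same API outside of a Vue single-file component. It only assumes what the example above already shows: the constructor takes the file input's change event plus the three endpoint URLs, and the instance exposes uploadData together with upload(), pause() and resume(). The #file input element is just a placeholder.

import UploadFile from "upload-big-file";

const input = document.querySelector("#file"); // hypothetical file input
let uploadFile = null;

input.addEventListener("change", e => {
  // Same options as in the Vue example above
  uploadFile = new UploadFile(e, {
    upload: "http://localhost:3000",
    verify: "http://localhost:3000/verify",
    merge: "http://localhost:3000/merge"
  });
  uploadFile.upload();
});

// Hash and upload progress can be read from the reactive state object,
// e.g. uploadFile.uploadData.hashPercentage and uploadFile.uploadData.fakeUploadPercentage.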

An example backend built with Node for testing the API

You can spin up a small Node server to try it out:

node index.js

index.js

const Controller = require("./controller");
const http = require("http");
const server = http.createServer();

const controller = new Controller();

server.on("request", async (req, res) => {
  res.setHeader("Access-Control-Allow-Origin", "*");
  res.setHeader("Access-Control-Allow-Headers", "*");
  if (req.method === "OPTIONS") {
    res.statusCode = 200;
    res.end();
    return;
  }
  if (req.url === "/verify") {
    await controller.handleVerifyUpload(req, res);
    return;
  }

  if (req.url === "/merge") {
    await controller.handleMerge(req, res);
    return;
  }

  if (req.url === "/") {
    await controller.handleFormData(req, res);
  }
});

server.listen(3000, () => console.log("Listening on port 3000"));

controller.js

const multiparty = require("multiparty");
const path = require("path");
const fse = require("fs-extra");

const extractExt = filename => filename.slice(filename.lastIndexOf("."), filename.length); // extract the file extension
const UPLOAD_DIR = path.resolve(__dirname, "..", "target"); // directory where uploaded files are stored

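// Stream one chunk file into the given write stream, deleting the chunk file once it has been fully read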
const pipeStream = (path, writeStream) =>
  new Promise(resolve => {
    const readStream = fse.createReadStream(path);
    readStream.on("end", () => {
      fse.unlinkSync(path);
      resolve();
    });
    readStream.pipe(writeStream);
  });

// Merge all chunks of a file into the final file
const mergeFileChunk = async (filePath, fileHash, size) => {
  const chunkDir = path.resolve(UPLOAD_DIR, fileHash);
  const chunkPaths = await fse.readdir(chunkDir);
  // Sort by chunk index;
  // otherwise the order returned by readdir may be scrambled
  chunkPaths.sort((a, b) => a.split("-")[1] - b.split("-")[1]);
  await Promise.all(
    chunkPaths.map((chunkPath, index) =>
      pipeStream(
        path.resolve(chunkDir, chunkPath),
        // create a write stream starting at this chunk's byte offset
        fse.createWriteStream(filePath, {
          start: index * size
        })
      )
    )
  );
  fse.rmdirSync(chunkDir); // remove the directory that held the chunks once merging is done
};

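// Collect the raw request body and parse it as JSON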
const resolvePost = req =>
  new Promise(resolve => {
    let chunk = "";
    req.on("data", data => {
      chunk += data;
    });
    req.on("end", () => {
      resolve(JSON.parse(chunk));
    });
  });

// Use fs.readdir to list the files in the chunk directory
// and return the names of the chunks that have already been uploaded
const createUploadedList = async fileHash =>
  fse.existsSync(path.resolve(UPLOAD_DIR, fileHash))
    ? await fse.readdir(path.resolve(UPLOAD_DIR, fileHash))
    : [];

module.exports = class {
  // Merge chunks
  async handleMerge(req, res) {
    const data = await resolvePost(req);
    const { fileHash, filename, size } = data;
    const ext = extractExt(filename);
    const filePath = path.resolve(UPLOAD_DIR, `${fileHash}${ext}`);
    await mergeFileChunk(filePath, fileHash, size);
    res.end(
      JSON.stringify({
        code: 0,
        message: "file merged success"
      })
    );
  }
  // Handle an uploaded chunk
  async handleFormData(req, res) {
    const multipart = new multiparty.Form();

    multipart.parse(req, async (err, fields, files) => {
      if (err) {
        console.error(err);
        res.statusCode = 500;
        res.end("process file chunk failed");
        return;
      }
      const [chunk] = files.chunk;
      const [hash] = fields.hash;
      const [fileHash] = fields.fileHash;
      const [filename] = fields.filename;
      const filePath = path.resolve(UPLOAD_DIR, `${fileHash}${extractExt(filename)}`);
      const chunkDir = path.resolve(UPLOAD_DIR, fileHash);

      // If the merged file already exists, return immediately
      if (fse.existsSync(filePath)) {
        res.end("file exists");
        return;
      }

      // Create the chunk directory if it does not exist yet
      if (!fse.existsSync(chunkDir)) {
        await fse.mkdirs(chunkDir);
      }
      // fse.move is an fs-extra method, similar to fs.rename but cross-platform;
      // plain fs.rename can hit permission issues on Windows:
      // https://github.com/meteor/meteor/issues/7852#issuecomment-255767835
      await fse.move(chunk.path, path.resolve(chunkDir, hash));
      res.end("received file chunk");
    });
  }
  // Check whether the file was already uploaded and which chunks already exist
  async handleVerifyUpload(req, res) {
    const data = await resolvePost(req);
    const { fileHash, filename } = data;
    const ext = extractExt(filename);
    const filePath = path.resolve(UPLOAD_DIR, `${fileHash}${ext}`);
    // Does the merged file already exist?
    if (fse.existsSync(filePath)) {
      res.end(
        JSON.stringify({
          shouldUpload: false
        })
      );
    } else {
      res.end(
        JSON.stringify({
          shouldUpload: true,
          uploadedList: await createUploadedList(fileHash)
        })
      );
    }
  }
};
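
For reference, a rough sketch of how a client might call the /verify and /merge endpoints directly. The field names (filename, fileHash, size) are the ones the controller above destructures from the JSON body; the fetch helpers themselves are only illustrative and not part of the package.

// Hypothetical helpers matching the JSON bodies resolvePost parses on the server.
const verifyUpload = (filename, fileHash) =>
  fetch("http://localhost:3000/verify", {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify({ filename, fileHash })
  }).then(res => res.json()); // -> { shouldUpload, uploadedList? }

const mergeChunks = (filename, fileHash, size) =>
  fetch("http://localhost:3000/merge", {
    method: "POST",
    headers: { "content-type": "application/json" },
    // size is the chunk size used when slicing, so the server can compute each chunk's offset
    body: JSON.stringify({ filename, fileHash, size })
  }).then(res => res.json()); // -> { code: 0, message: "..." }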