Works with any backend · Installable via npm · Zero dependencies
← All Backend Guides

Node.js

Handle MultipleUpload file uploads with the Node.js built-in http module and formidable for multipart parsing. This guide covers both standard multipart form uploads and chunked uploads with resume support.

Multipart Form Upload

Parse standard multipart/form-data uploads using formidable. Each file is saved to an uploads directory and the response includes a unique GUID for client-side tracking.

const http = require("http");
const { IncomingForm } = require("formidable");
const path = require("path");
const crypto = require("crypto");
const fs = require("fs");

// Directory where completed uploads are stored.
// mkdirSync with { recursive: true } is a no-op when the directory already
// exists, so no existsSync pre-check is needed (avoids a TOCTOU race where
// the directory appears/disappears between the check and the mkdir).
const UPLOAD_DIR = path.join(__dirname, "uploads");
fs.mkdirSync(UPLOAD_DIR, { recursive: true });

// HTTP server handling standard multipart/form-data uploads at POST /upload.
// Responds with { success, fileGuid, fileName, fileSize } on success.
const server = http.createServer((req, res) => {
  // CORS headers so the browser widget can call this server cross-origin.
  res.setHeader("Access-Control-Allow-Origin", "*");
  res.setHeader("Access-Control-Allow-Methods", "POST, OPTIONS");
  res.setHeader("Access-Control-Allow-Headers",
    "Content-Type, X-Upload-Id, X-Chunk-Index, X-Chunk-Count, X-File-Name, X-File-Size");

  // Answer the CORS preflight without a body.
  if (req.method === "OPTIONS") {
    res.writeHead(204);
    return res.end();
  }

  if (req.method === "POST" && req.url === "/upload") {
    const form = new IncomingForm({
      uploadDir: UPLOAD_DIR,        // temp files land next to their final home
      keepExtensions: true,
      maxFileSize: 200 * 1024 * 1024 // 200 MB
    });

    form.parse(req, (err, fields, files) => {
      if (err) {
        res.writeHead(400, { "Content-Type": "application/json" });
        return res.end(JSON.stringify({ success: false, error: err.message }));
      }

      // formidable v3 returns an array per field; v2 returns a single object.
      const file = Array.isArray(files.file) ? files.file[0] : files.file;
      if (!file) {
        // Missing "file" field is a client error — respond 400 instead of
        // crashing on file.originalFilename below.
        res.writeHead(400, { "Content-Type": "application/json" });
        return res.end(JSON.stringify({ success: false, error: "No file field in form data" }));
      }

      // Store under a server-generated GUID; never trust the client filename
      // for the on-disk name (only its extension is reused).
      const fileGuid = crypto.randomUUID();
      const ext = path.extname(file.originalFilename || "");
      const destPath = path.join(UPLOAD_DIR, fileGuid + ext);

      // Temp file and destination are in the same directory, so rename works.
      fs.renameSync(file.filepath, destPath);

      res.writeHead(200, { "Content-Type": "application/json" });
      res.end(JSON.stringify({
        success: true,
        fileGuid: fileGuid,
        fileName: file.originalFilename,
        fileSize: file.size
      }));
    });
    return;
  }

  res.writeHead(404);
  res.end("Not found");
});

server.listen(3000, () => console.log("Upload server on http://localhost:3000"));

Chunked Upload

For large files, MultipleUpload sends chunks with X-Upload-Id, X-Chunk-Index, X-Chunk-Count, X-File-Name, and X-File-Size headers. The server stores each chunk and assembles the final file when all chunks arrive.

// Add this route to the server above
if (req.method === "POST" && req.url === "/upload-chunk") {
  const uploadId   = req.headers["x-upload-id"];
  const chunkIndex = parseInt(req.headers["x-chunk-index"], 10);
  const chunkCount = parseInt(req.headers["x-chunk-count"], 10);
  const fileName   = req.headers["x-file-name"];
  const fileSize   = parseInt(req.headers["x-file-size"], 10);

  const chunkDir = path.join(UPLOAD_DIR, "chunks", uploadId);
  if (!fs.existsSync(chunkDir)) fs.mkdirSync(chunkDir, { recursive: true });

  const chunkPath = path.join(chunkDir, `chunk_${chunkIndex}`);
  const writeStream = fs.createWriteStream(chunkPath);
  req.pipe(writeStream);

  writeStream.on("finish", () => {
    // Check if all chunks have arrived
    const received = fs.readdirSync(chunkDir).length;

    if (received === chunkCount) {
      // Assemble chunks into final file
      const fileGuid = crypto.randomUUID();
      const ext = path.extname(fileName);
      const finalPath = path.join(UPLOAD_DIR, fileGuid + ext);
      const output = fs.createWriteStream(finalPath);

      let i = 0;
      function appendNext() {
        if (i >= chunkCount) {
          output.end();
          // Clean up chunk directory
          fs.rmSync(chunkDir, { recursive: true });

          res.writeHead(200, { "Content-Type": "application/json" });
          res.end(JSON.stringify({
            success: true,
            fileGuid: fileGuid,
            fileName: fileName,
            fileSize: fileSize
          }));
          return;
        }
        const chunkFile = path.join(chunkDir, `chunk_${i}`);
        const rs = fs.createReadStream(chunkFile);
        rs.pipe(output, { end: false });
        rs.on("end", () => { i++; appendNext(); });
      }
      appendNext();
    } else {
      res.writeHead(200, { "Content-Type": "application/json" });
      res.end(JSON.stringify({ success: true, chunksReceived: received }));
    }
  });
  return;
}

Expected JSON Response

Both endpoints must return this JSON structure on successful upload completion.

{
  "success": true,
  "fileGuid": "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
  "fileName": "photo.jpg",
  "fileSize": 204800
}

Client-Side JavaScript

Initialize MultipleUpload on the client, pointing to your Node.js server endpoints.

<link rel="stylesheet" href="multipleupload.css" />
<div id="uploader"></div>
<script src="multipleupload.js"></script>
<script>
  // Wire the MultipleUpload widget to the Node.js endpoints defined above.
  var options = {
    uploadUrl: "http://localhost:3000/upload",
    chunkUploadUrl: "http://localhost:3000/upload-chunk",
    chunkSize: 2 * 1024 * 1024, // 2 MB chunks
    onFileUploaded: function (file, response) {
      console.log("Uploaded:", response.fileName, response.fileGuid);
    }
  };
  var uploader = new MultipleUpload("#uploader", options);
</script>