Frontend Development 11 min read

Parallel Large File Download in JavaScript Using asyncPool

This article explains how to implement parallel downloading of large files in the browser by leveraging HTTP Range requests, the asyncPool concurrency‑control library, and auxiliary functions such as getContentLength, getBinaryContent, concatenate, and saveAs, complete with code examples and usage guidance.

Sohu Tech Products
Sohu Tech Products
Sohu Tech Products
Parallel Large File Download in JavaScript Using asyncPool

1. Introduction

Building on the previous article about concurrency control in JavaScript, this guide shows how to use the asyncPool function from the async-pool library to download large files in parallel.

2. HTTP Range Requests

HTTP Range requests let a server send only a part of a file, which is useful for large media files or resumable downloads. If the response includes an Accept-Ranges header (with a value other than "none"), the server supports range requests. A valid range request is answered with status 206 Partial Content; an unsatisfiable range returns 416 Range Not Satisfiable, and the server may also ignore the Range header entirely and return the whole file with status 200 OK.

2.1 Range Syntax

Range: <unit>=<range-start>-
Range: <unit>=<range-start>-<range-end>
Range: <unit>=<range-start>-<range-end>, <range-start>-<range-end>
Range: <unit>=<range-start>-<range-end>, <range-start>-<range-end>, <range-start>-<range-end>

unit : usually bytes

<range-start> : integer start offset

<range-end> : integer end offset (optional, defaults to file end)

3. Implementing Large File Download

The following helper functions are defined:

3.1 getContentLength

/**
 * Fetches the size of the remote resource via a HEAD request (headers only,
 * no body is transferred).
 *
 * @param {string} url - Resource to measure.
 * @returns {Promise<number>} Resolves with the Content-Length in bytes
 *   (0 if the header is missing); rejects on network error or non-2xx status.
 */
function getContentLength(url) {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.open("HEAD", url);
    xhr.onload = function () {
      if (xhr.status >= 200 && xhr.status < 300) {
        // Number() instead of `~~`: double-tilde truncates to a signed
        // 32-bit integer, corrupting sizes for files larger than 2 GiB.
        resolve(Number(xhr.getResponseHeader("Content-Length")) || 0);
      } else {
        // Previously a 404/500 response still resolved (with 0),
        // silently producing an empty download.
        reject(new Error(`HEAD ${url} failed with status ${xhr.status}`));
      }
    };
    // Reject with a real Error, not the raw ProgressEvent.
    xhr.onerror = () => reject(new Error(`HEAD ${url} network error`));
    xhr.send();
  });
}

3.2 asyncPool

/**
 * Runs `iteratorFn` over every item of `array` with at most `poolLimit`
 * invocations in flight at once.
 *
 * @param {number} poolLimit - Maximum number of concurrent tasks.
 * @param {Array} array - Items to process.
 * @param {Function} iteratorFn - Called as iteratorFn(item, array); may
 *   return a promise.
 * @returns {Promise<Array>} Results in the same order as `array`.
 */
async function asyncPool(poolLimit, array, iteratorFn) {
  const results = [];
  const inFlight = [];
  for (const item of array) {
    // Wrap in Promise.resolve() so synchronous iterator functions and
    // plain return values are handled uniformly.
    const task = Promise.resolve().then(() => iteratorFn(item, array));
    results.push(task);
    // Concurrency bookkeeping is only needed when the pool can saturate.
    if (array.length >= poolLimit) {
      const tracked = task.then(() => {
        // Remove ourselves from the in-flight set once settled.
        inFlight.splice(inFlight.indexOf(tracked), 1);
      });
      inFlight.push(tracked);
      if (inFlight.length >= poolLimit) {
        // Block until any running task finishes before scheduling more.
        await Promise.race(inFlight);
      }
    }
  }
  return Promise.all(results);
}

3.3 getBinaryContent

/**
 * Fetches bytes [start, end] (inclusive) of `url` via an HTTP Range request.
 *
 * @param {string} url - Resource to fetch.
 * @param {number} start - First byte offset of the chunk.
 * @param {number} end - Last byte offset of the chunk (inclusive).
 * @param {number} i - Chunk index, echoed back so the caller can reassemble
 *   chunks in order.
 * @returns {Promise<{index: number, buffer: ArrayBuffer}>}
 */
function getBinaryContent(url, start, end, i) {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.open("GET", url, true);
    xhr.setRequestHeader("range", `bytes=${start}-${end}`);
    xhr.responseType = "arraybuffer";
    xhr.onload = function () {
      // 206 = the server honored the Range header. A 200 means it ignored
      // Range and sent the whole file — still resolved here for
      // compatibility, but multi-chunk reassembly would be wrong.
      if (xhr.status === 206 || xhr.status === 200) {
        resolve({ index: i, buffer: xhr.response });
      } else {
        reject(new Error(`Range request for ${url} failed with status ${xhr.status}`));
      }
    };
    // Bug fix: the original had no onerror handler, so any network
    // failure left this promise pending forever (the try/catch only
    // covered synchronous setup, not async errors).
    xhr.onerror = () => reject(new Error(`Network error fetching ${url}`));
    xhr.send();
  });
}

3.4 concatenate

/**
 * Joins a list of Uint8Arrays into a single contiguous Uint8Array.
 *
 * @param {Uint8Array[]} arrays - Chunks to merge, in order.
 * @returns {Uint8Array|null} The merged bytes, or null for an empty list.
 */
function concatenate(arrays) {
  if (arrays.length === 0) {
    return null;
  }
  const totalBytes = arrays.reduce((sum, chunk) => sum + chunk.length, 0);
  const merged = new Uint8Array(totalBytes);
  let offset = 0;
  arrays.forEach((chunk) => {
    merged.set(chunk, offset);
    offset += chunk.length;
  });
  return merged;
}

3.5 saveAs

/**
 * Triggers a client-side download of binary data by creating an object URL
 * and programmatically clicking a synthetic <a download> element.
 *
 * @param {Object} opts
 * @param {string} [opts.name] - Suggested file name (random fallback).
 * @param {BufferSource} opts.buffers - The file contents.
 * @param {string} [opts.mime="application/octet-stream"] - MIME type.
 */
function saveAs({ name, buffers, mime = "application/octet-stream" }) {
  const blob = new Blob([buffers], { type: mime });
  const blobUrl = URL.createObjectURL(blob);
  const a = document.createElement("a");
  a.download = name || Math.random();
  a.href = blobUrl;
  a.click();
  // Bug fix: revokeObjectURL expects the URL string, not the Blob.
  // Passing `blob` was a no-op, leaking the object URL (and the Blob's
  // memory) until the page was unloaded.
  URL.revokeObjectURL(blobUrl);
}

3.6 download

/**
 * Downloads `url` in parallel chunks using HTTP Range requests.
 *
 * @param {Object} opts
 * @param {string} opts.url - Resource to download.
 * @param {number} [opts.chunkSize] - Bytes per chunk; when omitted or not a
 *   number, the file is fetched as a single chunk.
 * @param {number} [opts.poolLimit=1] - Maximum concurrent requests.
 * @returns {Promise<Uint8Array|null>} The reassembled file bytes.
 */
async function download({ url, chunkSize, poolLimit = 1 }) {
  const contentLength = await getContentLength(url);
  const chunked = typeof chunkSize === "number" && chunkSize > 0;
  const chunks = chunked ? Math.ceil(contentLength / chunkSize) : 1;
  const results = await asyncPool(poolLimit, [...Array(chunks).keys()], (i) => {
    // Bug fix: when chunkSize is undefined the original computed
    // start = i * undefined = NaN, producing an invalid header
    // "bytes=NaN-...". Single-chunk mode now requests bytes 0 to end.
    const start = chunked ? i * chunkSize : 0;
    const end = i + 1 === chunks ? contentLength - 1 : (i + 1) * chunkSize - 1;
    return getBinaryContent(url, start, end, i);
  });
  // Promise.all preserves input order, but each result carries its chunk
  // index — sort defensively so reassembly never depends on pool internals.
  results.sort((a, b) => a.index - b.index);
  const sortedBuffers = results.map((item) => new Uint8Array(item.buffer));
  return concatenate(sortedBuffers);
}

3.7 Usage Example

/**
 * Reads a URL from the #fileUrl input and downloads it in parallel chunks,
 * saving the result as a zip file on completion.
 */
function multiThreadedDownload() {
  const url = document.querySelector("#fileUrl").value;
  // Bug fix: the original /https?/ matched "http" anywhere in the string
  // (e.g. "not-a-url-http"); anchor the scheme so only real http(s)
  // URLs pass validation.
  if (!url || !/^https?:\/\//.test(url)) return;
  console.log("Multi‑threaded download start: " + new Date());
  download({
    url,
    chunkSize: 0.1 * 1024 * 1024, // ~100 KiB per chunk
    poolLimit: 6, // at most 6 range requests in flight
  })
    .then((buffers) => {
      console.log("Multi‑threaded download end: " + new Date());
      saveAs({ buffers, name: "my‑archive", mime: "application/zip" });
    })
    // Bug fix: the original chain had no rejection handler, so any
    // failed chunk surfaced only as an unhandled promise rejection.
    .catch((err) => console.error("Download failed:", err));
}

The full example code is available at the provided Gist link.

4. Conclusion

The article demonstrates how to use asyncPool for parallel downloading (and potentially uploading) of large files in JavaScript, covering HEAD requests for file size, HTTP Range requests, binary data handling with ArrayBuffer , and client‑side file saving.

5. References

You Don't Know Blob

MDN – ArrayBuffer

MDN – HTTP Range Requests

JavaScript Concurrency Control

Tags: JavaScript · HTTP Range · Blob · asyncPool · parallel download
Sohu Tech Products
Written by

Sohu Tech Products

A knowledge-sharing platform for Sohu's technology products. As a leading Chinese internet brand with media, video, search, and gaming services and over 700 million users, Sohu continuously drives tech innovation and practice. We’ll share practical insights and tech news here.

0 followers
Reader feedback

How this landed with the community

login Sign in to like

Rate this article

Was this worth your time?

Sign in to rate
Discussion

0 Comments

Thoughtful readers leave field notes, pushback, and hard-won operational detail here.