<!doctype html>
<html lang="en">
  <head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>Stream video frames from Worker to main thread</title>
  </head>
  <body>
    <!-- muted is required for autoplay without a user gesture -->
    <video autoplay muted controls></video>
    <p><code></code></p>
    <script>
      const video = document.querySelector("video");
      const canvas = document.createElement("canvas");
      const ctx = canvas.getContext("2d");
      const code = document.querySelector("code");
      // "copy" replaces the previous frame instead of blending over it.
      // Bug fix: the canvas 2D property is globalCompositeOperation;
      // the original assigned to a nonexistent "globalComposite" property.
      ctx.globalCompositeOperation = "copy";
      // frameRequestRate 0: frames are pushed manually via requestFrame().
      const canvasStream = canvas.captureStream(0);
      const [canvasTrack] = canvasStream.getVideoTracks();
      // requestFrame() is exposed on the stream in some browsers and on the
      // track per the current spec — use whichever object implements it.
      const frameSource = [canvasStream, canvasTrack]
        .find(({requestFrame}) => requestFrame);
      const worker = new Worker("worker.js");
      // Paints each bitmap received from the Worker onto the canvas and pushes
      // it into the captured stream; stops listening on "stream done".
      const readStream = e => {
        if (e.data === "stream done") {
          console.log(e.data);
          worker.removeEventListener("message", readStream);
          return;
        }
        const {imageBitmap, width, height} = e.data;
        canvas.width = width;
        canvas.height = height;
        ctx.drawImage(imageBitmap, 0, 0);
        frameSource.requestFrame();
        // Release bitmap memory as soon as it has been painted.
        imageBitmap.close();
      };
      video.srcObject = canvasStream;
      video.ontimeupdate = () => { code.textContent = video.currentTime; };
      worker.addEventListener("message", readStream);
    </script>
  </body>
</html>
// worker.js — fetches a JSON list of encoded frames, decodes each one through
// an OffscreenCanvas (pixel-manipulation hook), and transfers the resulting
// ImageBitmaps to the main thread, paced to the source frame duration.
(async () => {
  const url = "https://gist.githubusercontent.com/guest271314/895e9961e914ad39a3365a42ec6a945c/raw/97b4d51ae42e17bdda41f16708700e3ebf1d6de4/frames.json";
  const frames = await (await fetch(url)).json();
  console.log(frames);

  const rs = new ReadableStream({
    // NOTE: the entire source is drained in a single pull() call, so stream
    // backpressure is not exercised; pacing happens via setTimeout below.
    async pull(controller) {
      for (const frame of frames) {
        // frame[0] is metadata; frame[1..frame.length - 1] are the images.
        const [{duration, frameRate, width, height}] = frame;
        const framesLength = frame.length - 1; // actual frame count
        const frameDuration = Math.ceil((duration * 1000) / framesLength);
        // Bug fix: iterate through index framesLength inclusive — the original
        // `i < framesLength` bound silently dropped the final frame of every
        // segment even though frameDuration was computed for all of them.
        for (let i = 1; i <= framesLength; i++) {
          const osc = new OffscreenCanvas(width, height);
          const osctx = osc.getContext("2d");
          const blob = await (await fetch(frame[i])).blob();
          const bmp = await createImageBitmap(blob);
          osctx.drawImage(bmp, 0, 0);
          // Bug fix: release the intermediate bitmap once drawn (was leaked).
          bmp.close();
          const imageData = osctx.getImageData(0, 0, width, height);
          // manipulate pixels here
          const imageBitmap = await createImageBitmap(imageData);
          controller.enqueue({imageBitmap, frameDuration});
        }
      }
      controller.close();
    }
  });

  const reader = rs.getReader();
  // Recursively reads one chunk per call, transfers (not copies) the bitmap
  // to the main thread, then waits frameDuration ms before the next frame.
  const processStream = async ({value, done}) => {
    if (done) {
      await reader.closed;
      return "stream done";
    }
    const {imageBitmap, frameDuration} = value;
    const {width, height} = imageBitmap;
    // Second argument is the transfer list: ownership of the bitmap moves
    // to the main thread with no pixel copy.
    postMessage({imageBitmap, width, height}, [imageBitmap]);
    await new Promise(resolve => setTimeout(resolve, frameDuration));
    return processStream(await reader.read());
  };
  const done = await processStream(await reader.read());
  postMessage(done);
})();

Stream video frames from a Worker to the main thread.

Related proposal — OffscreenVideo: https://discourse.wicg.io/t/proposal-offscreenvideo/3952