<!DOCTYPE html>
<html>

<head>
  <title>Stream video frames from Worker to main thread to Worker to main thread</title>
</head>

<body>
  <video></video>
  <script>
    (async () => {
      const url = "https://gist.githubusercontent.com/guest271314/895e9961e914ad39a3365a42ec6a945c/raw/97b4d51ae42e17bdda41f16708700e3ebf1d6de4/frames.json";
      const video = document.querySelector("video");
      video.autoplay = video.controls = true;
      const canvas = document.createElement("canvas");
      // OffscreenCanvasRenderingContext2D is not implemented in Firefox 69
      // https://bugzilla.mozilla.org/show_bug.cgi?id=801176
      /*
        URL, MessagePort => Worker 
        Fetch video frame as ArrayBuffer => ArrayBuffer to main thread
        ArrayBuffer to Blob to ImageBitmap to ImageData => ImageData to Worker
        Process ImageData => ImageData to main thread 
        ImageData to ImageBitmap => draw ImageBitmap onto CanvasRenderingContext2D => ImageBitmap.close()
        CanvasCaptureMediaStream|CanvasCaptureMediaStreamTrack.requestFrame() => ImageBitmap.close()
      */
      const ctx = canvas.getContext("2d");
      ctx.globalCompositeOperation = "copy"; // "copy" replaces, rather than blends with, existing pixels
      const canvasStream = canvas.captureStream(0);
      const [videoTrack] = canvasStream.getVideoTracks();
      video.onended = e => console.log(e);
      videoTrack.onmute = e => console.log(e);
      videoTrack.onunmute = e => console.log(e);
      videoTrack.onended = e => console.log(e);
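      // Firefox exposes requestFrame() on CanvasCaptureMediaStream, Chromium
      // on CanvasCaptureMediaStreamTrack; use whichever object implements it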
      const videoStream = [canvasStream, videoTrack].find(({
        requestFrame: rf
      }) => rf);
      video.srcObject = canvasStream;
      const worker = new Worker("worker.js");
      const channel = new MessageChannel();
      const {
        port1, port2
      } = channel;
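      // record event.timeStamp of each MessagePort message to report
      // per-frame latency once the stream completes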
      let timeStamps = [];
      port1.onmessage = async e => {
        timeStamps.push(e.timeStamp);
        const {
          response, imageData
        } = e.data;
        if (response !== undefined && imageData === undefined) {
          const bitmap = await self.createImageBitmap(new Blob([response]));
          canvas.width = bitmap.width;
          canvas.height = bitmap.height;
          ctx.drawImage(bitmap, 0, 0, bitmap.width, bitmap.height);
          const imageData = ctx.getImageData(0, 0, bitmap.width, bitmap.height);
          bitmap.close();
          port1.postMessage({
            imageData
          }, [imageData.data.buffer]);
        } else {
          if (imageData) {
            const bitmap = await self.createImageBitmap(imageData);
            video.width = bitmap.width;
            video.height = bitmap.height;
            ctx.drawImage(bitmap, 0, 0, bitmap.width, bitmap.height);
            videoStream.requestFrame();
            bitmap.close();
          }
        }
      }
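      // hand the frames.json URL to the worker and transfer port2 as the
      // worker's end of the channel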
      worker.postMessage({
        url
      }, [port2]);
      worker.onmessage = e => {
        videoTrack.enabled = false;
        videoTrack.stop();
        port1.close();
        port2.close();
        console.log(e.data, video.currentTime, videoTrack, canvasStream);
        const timeStampMap = [];
        timeStamps.reduce((a, b, index) => {
          timeStampMap.push(`MessagePort timeStamp ${index} - MessagePort timeStamp ${index - 1}: ${(b - a) / 1000}`);
          return b;
        });
        console.log(JSON.stringify(timeStampMap, null, 2));
      };
    })();
  </script>
</body>
</html>
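
Where OffscreenCanvas and its 2D context are implemented (see the Bugzilla
issue referenced in the script above), the per-frame ImageData round trip can
be avoided by transferring the canvas to the worker and drawing there. A
minimal sketch, not part of the demo; "offscreenWorker.js" and the frame URL
are hypothetical:

// main thread: transfer rendering control of the canvas to a worker
const offscreen = canvas.transferControlToOffscreen();
const offscreenWorker = new Worker("offscreenWorker.js"); // hypothetical file
offscreenWorker.postMessage({ canvas: offscreen }, [offscreen]);

// offscreenWorker.js: fetch, decode and draw frames without posting pixels back
onmessage = async e => {
  const ctx = e.data.canvas.getContext("2d");
  const blob = await (await fetch("frame.png" /* hypothetical URL */)).blob();
  const bitmap = await createImageBitmap(blob);
  ctx.drawImage(bitmap, 0, 0);
  bitmap.close();
};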
// worker.js
// https://developer.mozilla.org/en-US/docs/Web/API/Canvas_API/Tutorial/Pixel_manipulation_with_canvas#Grayscaling_and_inverting_colors
const invert = imageData => {
  const data = imageData.data;
  for (let i = 0; i < data.length; i += 4) {
    data[i]     = 255 - data[i];     // red
    data[i + 1] = 255 - data[i + 1]; // green
    data[i + 2] = 255 - data[i + 2]; // blue
  }
};
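
// Alternative manipulation from the same MDN tutorial: grayscaling. A sketch;
// substitute grayscale(imageData) for invert(imageData) in the handler below.
const grayscale = imageData => {
  const data = imageData.data;
  for (let i = 0; i < data.length; i += 4) {
    const avg = (data[i] + data[i + 1] + data[i + 2]) / 3;
    data[i] = data[i + 1] = data[i + 2] = avg; // red, green, blue
  }
};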

let channel;

onmessage = async e => {
  const {url} = e.data;
  try {
    if (!channel) {
      ([channel] = e.ports);
      channel.onmessage = async e => {
        let {
          imageData
        } = e.data;
        // manipulate pixels here
        invert(imageData);
        channel.postMessage({
          imageData
        }, [imageData.data.buffer]);
      }
    }

    const frames = await (await fetch(url)).json();
    console.log(frames);

    const rs = new ReadableStream({
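      // a single pull() enqueues every frame in order, then closes the stream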
      async pull(controller) {
        for (const frame of frames) {
          const [{
            duration, frameRate, width, height
          }] = frame;
          const framesLength = frame.length - 1;
          const frameDuration = Math.ceil((duration * 1000) / framesLength);
          // index 0 is metadata; indices 1..framesLength hold the frame URLs
          for (let i = 1; i <= framesLength; i++) {
            const response = await (await fetch(frame[i])).arrayBuffer();
            controller.enqueue({
              response, frameDuration
            });
          }
        }
        controller.close();
      }
    });

    const reader = rs.getReader();
    const processStream = async ({
      value, done
    }) => {
      if (done) {
        await reader.closed;
        return "stream done";
      }
      const {
        response, frameDuration
      } = value;
      channel.postMessage({
        response
      }, [response]);
      await new Promise(resolve => setTimeout(resolve, frameDuration));
      return processStream(await reader.read());
    }
    const done = await processStream(await reader.read());
    postMessage(done);
  } catch (e) {
    console.error(e);
  }
}
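
worker.js expects each element of frames.json to be an array whose first entry
is a metadata object and whose remaining entries are fetchable frame URLs. A
hypothetical illustration of the shape (field names are taken from the
destructuring above; the values are made up):

[
  [
    { "duration": 4, "frameRate": 29.97, "width": 320, "height": 240 },
    "data:image/png;base64,...",
    "data:image/png;base64,..."
  ]
]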
Related: Stream video frames from main thread to Worker to main thread
https://discourse.wicg.io/t/proposal-offscreenvideo/3952/5