<!DOCTYPE html>

<html lang="en">
  <head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>WebRTC video with transparent background</title>
    <link rel="stylesheet" href="lib/style.css">
    <!-- defer: the script queries the DOM, so run it after parsing -->
    <script src="lib/script.js" defer></script>
  </head>

  <body>
    <a
      href="https://groups.google.com/g/discuss-webrtc/c/abe2lVcjVtE/m/4Z05l0CRAgAJ"
      target="_blank"
      rel="noopener noreferrer"
    >MediaStreamTrack transparent background</a>
    <canvas width="320" height="240"></canvas>
    <!-- muted is required for autoplay; crossorigin="anonymous" keeps the
         canvas untainted when cross-origin frames are drawn onto it -->
    <video
      src="https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm"
      autoplay
      controls
      loop
      muted
      crossorigin="anonymous"
    ></video>
  </body>
</html>
/* Page styles: fixed-size sky-blue stage for the capture demo. */
body {
  background: skyblue;
  width: 500px;
  height: 500px;
  /* note: `display: block` removed — it is the UA default for <body> */
}

/* Outline the capture target so the transparent canvas is visible. */
canvas {
  border: 1px solid red;
}
// Capture <video> frames onto a transparent <canvas>, stream the canvas with
// captureStream(), record that stream with MediaRecorder, and replay the
// recorded chunks through a MediaSource-backed <video>.
onload = () => {
  const canvas = document.querySelector('canvas');
  const video = document.querySelector('video');
  const ctx = canvas.getContext('2d');

  video.onplay = async (e) => {
    video.onplay = null; // build the pipeline only once
    console.log(e);
    try {
      // frameRate 0: frames are pushed manually via canvasTrack.requestFrame().
      const canvasStream = canvas.captureStream(0);
      const [canvasTrack] = canvasStream.getVideoTracks();
      canvasTrack.onmute = canvasTrack.onunmute = (e) => {
        console.log(e);
      };

      // Live preview of the canvas capture stream.
      const liveView = document.createElement('video');
      liveView.autoplay = liveView.controls = true;
      document.body.appendChild(liveView);

      // Playback of the recorded chunks through MSE.
      const ms = new MediaSource();
      const msRender = document.createElement('video');
      document.body.appendChild(msRender);
      msRender.autoplay = msRender.controls = true;

      const mimeType = 'video/webm;codecs=vp8';
      let sourceBuffer = null;
      const pending = []; // chunks that arrive before/while the buffer is busy
      ms.onsourceopen = () => {
        // The object URL has served its purpose once the source is attached.
        URL.revokeObjectURL(msRender.src);
        sourceBuffer = ms.addSourceBuffer(mimeType);
        // Drain queued chunks each time the previous append completes.
        sourceBuffer.onupdateend = () => {
          if (pending.length && !sourceBuffer.updating) {
            sourceBuffer.appendBuffer(pending.shift());
          }
        };
      };
      msRender.src = URL.createObjectURL(ms);

      liveView.srcObject = canvasStream;
      if (!MediaRecorder.isTypeSupported(mimeType)) {
        throw new DOMException(`${mimeType} is not supported`, 'NotSupportedError');
      }
      const recorder = new MediaRecorder(liveView.srcObject, { mimeType });
      recorder.ondataavailable = async ({ data }) => {
        const buffer = await data.arrayBuffer();
        // send buffer to RTCDataChannel
        // appendBuffer() throws InvalidStateError while a previous append is
        // still updating, and sourceBuffer is null until sourceopen fires —
        // queue chunks instead of appending blindly.
        if (sourceBuffer && !sourceBuffer.updating) {
          sourceBuffer.appendBuffer(buffer);
        } else {
          pending.push(buffer);
        }
      };
      recorder.start(100); // emit a chunk roughly every 100 ms
      //setTimeout(() => recorder.stop(), 30000)

      // Pull loop: draw a downscaled frame, push it to the capture track,
      // then wait about one display frame (~60 fps) before pulling again.
      await new ReadableStream({
        async pull(controller) {
          if (!video.paused) {
            const frame = await createImageBitmap(video, {
              resizeWidth: 50,
              resizeHeight: 50,
            });
            ctx.drawImage(frame, 0, 0, 50, 50);
            frame.close(); // release the bitmap's backing memory
            canvasTrack.requestFrame();
            controller.enqueue(
              await new Promise((resolve) => setTimeout(resolve, 1000 / 60))
            );
          } else {
            controller.close();
            recorder.stop(); // finalize the recording when playback stops
          }
        },
      }).pipeTo(new WritableStream());
    } catch (err) {
      console.error(err);
    }
  };
};