<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Canvas captureStream to MediaRecorder sender</title>
</head>
<body>
<canvas style="border:1px solid blue"></canvas>
<script>
// Grab the drawing surface and give it fixed pixel dimensions.
const canvas = document.querySelector("canvas");
const width = 300;
const height = 200;
canvas.width = width;
canvas.height = height;
const ctx = canvas.getContext("2d");
// captureStream(0) produces no frames on its own; each frame must be
// pushed explicitly via requestFrame().
const canvasStream = canvas.captureStream(0);
const [videoTrack] = canvasStream.getVideoTracks();
// The spec moved requestFrame() from the capture stream to the capture
// track; pick whichever object actually exposes it in this browser.
const stream = [canvasStream, videoTrack].find(candidate => candidate.requestFrame);
const colors = ["red", "blue", "green", "yellow", "orange", "purple"];
const len = colors.length;
// stream frames from a video
// Infinite async generator: each iteration paints one full cycle of colors
// onto the canvas while a MediaRecorder captures it, then yields the
// recorded webm segment as an ArrayBuffer.
async function* streamFrames() {
  while (true) {
    const recorder = new MediaRecorder(canvasStream, {
      mimeType: "video/webm;codecs=vp9"
    });
    // Plain (non-async) executor: the original used an async executor,
    // which silently swallowed any throw and left the promise pending
    // forever, hanging the generator. Reject explicitly on recorder error.
    const recorded = new Promise((resolve, reject) => {
      recorder.ondataavailable = e => resolve(e.data);
      recorder.onerror = e => reject(e.error || e);
    });
    recorder.start();
    // Paint each color for ~30 frames at ~30 fps, explicitly requesting a
    // frame capture after every paint (captureStream(0) emits nothing on
    // its own). The label lets a dead recorder abort the whole cycle, not
    // just the inner loop as before.
    outer: for (let i = 0; i < len; i++) {
      for (let j = 0; j < 30; j++) {
        ctx.clearRect(0, 0, width, height);
        ctx.fillStyle = colors[i];
        ctx.fillRect(0, 0, width, height);
        stream.requestFrame();
        if (recorder.state !== "recording") {
          break outer; // recorder stopped/errored; abandon this cycle
        }
        await new Promise(resolve => setTimeout(resolve, 1000 / 30));
      }
    }
    if (recorder.state === "recording") {
      recorder.stop();
    }
    // dataavailable fires after stop(); convert the Blob once it arrives.
    const blob = await recorded;
    yield await blob.arrayBuffer();
  }
}
// Open the receiving page; it posts back a "ready" message once its script
// has loaded, which triggers the frame stream below.
const clientStream = window.open("client.html", "_blank");

// Pump recorded segments to the client window. The ArrayBuffer is listed
// in the transfer list so it is moved (zero-copy) rather than cloned.
const startStream = async _ => {
  for await (const frame of streamFrames()) {
    // Window.postMessage takes (message, targetOrigin, transfer). The
    // original passed [frame] as the second argument, so the buffer was
    // never actually transferred (or the call threw on the legacy
    // signature). Both pages are same-origin, so pin the target origin.
    clientStream.postMessage(frame, location.origin, [frame]);
  }
};

// Wait for the client's "ready" handshake before streaming, and only
// accept it from our own origin.
onmessage = e => {
  console.log(e.data, e.origin);
  if (e.origin === location.origin) {
    startStream();
  }
};
</script>
</body>
</html>
MediaStream addTrack() and removeTrack() using Blob from MediaRecorder converted to ArrayBuffer
https://github.com/guest271314/MediaFragmentRecorder/issues/8
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>MediaStream track-swap receiver</title>
</head>
<body>
<!-- focus to avoid stalled playback -->
<video controls autoplay muted></video>
<script>
// Log that the receiver page's script has started; the opener only begins
// streaming after the explicit "ready" postMessage sent further below.
console.log("loaded");
// Visible player: plays a MediaStream whose tracks are swapped in as each
// recorded segment arrives from the opener window.
const video = document.querySelector("video");
const mediaStream = new MediaStream();
video.srcObject = mediaStream;
// Off-DOM <video> that decodes each received webm segment; its tracks are
// captured and forwarded to the visible player.
const videoStream = document.createElement("video");
videoStream.oncanplay = async _ => {
  const { buffered } = videoStream;
  if (buffered.length) {
    console.log(buffered.start(0));
  }
  // Guard clause: already playing, just report the ready state.
  if (!videoStream.paused) {
    console.log(videoStream.readyState);
    return;
  }
  try {
    // play() returns a promise that may reject (e.g. autoplay policy).
    await videoStream.play();
  } catch (err) {
    console.error(err);
    console.trace();
  }
};
// Tell the opener we are ready to receive recorded segments.
// Window.postMessage's legacy signature requires a targetOrigin; both
// pages are same-origin, so pin it rather than relying on the default.
opener.postMessage("ready", location.origin);

onmessage = e => {
  // Only accept segments from our own origin (mirrors the opener's check).
  if (e.origin !== location.origin) {
    return;
  }
  // On the first play of each new segment, capture its tracks, attach them
  // to the visible player's MediaStream, and retire the previous tracks.
  videoStream.onplay = _ => {
    videoStream.onplay = null;
    const stream = videoStream.captureStream();
    const [audioTrack] = stream.getAudioTracks();
    const [videoTrack] = stream.getVideoTracks();
    stream.getTracks().forEach(track => {
      mediaStream.addTrack(track);
    });
    // Anything that is not the track just added is a stale track from the
    // previous segment: disable, stop, then detach it.
    [mediaStream.getAudioTracks().find(({
      id
    }) => id !== audioTrack.id),
    mediaStream.getVideoTracks().find(({
      id
    }) => id !== videoTrack.id)
    ]
    .forEach(track => {
      if (track) {
        track.enabled = false;
        track.stop();
        mediaStream.removeTrack(track);
      }
    });
  };
  const blob = new Blob([e.data], {
    type: "video/webm;codecs=vp9"
  });
  console.log(blob);
  // The original created a fresh object URL per message and never revoked
  // any of them, leaking one Blob per segment. Revoke the previous URL
  // before replacing it.
  if (videoStream.src) {
    URL.revokeObjectURL(videoStream.src);
  }
  videoStream.src = URL.createObjectURL(blob);
};
</script>
</body>
</html>