<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Record media fragments to single webm video using canvas.captureStream(), requestAnimationFrame(), AudioContext.createMediaStreamDestination(), AudioContext.createMediaElementSource(), MediaRecorder()</title>
</head>
<body>
<h1>click</h1>
<script>
// Based on approaches at https://stackoverflow.com/a/39302994, https://stackoverflow.com/a/45343042
// Recording dimensions; updated in the "canplay" handler below to match
// each clip's intrinsic size.
let width = 320;
let height = 240;
// Canvas whose captured stream supplies the recording's video track.
const canvas = document.createElement("canvas");
// Size the canvas BEFORE configuring the 2D context: assigning
// canvas.width/height resets all 2D context state, so the previous order
// (composite op first, then sizing) silently reverted
// globalCompositeOperation back to "source-over".
canvas.width = width;
canvas.height = height;
const ctx = canvas.getContext("2d");
// "copy" replaces the previous frame's pixels instead of compositing
// over them.
ctx.globalCompositeOperation = "copy";
document.body.appendChild(canvas);
// Playback element each fetched clip is played through while recording.
const video = document.createElement("video");
video.controls = true;
video.width = width;
video.height = height;
document.body.appendChild(video);
let urls = [{
src: "https://upload.wikimedia.org/wikipedia/commons/a/a4/Xacti-AC8EX-Sample_video-001.ogv",
from: 0,
to: 4
}, {
src: "https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=10,20"
}, {
from: 55,
to: 60,
src: "https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4"
}, {
from: 0,
to: 5,
src: "https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4"
}, {
from: 0,
to: 5,
src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerBlazes.mp4"
}, {
from: 0,
to: 5,
src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerJoyrides.mp4"
}, {
src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerMeltdowns.mp4#t=0,6"
}];
// One-shot driver: on the first click, fetch every clip, play each
// fragment through the hidden <video> while recording canvas + audio
// into one webm, then show the stitched result in a new player.
document.querySelector("h1")
.addEventListener('click', async e => {
try {
// `promise` resolves with the recorded Blob once "dataavailable"
// fires; `result` is its externally-held resolver.
let result;
const promise = new Promise(resolve => result = resolve);
// Fetch each clip fully up front and keep the Blob alongside its
// fragment props.
urls = await Promise.all(urls.map(async({...props
}) => {
const {
src
} = props;
const blob = (await (await fetch(src)).blob());
return {
blob, ...props
}
}));
// Video track comes from the canvas; audio from a destination node
// that the media-element source is routed into.
const canvasStream = canvas.captureStream(60);
const context = new AudioContext();
const audioStream = context.createMediaStreamDestination();
const [videoTrack, audioTrack] = [canvasStream.getVideoTracks()[0], audioStream.stream.getAudioTracks()[0]];
// Whichever of the two exposes requestFrame() — browsers have
// disagreed on whether it lives on the stream or the track. Only
// used if the requestFrame() call below is uncommented.
const stream = [canvasStream, videoTrack].find(({
requestFrame: rF
}) => rF);
// Debug logging of track state transitions.
[videoTrack, audioTrack]
.forEach(track => {
track.onmute = e => console.log(e);
track.onunmute = e => console.log(e);
track.onended = e => console.log(e);
});
// The worklet posts a message every render quantum; each message is
// used as a tick to paint the current video frame onto the canvas.
await context.audioWorklet.addModule("script.js");
const aw = new AudioWorkletNode(context, "recorder-processor");
aw.port.onmessage = _ => {
ctx.drawImage(video, 0, 0, width, height);
// stream.requestFrame();
}
context.onstatechange = e => console.log(e, e.target.state);
// Merge the canvas video track into the audio destination's stream
// so a single MediaStream carries both tracks for the recorder.
audioStream.stream.addTrack(videoTrack);
const source = context.createMediaElementSource(video);
source.connect(aw).connect(context.destination);
source.connect(audioStream);
const recorder = new MediaRecorder(audioStream.stream, {
mimeType: "video/webm;codecs=vp8,opus"
});
recorder.addEventListener("error", e => {
console.error(e);
throw e;
});
// recorder.start() is called without a timeslice, so a single
// "dataavailable" after stop() delivers the whole recording.
recorder.addEventListener("dataavailable", e => {
result(e.data);
});
// Tear down tracks, audio graph, and scratch elements once stopped.
recorder.addEventListener("stop", async e => {
console.log(e);
[videoTrack, audioTrack].forEach(track => track.stop());
video.src = "";
video.load();
await context.close();
video.remove();
canvas.remove();
});
// Each time a clip becomes playable: start the recorder once, match
// the canvas to the clip's intrinsic size, then play.
// NOTE(review): reassigning canvas.width/height resets 2D context
// state (e.g. globalCompositeOperation set earlier) — confirm that
// is intended.
video.addEventListener("canplay", async e => {
if (recorder.state === "inactive") {
recorder.start();
}
width = canvas.width = video.videoWidth;
height = canvas.height = video.videoHeight;
await video.play().catch(e => {
throw e;
});
});
// Play the clips sequentially: a media-fragment URL (#t=from,to)
// makes the element pause at `to`, and that "pause" event resolves
// the per-clip promise so the loop advances.
for (let {
from, to, src, blob
}
of urls) {
await new Promise(resolve => {
const url = new URL(src);
// A fragment already encoded in the source URL's hash overrides
// the object's from/to properties.
// NOTE(review): alternation order means `\d+` always wins, so a
// fractional time like "1.5" parses as [1, 5]; reorder to
// /\d+\.\d+|\d+/ if fractional fragment times are needed. All
// current hashes are integers, so behavior is unaffected here.
if (url.hash.length) {
[from, to] = url.hash.match(/\d+|\d+\.\d+/g).map(Number);
}
const blobURL = URL.createObjectURL(blob);
video.addEventListener("pause", e => {
resolve();
}, {
once: true
});
video.src = `${blobURL}#t=${from},${to}`;
})
}
// All fragments played: stopping the recorder fires "dataavailable",
// which resolves `promise` with the final Blob.
if (recorder.state === "recording") {
console.log(recorder.state);
recorder.stop();
}
const blob = await promise;
console.log(blob);
// Present the recorded webm in a fresh, auto-playing player.
const display = document.createElement("video");
display.controls = true;
display.addEventListener("canplaythrough", async e => {
await display.play().catch(e => {
throw e;
})
}, {
once: true
});
display.addEventListener("ended", async e => {
// when captureStream(60) without requestFrame() is used
// duration is greater than 42
// when captureStream(0) with requestFrame() is used
// duration is less than 42
console.log(display.duration);
}, {
once: true
});
display.src = URL.createObjectURL(blob);
document.body.appendChild(display);
} catch (e) {
console.error(e);
}
}, {
once: true
});
</script>
</body>
</html>
// Contents of "script.js", loaded by the page above via
// context.audioWorklet.addModule("script.js").
// Pass-through processor that pings the main thread once per render
// quantum; the page uses each message as a tick to draw the current
// video frame onto the recording canvas.
registerProcessor("recorder-processor", class extends AudioWorkletProcessor {
  constructor(options) {
    super(options);
  }
  process(inputs, outputs, parameters) {
    // Notify the main thread that another render quantum elapsed.
    this.port.postMessage(0);
    const input = inputs[0];
    const output = outputs[0];
    // Copy input to output channel-by-channel. Guard each channel:
    // when the input carries fewer channels than the output (e.g.
    // nothing connected yet, inputs[0] is empty), input[channel] is
    // undefined and calling .set(undefined) would throw a TypeError
    // every render quantum.
    for (let channel = 0; channel < output.length; channel++) {
      if (input[channel]) {
        output[channel].set(input[channel]);
      }
    }
    // Keep the processor alive.
    return true;
  }
});
/* Styles go here */