<!DOCTYPE html>
<html>
<head>
<title>MediaRecorder codecs Blob sizes WebRTC MediaStream input</title>
</head>
<body>
<video id="video" autoplay controls></video>
<script>
let _mediaStream, _audioContext, _worker;
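// Container/codec configurations under test; optionally, unsupported types
// could be skipped up front, e.g.
// mimeTypes.filter(mimeType => MediaRecorder.isTypeSupported(mimeType))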
const mimeTypes = ["video/webm;codecs=vp8", "video/webm;codecs=vp9", "video/x-matroska;codecs=h264"];
const results = Promise.all(
mimeTypes.map(mimeType => {
return import ("./export.js").then(async({
// MediaStream outputting #000000 video frames and audio silence
mediaStream, audioContext, worker
}) => {
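// import() caches the module, so every mimeType run shares one
// MediaStream, AudioContext and Worker; keep references for final cleanup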
if (!_mediaStream) {
_mediaStream = mediaStream;
_audioContext = audioContext;
_worker = worker;
}
let recorder, result;
const video = document.getElementById("video");
const [audioTrack, videoTrack] = ["audio", "video"].map(kind => mediaStream.getTracks().find(({
kind: trackKind
}) => trackKind === kind));
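// Loop the stream through two local RTCPeerConnections so that MediaRecorder
// records the remote (WebRTC-transported) MediaStream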
const fromLocalPeerConnection = new RTCPeerConnection();
const toLocalPeerConnection = new RTCPeerConnection();
const fromConnection = new Promise(resolve => fromLocalPeerConnection.addEventListener("icecandidate", e => {
//console.log("from", e);
// e.candidate is an RTCIceCandidate, or null to signal end-of-candidates;
// addIceCandidate() returns a promise, so handle rejection on it rather
// than with a try/catch that cannot observe the asynchronous failure
resolve(toLocalPeerConnection.addIceCandidate(e.candidate).catch(console.error));
}, {
once: true
}));
const toConnection = new Promise(resolve => toLocalPeerConnection.addEventListener("icecandidate", e => {
//console.log("to", e);
resolve(fromLocalPeerConnection.addIceCandidate(e.candidate).catch(console.error));
}, {
once: true
}));
fromLocalPeerConnection.addEventListener("negotiationneeded", e => {
//console.log(e);
});
toLocalPeerConnection.addEventListener("negotiationneeded", e => {
//console.log(e);
});
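// Resolve with the remote MediaStream once every original track id has
// arrived at the receiving peer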
const tracks = new Promise(resolve => {
let ids = mediaStream.getTracks().map(({
id
}) => id),
i = 0;
toLocalPeerConnection.addEventListener("track", ({
track, streams: [stream]
}) => {
if (ids.includes(track.id) && ++i === ids.length) {
resolve(stream);
}
});
});
// Add the initial audio and video MediaStreamTracks to the RTCPeerConnection, passing the initial MediaStream
const {
sender: audioSender,
receiver: audioReceiver
} = fromLocalPeerConnection.addTransceiver(audioTrack, {
streams: [mediaStream]
});
const {
sender: videoSender,
receiver: videoReceiver
} = fromLocalPeerConnection.addTransceiver(videoTrack, {
streams: [mediaStream]
});
//console.log(audioSender, videoSender, audioReceiver, videoReceiver);
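// Standard offer/answer exchange between the two local peer connections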
const offer = await fromLocalPeerConnection.createOffer();
await fromLocalPeerConnection.setLocalDescription(offer);
await toLocalPeerConnection.setRemoteDescription(offer);
const answer = await toLocalPeerConnection.createAnswer();
await toLocalPeerConnection.setLocalDescription(answer);
await fromLocalPeerConnection.setRemoteDescription(answer);
const stream = await tracks;
video.srcObject = stream;
// If the MediaStream is not set as srcObject, the Blob at the MediaRecorder
// dataavailable event has size 0
const urls = await Promise.all([{
src: "https://upload.wikimedia.org/wikipedia/commons/a/a4/Xacti-AC8EX-Sample_video-001.ogv",
from: 0,
to: 4
}, {
src: "https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=10,20",
from: 10,
to: 20
}, {
from: 55,
to: 60,
src: "https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4"
}, {
from: 0,
to: 5,
src: "https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4"
}, {
from: 0,
to: 5,
src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerBlazes.mp4"
}, {
from: 0,
to: 5,
src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerJoyrides.mp4"
}, {
from: 0,
to: 6,
src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerMeltdowns.mp4#t=0,6"
}].map(async ({
from,
to,
src
}) => {
const response = await fetch(src);
const blob = await response.blob();
const blobURL = URL.createObjectURL(blob);
const url = new URL(src);
//console.log(url.hash);
// fall back to a media fragment built from the from/to fields when the
// source URL does not already carry a #t= hash
return blobURL + (url.hash || `#t=${from},${to}`);
}));
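// Off-DOM <video> element that plays each fetched clip; its captured tracks
// replace the black/silent tracks sent through the peer connection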
const videoStream = document.createElement("video");
videoStream.muted = true;
videoStream.onloadedmetadata = e => {
const {
videoWidth: width,
videoHeight: height
} = e.target;
videoStream.width = video.width = width;
videoStream.height = video.height = height;
}
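// HTMLMediaElement.captureStream() is unprefixed in Chromium-based browsers;
// Firefox exposes mozCaptureStream() instead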
const ms = videoStream.captureStream();
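// As the captured element starts producing tracks, swap them into the
// existing RTCRtpSenders; replaceTrack() avoids renegotiation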
ms.onaddtrack = async e => {
//console.log(e.track.getSettings());
if (e.track.kind === "audio") {
await audioSender.replaceTrack(e.track);
} else {
await videoSender.replaceTrack(e.track);
}
}
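// Play the fragments in sequence: create the recorder on the first "canplay",
// pause it at each "pause", and resume it when the next source can play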
for (const blobURL of urls) {
await new Promise(resolve => {
videoStream.addEventListener("pause", async _ => {
recorder.pause();
resolve();
}, {
once: true
});
videoStream.addEventListener("canplay", async _ => {
if (!recorder) {
await new Promise(resolveResized => {
const handleResize = async _ => {
const {
width, height
} = ms.getVideoTracks()[0].getSettings();
//console.log(video.width, width);
if (video.width === width && video.height === height) {
//console.log(video.width, width);
video.removeEventListener("resize", handleResize);
await new Promise(resolveUnmuted => {
const [track] = mediaStream.getVideoTracks();
let now = performance.now();
//console.log(video.currentTime, track.muted, stream.getVideoTracks()[0].muted, ms.getVideoTracks()[0].muted);
track.addEventListener("unmute", async _ => {
//console.log(_.type, (performance.now() - now) / 1000, video.currentTime, video.width, video.videoWidth, track.getSettings().width);
resolveUnmuted(await videoStream.play());
}, {
once: true
})
});
// Remove the audio track so only video is recorded; the tested mimeTypes
// specify video codecs only
stream.removeTrack(stream.getAudioTracks()[0]);
//console.log(stream.getTracks());
recorder = new MediaRecorder(stream, {
mimeType
});
recorder.onstart = _ => {
//console.log(_.type);
}
result = new Promise(resolve => {
recorder.addEventListener("dataavailable", ({
data
}) => {
resolve(data);
}, {
once: true
})
});
recorder.start();
resolveResized();
}
}
video.addEventListener("resize", handleResize);
});
} else {
await videoStream.play();
recorder.resume();
}
}, {
once: true
});
videoStream.src = blobURL;
});
}
recorder.stop();
let blob = await result;
// console.log(blob.size);
// let blobURL = URL.createObjectURL(blob);
//await audioContext.close();
//worker.terminate();
[stream.getTracks(), ms.getTracks()].flat()
.forEach(track => {
track.enabled = false;
track.stop();
//console.log(track);
});
video.srcObject = null;
videoStream.remove();
return {
mimeType, size: blob.size
}
})
})
);
results.then(results => {
_mediaStream.getTracks().forEach(track => track.stop());
_worker.terminate();
_audioContext.close();
document.body.removeChild(document.body.querySelector("video"));
const pre = document.createElement("pre");
pre.textContent = JSON.stringify(results.sort((a, b) => a.size - b.size), null, 2);
document.body.appendChild(pre);
}, console.error);
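// The rendered result is an array of { mimeType, size } objects sorted by
// ascending Blob size; actual sizes vary by browser and build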
</script>
</body>
</html>
// export.js — builds the MediaStream of #000000 video frames and audio silence imported above
let width = screen.width;
let height = screen.height;
const canvas = globalThis.document.createElement("canvas");
canvas.width = width;
canvas.height = height;
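// A frameRate of 0 means frames are captured only when requestFrame() is
// called on the CanvasCaptureMediaStreamTrack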
const canvasStream = canvas.captureStream(0);
const [videoTrack] = canvasStream.getVideoTracks();
videoTrack.onunmute = e => {
// console.log(e);
}
videoTrack.onmute = e => {
// console.log(e);
}
videoTrack.onended = e => {
// console.log(e);
}
const worker = new Worker("worker.js");
// console.log(canvasStream, videoTrack);
const offscreen = new OffscreenCanvas(width, height);
const offscreenCtx = offscreen.getContext("2d");
offscreenCtx.fillStyle = "#000000";
offscreenCtx.fillRect(0, 0, width, height);
const imageBitmap = offscreen.transferToImageBitmap();
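// Transfer control of the canvas to the worker, which draws the black
// ImageBitmap into it once per audio render quantum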
const osc = canvas.transferControlToOffscreen();
const audioContext = new AudioContext({
sampleRate: 44100
});
const audioStream = audioContext.createMediaStreamDestination();
const [audioTrack] = audioStream.stream.getAudioTracks();
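// The "output-silence" AudioWorkletNode posts a message per 128-sample render
// quantum; each message makes the worker redraw, and the worker's reply
// triggers videoTrack.requestFrame() on this thread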
audioContext.audioWorklet.addModule("audioWorklet.js")
.then(_ => {
const aw = new AudioWorkletNode(audioContext, "output-silence");
aw.connect(audioStream);
aw.connect(audioContext.destination);
aw.port.onmessage = _ => {
worker.postMessage(null);
}
worker.postMessage({
osc, imageBitmap
}, [osc, imageBitmap]);
worker.onmessage = e => {
videoTrack.requestFrame();
};
});
audioStream.stream.addTrack(videoTrack);
const mediaStream = audioStream.stream;
export {
mediaStream, audioContext, worker
};
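// audioWorklet.js — registers the "output-silence" processor loaded via audioWorklet.addModule() above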
class RecorderProcessor extends AudioWorkletProcessor {
constructor(options) {
super(options);
// console.log(globalThis, options.processorOptions);
}
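// Post one message per 128-sample render quantum; returning true keeps the
// processor alive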
process(inputs, outputs) {
this.port.postMessage(null);
return true;
}
}
registerProcessor("output-silence", RecorderProcessor);
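// worker.js — draws the transferred ImageBitmap onto the OffscreenCanvas on each message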
let osc, osctx, imageBitmap, width, height;
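// The first message transfers the OffscreenCanvas and ImageBitmap; every
// message redraws and acknowledges so the main thread can request a frame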
onmessage = ({
data
}) => {
if (!osc) {
({
osc, imageBitmap
} = data);
({width, height} = osc);
osctx = osc.getContext("2d");
}
osctx.clearRect(0, 0, width, height);
osctx.drawImage(imageBitmap, 0, 0);
postMessage(null);
}