<!--
> Muaz Khan - www.MuazKhan.com
> MIT License - www.WebRTC-Experiment.com/licence
> Documentation - github.com/muaz-khan/RecordRTC
> and - RecordRTC.org
-->
<!DOCTYPE html>
<html lang="en">
<head>
<title>Record Mp3/Wav using RecordRTC ® Muaz Khan</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
<link rel="author" type="text/html" href="https://plus.google.com/+MuazKhan">
<meta name="author" content="Muaz Khan">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<script src="script.js"></script>
</head>
<body>
<div id="audio-container"></div>
<button id="btn-start-recording" onclick="clickRecord()">Record</button>
<script>
var htmlRecordObj = {};
var isRecording = false;
var mediaConstraints = {
audio: {
mandatory: {
echoCancellation: false,
googAutoGainControl: false,
googNoiseSuppression: false,
googHighpassFilter: false
},
optional: [{
googAudioMirroring: false
}]
}
};
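// NOTE: the "mandatory"/"optional" wrapper and the "goog*" keys above are
// legacy, Chrome-only constraint names. A standards-based equivalent would
// look roughly like this (a sketch; key support varies by browser):
var standardMediaConstraints = {
audio: {
echoCancellation: false,
autoGainControl: false,
noiseSuppression: false
}
};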
var clickRecord = function() {
if (isRecording) {
stopRecording();
} else {
startRecording();
}
};
var startRecording = function() {
navigator.mediaDevices.getUserMedia(mediaConstraints).then(function(mediaStream) {
var options = {
mimeType: 'audio/wav', // or video/mp4 or audio/ogg
bitsPerSecond: 128000,
bufferSize: 512,
numberOfAudioChannels: 1,
recorderType: StereoAudioRecorder,
disableLogs: true
};
isRecording = true;
htmlRecordObj.recordRTC = RecordRTC(mediaStream, options);
htmlRecordObj.mediaStream = mediaStream;
htmlRecordObj.recordRTC.startRecording();
})
.catch(function(error) {
isRecording = false;
console.log("in catch", error);
});
};
var stopRecording = function() {
htmlRecordObj.recordRTC.stopRecording(function(audioURL) {
isRecording = false;
htmlRecordObj.recordRTC.getDataURL(function(dataURL) {
addAudio(dataURL);
});
htmlRecordObj.mediaStream.getAudioTracks()[0].stop();
htmlRecordObj.recordRTC.clearRecordedData();
htmlRecordObj = {};
});
};
var addAudio = function(url) {
var sound = document.createElement('audio');
sound.id = 'audio-player';
sound.controls = 'controls';
sound.src = url;
sound.type = 'audio/wav';
document.getElementById('audio-container').appendChild(sound);
};
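// A minimal upload sketch (the '/upload' endpoint and the 'audio' field name
// are assumptions, not part of this demo). Call it with a Blob, e.g. from
// htmlRecordObj.recordRTC.getBlob() inside the stopRecording callback.
var uploadRecording = function(blob) {
var formData = new FormData();
formData.append('audio', blob, 'recording.wav');
fetch('/upload', { // hypothetical endpoint
method: 'POST',
body: formData
});
};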
</script>
</body>
</html>
'use strict';
// Last time updated: 2017-03-20 11:50:41 AM UTC
// ________________
// RecordRTC v5.4.1
// Open-Sourced: https://github.com/muaz-khan/RecordRTC
// --------------------------------------------------
// Muaz Khan - www.MuazKhan.com
// MIT License - www.WebRTC-Experiment.com/licence
// --------------------------------------------------
// ____________
// RecordRTC.js
/**
* {@link https://github.com/muaz-khan/RecordRTC|RecordRTC} is a WebRTC JavaScript library for audio/video as well as screen activity recording. It supports Chrome, Firefox, Opera, Android, and Microsoft Edge. Platforms: Linux, Mac and Windows.
* @summary Record audio, video or screen inside the browser.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef RecordRTC
* @class
* @example
* var recorder = RecordRTC(mediaStream or [arrayOfMediaStream], {
* type: 'video', // audio or video or gif or canvas
* recorderType: MediaStreamRecorder || CanvasRecorder || StereoAudioRecorder || Etc
* });
* recorder.startRecording();
* @see For further information:
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
* @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, desiredSampRate: 16000, video: HTMLVideoElement, etc.}
*/
function RecordRTC(mediaStream, config) {
if (!mediaStream) {
throw 'First parameter is required.';
}
config = config || {
type: 'video'
};
config = new RecordRTCConfiguration(mediaStream, config);
// a reference to user's recordRTC object
var self = this;
function startRecording() {
if (!config.disableLogs) {
console.debug('started recording ' + config.type + ' stream.');
}
if (mediaRecorder) {
mediaRecorder.clearRecordedData();
mediaRecorder.record();
setState('recording');
if (self.recordingDuration) {
handleRecordingDuration();
}
return self;
}
initRecorder(function() {
if (self.recordingDuration) {
handleRecordingDuration();
}
});
return self;
}
function initRecorder(initCallback) {
if (initCallback) {
config.initCallback = function() {
initCallback();
initCallback = config.initCallback = null; // initCallback should be invoked only once.
};
}
var Recorder = new GetRecorderType(mediaStream, config);
mediaRecorder = new Recorder(mediaStream, config);
mediaRecorder.record();
setState('recording');
if (!config.disableLogs) {
console.debug('Initialized recorderType:', mediaRecorder.constructor.name, 'for output-type:', config.type);
}
}
function stopRecording(callback) {
if (!mediaRecorder) {
return console.warn(WARNING);
}
if (self.state === 'paused') {
setState('recording');
self.resumeRecording();
setTimeout(function() {
stopRecording(callback);
}, 1);
return;
}
if (self.state !== 'recording') {
if (!config.disableLogs) {
console.warn('Unable to stop the recording. Recording state: ', self.state);
}
return;
}
if (!config.disableLogs) {
console.warn('Stopped recording ' + config.type + ' stream.');
}
if (config.type !== 'gif') {
mediaRecorder.stop(_callback);
} else {
mediaRecorder.stop();
_callback();
}
setState('stopped');
function _callback(__blob) {
Object.keys(mediaRecorder).forEach(function(key) {
if (typeof mediaRecorder[key] === 'function') {
return;
}
self[key] = mediaRecorder[key];
});
var blob = mediaRecorder.blob;
if (!blob) {
if (__blob) {
mediaRecorder.blob = blob = __blob;
} else {
throw 'Recording failed.';
}
}
if (callback) {
var url = URL.createObjectURL(blob);
if (typeof callback.call === 'function') {
callback.call(self, url);
} else {
callback(url);
}
}
if (blob && !config.disableLogs) {
console.debug(blob.type, '->', bytesToSize(blob.size));
}
if (!config.autoWriteToDisk) {
return;
}
getDataURL(function(dataURL) {
var parameter = {};
parameter[config.type + 'Blob'] = dataURL;
DiskStorage.Store(parameter);
});
}
}
function pauseRecording() {
if (!mediaRecorder) {
return console.warn(WARNING);
}
if (self.state !== 'recording') {
if (!config.disableLogs) {
console.warn('Unable to pause the recording. Recording state: ', self.state);
}
return;
}
setState('paused');
mediaRecorder.pause();
if (!config.disableLogs) {
console.debug('Paused recording.');
}
}
function resumeRecording() {
if (!mediaRecorder) {
return console.warn(WARNING);
}
if (self.state !== 'paused') {
if (!config.disableLogs) {
console.warn('Unable to resume the recording. Recording state: ', self.state);
}
return;
}
setState('recording');
// not all libs have this method yet
mediaRecorder.resume();
if (!config.disableLogs) {
console.debug('Resumed recording.');
}
}
function readFile(_blob) {
postMessage(new FileReaderSync().readAsDataURL(_blob));
}
function getDataURL(callback, _mediaRecorder) {
if (!callback) {
throw 'Pass a callback function over getDataURL.';
}
var blob = _mediaRecorder ? _mediaRecorder.blob : (mediaRecorder || {}).blob;
if (!blob) {
if (!config.disableLogs) {
console.warn('Blob encoder has not finished its job yet.');
}
setTimeout(function() {
getDataURL(callback, _mediaRecorder);
}, 1000);
return;
}
if (typeof Worker !== 'undefined' && !navigator.mozGetUserMedia) {
var webWorker = processInWebWorker(readFile);
webWorker.onmessage = function(event) {
callback(event.data);
};
webWorker.postMessage(blob);
} else {
var reader = new FileReader();
reader.readAsDataURL(blob);
reader.onload = function(event) {
callback(event.target.result);
};
}
function processInWebWorker(_function) {
var blob = URL.createObjectURL(new Blob([_function.toString(),
'this.onmessage = function (e) {' + _function.name + '(e.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(blob);
URL.revokeObjectURL(blob);
return worker;
}
}
function handleRecordingDuration(counter) {
counter = counter || 0;
if (self.state === 'paused') {
setTimeout(function() {
handleRecordingDuration(counter);
}, 1000);
return;
}
if (self.state === 'stopped') {
return;
}
if (counter >= self.recordingDuration) {
stopRecording(self.onRecordingStopped);
return;
}
counter += 1000; // 1-second
setTimeout(function() {
handleRecordingDuration(counter);
}, 1000);
}
function setState(state) {
self.state = state;
if (typeof self.onStateChanged.call === 'function') {
self.onStateChanged.call(self, state);
} else {
self.onStateChanged(state);
}
}
var WARNING = 'It seems that "startRecording" is not invoked for ' + config.type + ' recorder.';
var mediaRecorder;
var returnObject = {
/**
* This method starts the recording.
* @method
* @memberof RecordRTC
* @instance
* @example
* var recorder = RecordRTC(mediaStream, {
* type: 'video'
* });
* recorder.startRecording();
*/
startRecording: startRecording,
/**
* This method stops the recording. It is strongly recommended to get "blob" or "URI" inside the callback to make sure all recorders have finished their jobs.
* @param {function} callback - Callback to get the recorded blob.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.stopRecording(function() {
* // use either "this" or "recorder" object; both are identical
* video.src = this.toURL();
* var blob = this.getBlob();
* });
*/
stopRecording: stopRecording,
/**
* This method pauses the recording. You can resume recording using "resumeRecording" method.
* @method
* @memberof RecordRTC
* @instance
* @todo Firefox is unable to pause the recording. Fix it.
* @example
* recorder.pauseRecording(); // pause the recording
* recorder.resumeRecording(); // resume again
*/
pauseRecording: pauseRecording,
/**
* This method resumes the recording.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.pauseRecording(); // first of all, pause the recording
* recorder.resumeRecording(); // now resume it
*/
resumeRecording: resumeRecording,
/**
* This method initializes the recording.
* @method
* @memberof RecordRTC
* @instance
* @todo This method should be deprecated.
* @example
* recorder.initRecorder();
*/
initRecorder: initRecorder,
/**
* Ask RecordRTC to auto-stop the recording after a given duration in milliseconds; this example uses 5 minutes.
* @method
* @memberof RecordRTC
* @instance
* @example
* var fiveMinutes = 5 * 1000 * 60;
* recorder.setRecordingDuration(fiveMinutes, function() {
* var blob = this.getBlob();
* video.src = this.toURL();
* });
*
* // or otherwise
* recorder.setRecordingDuration(fiveMinutes).onRecordingStopped(function() {
* var blob = this.getBlob();
* video.src = this.toURL();
* });
*/
setRecordingDuration: function(recordingDuration, callback) {
if (typeof recordingDuration === 'undefined') {
throw 'recordingDuration is required.';
}
if (typeof recordingDuration !== 'number') {
throw 'recordingDuration must be a number.';
}
self.recordingDuration = recordingDuration;
self.onRecordingStopped = callback || function() {};
return {
onRecordingStopped: function(callback) {
self.onRecordingStopped = callback;
}
};
},
/**
* This method can be used to clear/reset all the recorded data.
* @method
* @memberof RecordRTC
* @instance
* @todo Figure out the difference between "reset" and "clearRecordedData" methods.
* @example
* recorder.clearRecordedData();
*/
clearRecordedData: function() {
if (!mediaRecorder) {
return console.warn(WARNING);
}
mediaRecorder.clearRecordedData();
if (!config.disableLogs) {
console.debug('Cleared old recorded data.');
}
},
/**
* Get the recorded blob. Use this method inside the "stopRecording" callback.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.stopRecording(function() {
* var blob = this.getBlob();
*
* var file = new File([blob], 'filename.webm', {
* type: 'video/webm'
* });
*
* var formData = new FormData();
* formData.append('file', file); // upload "File" object rather than a "Blob"
* uploadToServer(formData);
* });
*/
getBlob: function() {
if (!mediaRecorder) {
return console.warn(WARNING);
}
return mediaRecorder.blob;
},
/**
* Get data-URI instead of Blob.
* @param {function} callback - Callback to get the Data-URI.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.stopRecording(function() {
* recorder.getDataURL(function(dataURI) {
* video.src = dataURI;
* });
* });
*/
getDataURL: getDataURL,
/**
* Get virtual/temporary URL. Usage of this URL is limited to the current tab.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.stopRecording(function() {
* video.src = this.toURL();
* });
*/
toURL: function() {
if (!mediaRecorder) {
return console.warn(WARNING);
}
return URL.createObjectURL(mediaRecorder.blob);
},
/**
* Add extra media-streams to existing recordings.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.addStream(MediaStream);
*/
addStream: function(stream) {
if (!mediaRecorder) {
return console.warn(WARNING);
}
if (typeof mediaRecorder.addStream === 'function') {
mediaRecorder.addStream(stream);
}
},
/**
* Invoke save-as dialog to save the recorded blob into your disk.
* @param {string} fileName - Set your own file name.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.stopRecording(function() {
* this.save('file-name');
*
* // or manually:
* invokeSaveAsDialog(this.getBlob(), 'filename.webm');
* });
*/
save: function(fileName) {
if (!mediaRecorder) {
return console.warn(WARNING);
}
invokeSaveAsDialog(mediaRecorder.blob, fileName);
},
/**
* This method gets a blob from indexed-DB storage.
* @param {function} callback - Callback to get the recorded blob.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.getFromDisk(function(dataURL) {
* video.src = dataURL;
* });
*/
getFromDisk: function(callback) {
if (!mediaRecorder) {
return console.warn(WARNING);
}
RecordRTC.getFromDisk(config.type, callback);
},
/**
* This method appends an array of webp images to the recorded video-blob. It takes an "array" object.
* @type {Array.<Array>}
* @param {Array} arrayOfWebPImages - Array of webp images.
* @method
* @memberof RecordRTC
* @instance
* @todo This method should be deprecated.
* @example
* var arrayOfWebPImages = [];
* arrayOfWebPImages.push({
* duration: index,
* image: 'data:image/webp;base64,...'
* });
* recorder.setAdvertisementArray(arrayOfWebPImages);
*/
setAdvertisementArray: function(arrayOfWebPImages) {
config.advertisement = [];
var length = arrayOfWebPImages.length;
for (var i = 0; i < length; i++) {
config.advertisement.push({
duration: i,
image: arrayOfWebPImages[i]
});
}
},
/**
* It is equivalent to the <code class="str">"recorder.getBlob()"</code> method. Usage of "getBlob" is recommended, though.
* @property {Blob} blob - Recorded Blob can be accessed using this property.
* @memberof RecordRTC
* @instance
* @readonly
* @example
* recorder.stopRecording(function() {
* var blob = this.blob;
*
* // below one is recommended
* var blob = this.getBlob();
* });
*/
blob: null,
/**
* This works only with {recorderType:StereoAudioRecorder}. Use this property on "stopRecording" to verify the encoder's sample-rates.
* @property {number} bufferSize - Buffer-size used to encode the WAV container
* @memberof RecordRTC
* @instance
* @readonly
* @example
* recorder.stopRecording(function() {
* alert('Recorder used this buffer-size: ' + this.bufferSize);
* });
*/
bufferSize: 0,
/**
* This works only with {recorderType:StereoAudioRecorder}. Use this property on "stopRecording" to verify the encoder's sample-rates.
* @property {number} sampleRate - Sample-rates used to encode the WAV container
* @memberof RecordRTC
* @instance
* @readonly
* @example
* recorder.stopRecording(function() {
* alert('Recorder used these sample-rates: ' + this.sampleRate);
* });
*/
sampleRate: 0,
/**
* {recorderType:StereoAudioRecorder} returns ArrayBuffer object.
* @property {ArrayBuffer} buffer - Audio ArrayBuffer, supported only in Chrome.
* @memberof RecordRTC
* @instance
* @readonly
* @example
* recorder.stopRecording(function() {
* var arrayBuffer = this.buffer;
* alert(arrayBuffer.byteLength);
* });
*/
buffer: null,
/**
* This method resets the recorder so that you can reuse a single recorder instance many times.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.reset();
* recorder.startRecording();
*/
reset: function() {
if (mediaRecorder && typeof mediaRecorder.clearRecordedData === 'function') {
mediaRecorder.clearRecordedData();
}
mediaRecorder = null;
setState('inactive');
self.blob = null;
},
/**
* This method is called whenever recorder's state changes. Use this as an "event".
* @property {String} state - A recorder's state can be: recording, paused, stopped or inactive.
* @method
* @memberof RecordRTC
* @instance
* @example
* recorder.onStateChanged = function(state) {
* console.log('Recorder state: ', state);
* };
*/
onStateChanged: function(state) {
if (!config.disableLogs) {
console.info('Recorder state changed:', state);
}
},
/**
* A recorder can have inactive, recording, paused or stopped states.
* @property {String} state - A recorder's state can be: recording, paused, stopped or inactive.
* @memberof RecordRTC
* @static
* @readonly
* @example
* // this looper function will keep you updated about the recorder's states.
* (function looper() {
* document.querySelector('h1').innerHTML = 'Recorder\'s state is: ' + recorder.state;
* if(recorder.state === 'stopped') return; // ignore+stop
* setTimeout(looper, 1000); // update every second
* })();
* recorder.startRecording();
*/
state: 'inactive'
};
if (!this) {
self = returnObject;
return returnObject;
}
// if someone wants to use RecordRTC with the "new" keyword.
for (var prop in returnObject) {
this[prop] = returnObject[prop];
}
self = this;
return returnObject;
}
/**
* This method can be used to get all recorded blobs from IndexedDB storage.
* @param {string} type - 'all' or 'audio' or 'video' or 'gif'
* @param {function} callback - Callback function to get all stored blobs.
* @method
* @memberof RecordRTC
* @example
* RecordRTC.getFromDisk('all', function(dataURL, type){
* if(type === 'audio') { }
* if(type === 'video') { }
* if(type === 'gif') { }
* });
*/
RecordRTC.getFromDisk = function(type, callback) {
if (!callback) {
throw 'callback is mandatory.';
}
console.log('Getting recorded ' + (type === 'all' ? 'blobs' : type + ' blob') + ' from disk!');
DiskStorage.Fetch(function(dataURL, _type) {
if (type !== 'all' && _type === type + 'Blob' && callback) {
callback(dataURL);
}
if (type === 'all' && callback) {
callback(dataURL, _type.replace('Blob', ''));
}
});
};
/**
* This method can be used to store recorded blobs into IndexedDB storage.
* @param {object} options - {audio: Blob, video: Blob, gif: Blob}
* @method
* @memberof RecordRTC
* @example
* RecordRTC.writeToDisk({
* audio: audioBlob,
* video: videoBlob,
* gif : gifBlob
* });
*/
RecordRTC.writeToDisk = function(options) {
console.log('Writing recorded blob(s) to disk!');
options = options || {};
if (options.audio && options.video && options.gif) {
options.audio.getDataURL(function(audioDataURL) {
options.video.getDataURL(function(videoDataURL) {
options.gif.getDataURL(function(gifDataURL) {
DiskStorage.Store({
audioBlob: audioDataURL,
videoBlob: videoDataURL,
gifBlob: gifDataURL
});
});
});
});
} else if (options.audio && options.video) {
options.audio.getDataURL(function(audioDataURL) {
options.video.getDataURL(function(videoDataURL) {
DiskStorage.Store({
audioBlob: audioDataURL,
videoBlob: videoDataURL
});
});
});
} else if (options.audio && options.gif) {
options.audio.getDataURL(function(audioDataURL) {
options.gif.getDataURL(function(gifDataURL) {
DiskStorage.Store({
audioBlob: audioDataURL,
gifBlob: gifDataURL
});
});
});
} else if (options.video && options.gif) {
options.video.getDataURL(function(videoDataURL) {
options.gif.getDataURL(function(gifDataURL) {
DiskStorage.Store({
videoBlob: videoDataURL,
gifBlob: gifDataURL
});
});
});
} else if (options.audio) {
options.audio.getDataURL(function(audioDataURL) {
DiskStorage.Store({
audioBlob: audioDataURL
});
});
} else if (options.video) {
options.video.getDataURL(function(videoDataURL) {
DiskStorage.Store({
videoBlob: videoDataURL
});
});
} else if (options.gif) {
options.gif.getDataURL(function(gifDataURL) {
DiskStorage.Store({
gifBlob: gifDataURL
});
});
}
};
if (typeof module !== 'undefined' /* && !!module.exports*/ ) {
module.exports = RecordRTC;
}
if (typeof define === 'function' && define.amd) {
define('RecordRTC', [], function() {
return RecordRTC;
});
}
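// Loading RecordRTC (illustrative; the npm package name "recordrtc" is an
// assumption here):
// CommonJS: var RecordRTC = require('recordrtc');
// AMD: require(['RecordRTC'], function(RecordRTC) { /* ... */ });
// Browser: a plain <script> tag exposes the global "RecordRTC" function.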
// __________________________
// RecordRTC-Configuration.js
/**
* {@link RecordRTCConfiguration} is an inner/private helper for {@link RecordRTC}.
* @summary It configures the 2nd parameter passed over {@link RecordRTC} and returns a valid "config" object.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef RecordRTCConfiguration
* @class
* @example
* var options = RecordRTCConfiguration(mediaStream, options);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, getNativeBlob:true, etc.}
*/
function RecordRTCConfiguration(mediaStream, config) {
if (config.recorderType && !config.type) {
if (config.recorderType === WhammyRecorder || config.recorderType === CanvasRecorder) {
config.type = 'video';
} else if (config.recorderType === GifRecorder) {
config.type = 'gif';
} else if (config.recorderType === StereoAudioRecorder) {
config.type = 'audio';
} else if (config.recorderType === MediaStreamRecorder) {
if (mediaStream.getAudioTracks().length && mediaStream.getVideoTracks().length) {
config.type = 'video';
} else if (mediaStream.getAudioTracks().length && !mediaStream.getVideoTracks().length) {
config.type = 'audio';
} else if (!mediaStream.getAudioTracks().length && mediaStream.getVideoTracks().length) {
config.type = 'video';
} else {
// config.type = 'UnKnown';
}
}
}
if (typeof MediaStreamRecorder !== 'undefined' && typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype) {
if (!config.mimeType) {
config.mimeType = 'video/webm';
}
if (!config.type) {
config.type = config.mimeType.split('/')[0];
}
if (!config.bitsPerSecond) {
// config.bitsPerSecond = 128000;
}
}
// consider default type=audio
if (!config.type) {
if (config.mimeType) {
config.type = config.mimeType.split('/')[0];
}
if (!config.type) {
config.type = 'audio';
}
}
return config;
}
// __________________
// GetRecorderType.js
/**
* {@link GetRecorderType} is an inner/private helper for {@link RecordRTC}.
* @summary It returns the best recorder-type available for your browser.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef GetRecorderType
* @class
* @example
* var RecorderType = GetRecorderType(options);
* var recorder = new RecorderType(options);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
*/
function GetRecorderType(mediaStream, config) {
var recorder;
// StereoAudioRecorder can work with all three: Edge, Firefox and Chrome
// todo: detect if it is Edge, then auto use: StereoAudioRecorder
if (isChrome || isEdge || isOpera) {
// Media Stream Recording API has not been implemented in Chrome yet;
// that's why the WebAudio API is used to record stereo audio in WAV format
recorder = StereoAudioRecorder;
}
if (typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype && !isChrome) {
recorder = MediaStreamRecorder;
}
// video recorder (in WebM format)
if (config.type === 'video' && (isChrome || isOpera)) {
recorder = WhammyRecorder;
}
// video recorder (in Gif format)
if (config.type === 'gif') {
recorder = GifRecorder;
}
// html2canvas recording!
if (config.type === 'canvas') {
recorder = CanvasRecorder;
}
if (isMediaRecorderCompatible() && recorder !== CanvasRecorder && recorder !== GifRecorder && typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype) {
if ((mediaStream.getVideoTracks && mediaStream.getVideoTracks().length) || (mediaStream.getAudioTracks && mediaStream.getAudioTracks().length)) {
// audio-only recording
if (config.type === 'audio') {
if (typeof MediaRecorder.isTypeSupported === 'function' && MediaRecorder.isTypeSupported('audio/webm')) {
recorder = MediaStreamRecorder;
}
// else recorder = StereoAudioRecorder;
} else {
// video or screen tracks
if (typeof MediaRecorder.isTypeSupported === 'function' && MediaRecorder.isTypeSupported('video/webm')) {
recorder = MediaStreamRecorder;
}
}
}
}
if (config.recorderType) {
recorder = config.recorderType;
}
if (mediaStream instanceof Array && mediaStream.length) {
recorder = MultiStreamRecorder;
}
if (!config.disableLogs && !!recorder && !!recorder.name) {
console.debug('Using recorderType:', recorder.name || recorder.constructor.name);
}
return recorder;
}
// _____________
// MRecordRTC.js
/**
* MRecordRTC runs on top of {@link RecordRTC} to bring multiple recordings into a single place, by providing a simple API.
* @summary MRecordRTC stands for "Multiple-RecordRTC".
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef MRecordRTC
* @class
* @example
* var recorder = new MRecordRTC();
* recorder.addStream(MediaStream);
* recorder.mediaType = {
* audio: true, // or StereoAudioRecorder or MediaStreamRecorder
* video: true, // or WhammyRecorder or MediaStreamRecorder
* gif: true // or GifRecorder
* };
* // mimeType is optional and should be set only in advanced cases.
* recorder.mimeType = {
* audio: 'audio/wav',
* video: 'video/webm',
* gif: 'image/gif'
* };
* recorder.startRecording();
* @see For further information:
* @see {@link https://github.com/muaz-khan/RecordRTC/tree/master/MRecordRTC|MRecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
*/
function MRecordRTC(mediaStream) {
/**
* This method attaches MediaStream object to {@link MRecordRTC}.
* @param {MediaStream} mediaStream - A MediaStream object, either fetched using getUserMedia API, or generated using captureStreamUntilEnded or WebAudio API.
* @method
* @memberof MRecordRTC
* @example
* recorder.addStream(MediaStream);
*/
this.addStream = function(_mediaStream) {
if (_mediaStream) {
mediaStream = _mediaStream;
}
};
/**
* This property can be used to set the recording type, e.g. audio, video, gif, or canvas.
* @property {object} mediaType - {audio: true, video: true, gif: true}
* @memberof MRecordRTC
* @example
* var recorder = new MRecordRTC();
* recorder.mediaType = {
* audio: true, // TRUE or StereoAudioRecorder or MediaStreamRecorder
* video: true, // TRUE or WhammyRecorder or MediaStreamRecorder
* gif : true // TRUE or GifRecorder
* };
*/
this.mediaType = {
audio: true,
video: true
};
/**
* This method starts recording.
* @method
* @memberof MRecordRTC
* @example
* recorder.startRecording();
*/
this.startRecording = function() {
var mediaType = this.mediaType;
var recorderType;
var mimeType = this.mimeType || {
audio: null,
video: null,
gif: null
};
if (typeof mediaType.audio !== 'function' && isMediaRecorderCompatible() && mediaStream.getAudioTracks && !mediaStream.getAudioTracks().length) {
mediaType.audio = false;
}
if (typeof mediaType.video !== 'function' && isMediaRecorderCompatible() && mediaStream.getVideoTracks && !mediaStream.getVideoTracks().length) {
mediaType.video = false;
}
if (typeof mediaType.gif !== 'function' && isMediaRecorderCompatible() && mediaStream.getVideoTracks && !mediaStream.getVideoTracks().length) {
mediaType.gif = false;
}
if (!mediaType.audio && !mediaType.video && !mediaType.gif) {
throw 'MediaStream must have either audio or video tracks.';
}
if (!!mediaType.audio) {
recorderType = null;
if (typeof mediaType.audio === 'function') {
recorderType = mediaType.audio;
}
this.audioRecorder = new RecordRTC(mediaStream, {
type: 'audio',
bufferSize: this.bufferSize,
sampleRate: this.sampleRate,
numberOfAudioChannels: this.numberOfAudioChannels || 2,
disableLogs: this.disableLogs,
recorderType: recorderType,
mimeType: mimeType.audio
});
if (!mediaType.video) {
this.audioRecorder.startRecording();
}
}
if (!!mediaType.video) {
recorderType = null;
if (typeof mediaType.video === 'function') {
recorderType = mediaType.video;
}
var newStream = mediaStream;
if (isMediaRecorderCompatible() && !!mediaType.audio && typeof mediaType.audio === 'function') {
var videoTrack = mediaStream.getVideoTracks()[0];
if (!!navigator.mozGetUserMedia) {
newStream = new MediaStream();
newStream.addTrack(videoTrack);
if (recorderType && recorderType === WhammyRecorder) {
// Firefox does NOT support webp-encoding yet
recorderType = MediaStreamRecorder;
}
} else {
newStream = new MediaStream([videoTrack]);
}
}
this.videoRecorder = new RecordRTC(newStream, {
type: 'video',
video: this.video,
canvas: this.canvas,
frameInterval: this.frameInterval || 10,
disableLogs: this.disableLogs,
recorderType: recorderType,
mimeType: mimeType.video
});
if (!mediaType.audio) {
this.videoRecorder.startRecording();
}
}
if (!!mediaType.audio && !!mediaType.video) {
var self = this;
if (isMediaRecorderCompatible()) {
self.audioRecorder = null;
self.videoRecorder.startRecording();
} else {
self.videoRecorder.initRecorder(function() {
self.audioRecorder.initRecorder(function() {
// Both recorders are ready to record things accurately
self.videoRecorder.startRecording();
self.audioRecorder.startRecording();
});
});
}
}
if (!!mediaType.gif) {
recorderType = null;
if (typeof mediaType.gif === 'function') {
recorderType = mediaType.gif;
}
this.gifRecorder = new RecordRTC(mediaStream, {
type: 'gif',
frameRate: this.frameRate || 200,
quality: this.quality || 10,
disableLogs: this.disableLogs,
recorderType: recorderType,
mimeType: mimeType.gif
});
this.gifRecorder.startRecording();
}
};
/**
* This method stops recording.
* @param {function} callback - Callback function is invoked when all encoders finished their jobs.
* @method
* @memberof MRecordRTC
* @example
* recorder.stopRecording(function(recording){
* var audioBlob = recording.audio;
* var videoBlob = recording.video;
* var gifBlob = recording.gif;
* });
*/
this.stopRecording = function(callback) {
callback = callback || function() {};
if (this.audioRecorder) {
this.audioRecorder.stopRecording(function(blobURL) {
callback(blobURL, 'audio');
});
}
if (this.videoRecorder) {
this.videoRecorder.stopRecording(function(blobURL) {
callback(blobURL, 'video');
});
}
if (this.gifRecorder) {
this.gifRecorder.stopRecording(function(blobURL) {
callback(blobURL, 'gif');
});
}
};
/**
* This method pauses recording.
* @method
* @memberof MRecordRTC
* @example
* recorder.pauseRecording();
*/
this.pauseRecording = function() {
if (this.audioRecorder) {
this.audioRecorder.pauseRecording();
}
if (this.videoRecorder) {
this.videoRecorder.pauseRecording();
}
if (this.gifRecorder) {
this.gifRecorder.pauseRecording();
}
};
/**
* This method resumes recording.
* @method
* @memberof MRecordRTC
* @example
* recorder.resumeRecording();
*/
this.resumeRecording = function() {
if (this.audioRecorder) {
this.audioRecorder.resumeRecording();
}
if (this.videoRecorder) {
this.videoRecorder.resumeRecording();
}
if (this.gifRecorder) {
this.gifRecorder.resumeRecording();
}
};
/**
* This method can be used to manually get all recorded blobs.
* @param {function} callback - All recorded blobs are passed back to the "callback" function.
* @method
* @memberof MRecordRTC
* @example
* recorder.getBlob(function(recording){
* var audioBlob = recording.audio;
* var videoBlob = recording.video;
* var gifBlob = recording.gif;
* });
* // or
* var audioBlob = recorder.getBlob().audio;
* var videoBlob = recorder.getBlob().video;
*/
this.getBlob = function(callback) {
var output = {};
if (this.audioRecorder) {
output.audio = this.audioRecorder.getBlob();
}
if (this.videoRecorder) {
output.video = this.videoRecorder.getBlob();
}
if (this.gifRecorder) {
output.gif = this.gifRecorder.getBlob();
}
if (callback) {
callback(output);
}
return output;
};
/**
* This method can be used to manually get all recorded blobs' DataURLs.
* @param {function} callback - All recorded blobs' DataURLs are passed back to the "callback" function.
* @method
* @memberof MRecordRTC
* @example
* recorder.getDataURL(function(recording){
* var audioDataURL = recording.audio;
* var videoDataURL = recording.video;
* var gifDataURL = recording.gif;
* });
*/
this.getDataURL = function(callback) {
this.getBlob(function(blob) {
if (blob.audio && blob.video) {
getDataURL(blob.audio, function(_audioDataURL) {
getDataURL(blob.video, function(_videoDataURL) {
callback({
audio: _audioDataURL,
video: _videoDataURL
});
});
});
} else if (blob.audio) {
getDataURL(blob.audio, function(_audioDataURL) {
callback({
audio: _audioDataURL
});
});
} else if (blob.video) {
getDataURL(blob.video, function(_videoDataURL) {
callback({
video: _videoDataURL
});
});
}
});
function getDataURL(blob, callback00) {
if (typeof Worker !== 'undefined') {
var webWorker = processInWebWorker(function readFile(_blob) {
postMessage(new FileReaderSync().readAsDataURL(_blob));
});
webWorker.onmessage = function(event) {
callback00(event.data);
};
webWorker.postMessage(blob);
} else {
var reader = new FileReader();
reader.readAsDataURL(blob);
reader.onload = function(event) {
callback00(event.target.result);
};
}
}
function processInWebWorker(_function) {
var blob = URL.createObjectURL(new Blob([_function.toString(),
'this.onmessage = function (e) {' + _function.name + '(e.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(blob);
var url;
if (typeof URL !== 'undefined') {
url = URL;
} else if (typeof webkitURL !== 'undefined') {
url = webkitURL;
} else {
throw 'Neither URL nor webkitURL detected.';
}
url.revokeObjectURL(blob);
return worker;
}
};
/**
* This method can be used to ask {@link MRecordRTC} to write all recorded blobs into IndexedDB storage.
* @method
* @memberof MRecordRTC
* @example
* recorder.writeToDisk();
*/
this.writeToDisk = function() {
RecordRTC.writeToDisk({
audio: this.audioRecorder,
video: this.videoRecorder,
gif: this.gifRecorder
});
};
/**
* This method can be used to invoke a save-as dialog for all recorded blobs.
* @param {object} args - {audio: 'audio-name', video: 'video-name', gif: 'gif-name'}
* @method
* @memberof MRecordRTC
* @example
* recorder.save({
* audio: 'audio-file-name',
* video: 'video-file-name',
* gif : 'gif-file-name'
* });
*/
this.save = function(args) {
args = args || {
audio: true,
video: true,
gif: true
};
if (!!args.audio && this.audioRecorder) {
this.audioRecorder.save(typeof args.audio === 'string' ? args.audio : '');
}
if (!!args.video && this.videoRecorder) {
this.videoRecorder.save(typeof args.video === 'string' ? args.video : '');
}
if (!!args.gif && this.gifRecorder) {
this.gifRecorder.save(typeof args.gif === 'string' ? args.gif : '');
}
};
}
/**
* This method can be used to get all recorded blobs from IndexedDB storage.
* @param {string} type - 'all' or 'audio' or 'video' or 'gif'
* @param {function} callback - Callback function to get all stored blobs.
* @method
* @memberof MRecordRTC
* @example
* MRecordRTC.getFromDisk('all', function(dataURL, type){
* if(type === 'audio') { }
* if(type === 'video') { }
* if(type === 'gif') { }
* });
*/
MRecordRTC.getFromDisk = RecordRTC.getFromDisk;
/**
* This method can be used to store recorded blobs into IndexedDB storage.
* @param {object} options - {audio: Blob, video: Blob, gif: Blob}
* @method
* @memberof MRecordRTC
* @example
* MRecordRTC.writeToDisk({
* audio: audioBlob,
* video: videoBlob,
* gif : gifBlob
* });
*/
MRecordRTC.writeToDisk = RecordRTC.writeToDisk;
if (typeof RecordRTC !== 'undefined') {
RecordRTC.MRecordRTC = MRecordRTC;
}
var browserFakeUserAgent = 'Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45';
(function(that) {
if (!that) {
return;
}
if (typeof window !== 'undefined') {
return;
}
if (typeof global === 'undefined') {
return;
}
global.navigator = {
userAgent: browserFakeUserAgent,
getUserMedia: function() {}
};
if (!global.console) {
global.console = {};
}
if (typeof global.console.debug === 'undefined') {
global.console.debug = global.console.info = global.console.error = global.console.log = global.console.log || function() {
console.log(arguments);
};
}
if (typeof document === 'undefined') {
/*global document:true */
that.document = {};
document.createElement = document.captureStream = document.mozCaptureStream = function() {
var obj = {
getContext: function() {
return obj;
},
play: function() {},
pause: function() {},
drawImage: function() {},
toDataURL: function() {
return '';
}
};
return obj;
};
that.HTMLVideoElement = function() {};
}
if (typeof location === 'undefined') {
/*global location:true */
that.location = {
protocol: 'file:',
href: '',
hash: ''
};
}
if (typeof screen === 'undefined') {
/*global screen:true */
that.screen = {
width: 0,
height: 0
};
}
if (typeof URL === 'undefined') {
/*global URL:true */
that.URL = {
createObjectURL: function() {
return '';
},
revokeObjectURL: function() {
return '';
}
};
}
/*global window:true */
that.window = global;
})(typeof global !== 'undefined' ? global : null);
// _____________________________
// Cross-Browser-Declarations.js
// animation-frame used in WebM recording
/*jshint -W079 */
var requestAnimationFrame = window.requestAnimationFrame;
if (typeof requestAnimationFrame === 'undefined') {
if (typeof webkitRequestAnimationFrame !== 'undefined') {
/*global requestAnimationFrame:true */
requestAnimationFrame = webkitRequestAnimationFrame;
} else if (typeof mozRequestAnimationFrame !== 'undefined') {
/*global requestAnimationFrame:true */
requestAnimationFrame = mozRequestAnimationFrame;
} else if (typeof msRequestAnimationFrame !== 'undefined') {
/*global requestAnimationFrame:true */
requestAnimationFrame = msRequestAnimationFrame;
} else if (typeof requestAnimationFrame === 'undefined') {
// via: https://gist.github.com/paulirish/1579671
var lastTime = 0;
/*global requestAnimationFrame:true */
requestAnimationFrame = function(callback, element) {
var currTime = new Date().getTime();
var timeToCall = Math.max(0, 16 - (currTime - lastTime));
var id = setTimeout(function() {
callback(currTime + timeToCall);
}, timeToCall);
lastTime = currTime + timeToCall;
return id;
};
}
}
/*jshint -W079 */
var cancelAnimationFrame = window.cancelAnimationFrame;
if (typeof cancelAnimationFrame === 'undefined') {
if (typeof webkitCancelAnimationFrame !== 'undefined') {
/*global cancelAnimationFrame:true */
cancelAnimationFrame = webkitCancelAnimationFrame;
} else if (typeof mozCancelAnimationFrame !== 'undefined') {
/*global cancelAnimationFrame:true */
cancelAnimationFrame = mozCancelAnimationFrame;
} else if (typeof msCancelAnimationFrame !== 'undefined') {
/*global cancelAnimationFrame:true */
cancelAnimationFrame = msCancelAnimationFrame;
} else if (typeof cancelAnimationFrame === 'undefined') {
/*global cancelAnimationFrame:true */
cancelAnimationFrame = function(id) {
clearTimeout(id);
};
}
}
// WebAudio API representer
var AudioContext = window.AudioContext;
if (typeof AudioContext === 'undefined') {
if (typeof webkitAudioContext !== 'undefined') {
/*global AudioContext:true */
AudioContext = webkitAudioContext;
}
if (typeof mozAudioContext !== 'undefined') {
/*global AudioContext:true */
AudioContext = mozAudioContext;
}
}
/*jshint -W079 */
var URL = window.URL;
if (typeof URL === 'undefined' && typeof webkitURL !== 'undefined') {
/*global URL:true */
URL = webkitURL;
}
if (typeof navigator !== 'undefined' && typeof navigator.getUserMedia === 'undefined') { // maybe window.navigator?
if (typeof navigator.webkitGetUserMedia !== 'undefined') {
navigator.getUserMedia = navigator.webkitGetUserMedia;
}
if (typeof navigator.mozGetUserMedia !== 'undefined') {
navigator.getUserMedia = navigator.mozGetUserMedia;
}
}
var isEdge = navigator.userAgent.indexOf('Edge') !== -1 && (!!navigator.msSaveBlob || !!navigator.msSaveOrOpenBlob);
var isOpera = !!window.opera || navigator.userAgent.indexOf('OPR/') !== -1;
var isChrome = !isOpera && !isEdge && !!navigator.webkitGetUserMedia;
var MediaStream = window.MediaStream;
if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') {
MediaStream = webkitMediaStream;
}
/*global MediaStream:true */
if (typeof MediaStream !== 'undefined') {
if (!('getVideoTracks' in MediaStream.prototype)) {
MediaStream.prototype.getVideoTracks = function() {
if (!this.getTracks) {
return [];
}
var tracks = [];
this.getTracks().forEach(function(track) {
if (track.kind.toString().indexOf('video') !== -1) {
tracks.push(track);
}
});
return tracks;
};
MediaStream.prototype.getAudioTracks = function() {
if (!this.getTracks) {
return [];
}
var tracks = [];
this.getTracks().forEach(function(track) {
if (track.kind.toString().indexOf('audio') !== -1) {
tracks.push(track);
}
});
return tracks;
};
}
if (!('stop' in MediaStream.prototype)) {
MediaStream.prototype.stop = function() {
this.getAudioTracks().forEach(function(track) {
if (!!track.stop) {
track.stop();
}
});
this.getVideoTracks().forEach(function(track) {
if (!!track.stop) {
track.stop();
}
});
};
}
}
// below function via: http://goo.gl/B3ae8c
/**
* @param {number} bytes - Pass bytes and get a formatted string.
* @returns {string} - formatted string
* @example
* bytesToSize(1024*1024*5) === '5.24 MB'
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/
function bytesToSize(bytes) {
var k = 1000;
var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
if (bytes === 0) {
return '0 Bytes';
}
var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}
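// Usage sketch (k = 1000, i.e. decimal units):
// bytesToSize(0) === '0 Bytes'
// bytesToSize(128000) === '128 KB'
// bytesToSize(1024 * 1024 * 5) === '5.24 MB'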
/**
* @param {Blob} file - File or Blob object. This parameter is required.
* @param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
* @example
* invokeSaveAsDialog(blob or file, [optional] fileName);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/
function invokeSaveAsDialog(file, fileName) {
if (!file) {
throw 'Blob object is required.';
}
if (!file.type) {
try {
file.type = 'video/webm';
} catch (e) {}
}
var fileExtension = (file.type || 'video/webm').split('/')[1];
if (fileName && fileName.indexOf('.') !== -1) {
var splitted = fileName.split('.');
fileName = splitted[0];
fileExtension = splitted[1];
}
var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + fileExtension;
if (typeof navigator.msSaveOrOpenBlob !== 'undefined') {
return navigator.msSaveOrOpenBlob(file, fileFullName);
} else if (typeof navigator.msSaveBlob !== 'undefined') {
return navigator.msSaveBlob(file, fileFullName);
}
var hyperlink = document.createElement('a');
hyperlink.href = URL.createObjectURL(file);
hyperlink.target = '_blank';
hyperlink.download = fileFullName;
if (!!navigator.mozGetUserMedia) {
hyperlink.onclick = function() {
(document.body || document.documentElement).removeChild(hyperlink);
};
(document.body || document.documentElement).appendChild(hyperlink);
}
var evt = new MouseEvent('click', {
view: window,
bubbles: true,
cancelable: true
});
hyperlink.dispatchEvent(evt);
if (!navigator.mozGetUserMedia) {
URL.revokeObjectURL(hyperlink.href);
}
}
// __________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129
// Storage.js
/**
* Storage is a standalone object used by {@link RecordRTC} to store reusable objects e.g. "new AudioContext".
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @example
* Storage.AudioContext === webkitAudioContext
* @property {webkitAudioContext} AudioContext - Keeps a reference to AudioContext object.
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/
var Storage = {};
if (typeof AudioContext !== 'undefined') {
Storage.AudioContext = AudioContext;
} else if (typeof webkitAudioContext !== 'undefined') {
Storage.AudioContext = webkitAudioContext;
}
if (typeof RecordRTC !== 'undefined') {
RecordRTC.Storage = Storage;
}
function isMediaRecorderCompatible() {
var isOpera = !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0;
var isChrome = !!window.chrome && !isOpera;
var isFirefox = typeof window.InstallTrigger !== 'undefined';
if (isFirefox) {
return true;
}
var nVer = navigator.appVersion;
var nAgt = navigator.userAgent;
var fullVersion = '' + parseFloat(navigator.appVersion);
var majorVersion = parseInt(navigator.appVersion, 10);
var nameOffset, verOffset, ix;
if (isChrome || isOpera) {
verOffset = nAgt.indexOf('Chrome');
fullVersion = nAgt.substring(verOffset + 7);
}
// trim the fullVersion string at semicolon/space if present
if ((ix = fullVersion.indexOf(';')) !== -1) {
fullVersion = fullVersion.substring(0, ix);
}
if ((ix = fullVersion.indexOf(' ')) !== -1) {
fullVersion = fullVersion.substring(0, ix);
}
majorVersion = parseInt('' + fullVersion, 10);
if (isNaN(majorVersion)) {
fullVersion = '' + parseFloat(navigator.appVersion);
majorVersion = parseInt(navigator.appVersion, 10);
}
return majorVersion >= 49;
}
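// e.g. isMediaRecorderCompatible() returns true on Firefox, and on
// Chrome/Opera whose major version is >= 49 (per the checks above).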
// ______________________
// MediaStreamRecorder.js
/*
* Implementation of https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
* The MediaRecorder accepts a mediaStream as input source passed from UA. When recorder starts,
* a MediaEncoder will be created and accept the mediaStream as input source.
* Encoder will get the raw data by track data changes, encode it by selected MIME Type, then store the encoded in EncodedBufferCache object.
* The encoded data will be extracted on every timeslice passed from Start function call or by RequestData function.
* Thread model:
* When the recorder starts, it creates a "Media Encoder" thread to read data from MediaEncoder object and store buffer in EncodedBufferCache object.
* Also extract the encoded data and create blobs on every timeslice passed from start function or RequestData function called by UA.
*/
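// A minimal sketch of the native MediaRecorder flow described above.
// Illustrative only; "stream" is an assumed MediaStream, and this snippet is
// not part of RecordRTC itself:
//
// var nativeRecorder = new MediaRecorder(stream, {mimeType: 'video/webm'});
// nativeRecorder.ondataavailable = function(e) {
// // e.data is a Blob holding the data encoded during the elapsed timeslice
// };
// nativeRecorder.start(1000); // fire "dataavailable" roughly every 1000ms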
/**
* MediaStreamRecorder is an abstraction layer for "MediaRecorder API". It is used by {@link RecordRTC} to record MediaStream(s) in Firefox.
* @summary Runs on top of the MediaRecorder API.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef MediaStreamRecorder
* @class
* @example
* var options = {
* mimeType: 'video/webm',
* audioBitsPerSecond : 256 * 8 * 1024,
* videoBitsPerSecond : 256 * 8 * 1024,
* bitsPerSecond: 256 * 8 * 1024, // if this is provided, skip above two
* getNativeBlob: true // by default: it is false
* }
* var recorder = new MediaStreamRecorder(MediaStream, options);
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
*
* // or
* var blob = recorder.blob;
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {disableLogs:true, initCallback: function, mimeType: "video/webm", onAudioProcessStarted: function}
*/
function MediaStreamRecorder(mediaStream, config) {
var self = this;
config = config || {
// bitsPerSecond: 256 * 8 * 1024,
mimeType: 'video/webm'
};
if (config.type === 'audio') {
if (mediaStream.getVideoTracks().length && mediaStream.getAudioTracks().length) {
var stream;
if (!!navigator.mozGetUserMedia) {
stream = new MediaStream();
stream.addTrack(mediaStream.getAudioTracks()[0]);
} else {
// webkitMediaStream
stream = new MediaStream(mediaStream.getAudioTracks());
}
mediaStream = stream;
}
if (!config.mimeType || config.mimeType.toString().toLowerCase().indexOf('audio') === -1) {
config.mimeType = isChrome ? 'audio/webm' : 'audio/ogg';
}
if (config.mimeType && config.mimeType.toString().toLowerCase() !== 'audio/ogg' && !!navigator.mozGetUserMedia) {
// forcing better codecs on Firefox (via #166)
config.mimeType = 'audio/ogg';
}
}
/**
* This method records MediaStream.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.record();
*/
this.record = function() {
self.blob = null;
var recorderHints = config;
if (!config.disableLogs) {
console.log('Passing following config over MediaRecorder API.', recorderHints);
}
if (mediaRecorder) {
// mandatory to make sure Firefox doesn't fail to record streams 3-4 times without reloading the page.
mediaRecorder = null;
}
if (isChrome && !isMediaRecorderCompatible()) {
// to support video-only recording on stable
recorderHints = 'video/vp8';
}
if (typeof MediaRecorder.isTypeSupported === 'function' && recorderHints.mimeType) {
if (!MediaRecorder.isTypeSupported(recorderHints.mimeType)) {
if (!config.disableLogs) {
console.warn('MediaRecorder API seems unable to record mimeType:', recorderHints.mimeType);
}
recorderHints.mimeType = config.type === 'audio' ? 'audio/webm' : 'video/webm';
}
}
// http://dxr.mozilla.org/mozilla-central/source/content/media/MediaRecorder.cpp
// https://wiki.mozilla.org/Gecko:MediaRecorder
// https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
// starting a recording session; which will initiate "Reading Thread"
// "Reading Thread" are used to prevent main-thread blocking scenarios
try {
mediaRecorder = new MediaRecorder(mediaStream, recorderHints);
} catch (e) {
mediaRecorder = new MediaRecorder(mediaStream);
}
if (!MediaRecorder.isTypeSupported && 'canRecordMimeType' in mediaRecorder && mediaRecorder.canRecordMimeType(config.mimeType) === false) {
if (!config.disableLogs) {
console.warn('MediaRecorder API seems unable to record mimeType:', config.mimeType);
}
}
// i.e. stop recording when <video> is paused by the user; and auto restart recording
// when video is resumed. E.g. yourStream.getVideoTracks()[0].muted = true; // it will auto-stop recording.
mediaRecorder.ignoreMutedMedia = config.ignoreMutedMedia || false;
// Dispatching OnDataAvailable Handler
mediaRecorder.ondataavailable = function(e) {
if (self.dontFireOnDataAvailableEvent) {
return;
}
if (!e.data || !e.data.size || e.data.size < 100 || self.blob) {
// make sure that stopRecording always getting fired
// even if there is invalid data
if (self.recordingCallback) {
self.recordingCallback(new Blob([], {
type: recorderHints.mimeType || 'video/webm'
}));
self.recordingCallback = null;
}
return;
}
/**
* @property {Blob} blob - Recorded frames in video/webm blob.
* @memberof MediaStreamRecorder
* @example
* recorder.stop(function() {
* var blob = recorder.blob;
* });
*/
self.blob = config.getNativeBlob ? e.data : new Blob([e.data], {
type: recorderHints.mimeType || 'video/webm'
});
if (self.recordingCallback) {
self.recordingCallback(self.blob);
self.recordingCallback = null;
}
};
mediaRecorder.onerror = function(error) {
if (!config.disableLogs) {
if (error.name === 'InvalidState') {
console.error('The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.');
} else if (error.name === 'OutOfMemory') {
console.error('The UA has exhausted the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.');
} else if (error.name === 'IllegalStreamModification') {
console.error('A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.');
} else if (error.name === 'OtherRecordingError') {
console.error('Used for a fatal error other than those listed above. User agents SHOULD provide as much additional information as possible in the message attribute.');
} else if (error.name === 'GenericError') {
console.error('The UA cannot provide the codec or recording option that has been requested.', error);
} else {
console.error('MediaRecorder Error', error);
}
}
(function looper() {
if (!self.manuallyStopped && mediaRecorder && mediaRecorder.state === 'inactive') {
// 10 minutes, enough?
mediaRecorder.start(10 * 60 * 1000);
return;
}
setTimeout(looper, 1000);
})();
// When the stream is "ended" set recording to 'inactive'
// and stop gathering data. Callers should not rely on
// exactness of the timeSlice value, especially
// if the timeSlice value is small. Callers should
// consider timeSlice as a minimum value
if (mediaRecorder.state !== 'inactive' && mediaRecorder.state !== 'stopped') {
mediaRecorder.stop();
}
};
// void start(optional long mTimeSlice)
// The interval of passing encoded data from EncodedBufferCache to onDataAvailable
// handler. "mTimeSlice < 0" means Session object does not push encoded data to
// onDataAvailable; instead, it passively waits for the client side to pull encoded data
// by calling requestData API.
mediaRecorder.start(3.6e+6); // default is 60 minutes; enough?
// Start recording. If timeSlice has been provided, mediaRecorder will
// raise a dataavailable event containing the Blob of collected data on every timeSlice milliseconds.
// If timeSlice isn't provided, UA should call the RequestData to obtain the Blob data, also set the mTimeSlice to zero.
if (config.onAudioProcessStarted) {
config.onAudioProcessStarted();
}
if (config.initCallback) {
config.initCallback();
}
};
/**
* This method stops recording MediaStream.
* @param {function} callback - Callback function used to pass the recorded blob back to the caller.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
*/
this.stop = function(callback) {
self.manuallyStopped = true; // used inside the mediaRecorder.onerror
if (!mediaRecorder) {
return;
}
this.recordingCallback = function(blob) {
mediaRecorder = null;
if (callback) {
callback(blob);
}
};
// mediaRecorder.state === 'recording' means that media recorder is associated with "session"
// mediaRecorder.state === 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted.
if (mediaRecorder.state === 'recording') {
// "stop" method auto invokes "requestData"!
// mediaRecorder.requestData();
mediaRecorder.stop();
}
};
/**
* This method pauses the recording process.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.pause();
*/
this.pause = function() {
if (!mediaRecorder) {
return;
}
if (mediaRecorder.state === 'recording') {
mediaRecorder.pause();
}
};
/**
* This method resumes the recording process.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.resume();
*/
this.resume = function() {
if (this.dontFireOnDataAvailableEvent) {
this.dontFireOnDataAvailableEvent = false;
var disableLogs = config.disableLogs;
config.disableLogs = true;
this.record();
config.disableLogs = disableLogs;
return;
}
if (!mediaRecorder) {
return;
}
if (mediaRecorder.state === 'paused') {
mediaRecorder.resume();
}
};
/**
* This method resets currently recorded data.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.clearRecordedData();
*/
this.clearRecordedData = function() {
if (!mediaRecorder) {
return;
}
this.pause();
this.dontFireOnDataAvailableEvent = true;
this.stop();
};
// Reference to "MediaRecorder" object
var mediaRecorder;
function isMediaStreamActive() {
if ('active' in mediaStream) {
if (!mediaStream.active) {
return false;
}
} else if ('ended' in mediaStream) { // old hack
if (mediaStream.ended) {
return false;
}
}
return true;
}
var self = this;
// this method checks if media stream is stopped
// or any track is ended.
(function looper() {
if (!mediaRecorder) {
return;
}
if (isMediaStreamActive() === false) {
if (!config.disableLogs) {
console.log('MediaStream seems stopped.');
}
self.stop();
return;
}
setTimeout(looper, 1000); // check every second
})();
}
if (typeof RecordRTC !== 'undefined') {
RecordRTC.MediaStreamRecorder = MediaStreamRecorder;
}
// source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js
// https://github.com/mattdiamond/Recorderjs#license-mit
// ______________________
// StereoAudioRecorder.js
/**
* StereoAudioRecorder is a standalone class used by {@link RecordRTC} to bring "stereo" audio-recording to Chrome.
* @summary JavaScript standalone object for stereo audio recording.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef StereoAudioRecorder
* @class
* @example
* var recorder = new StereoAudioRecorder(MediaStream, {
* sampleRate: 44100,
* bufferSize: 4096
* });
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {sampleRate: 44100, bufferSize: 4096, numberOfAudioChannels: 1, etc.}
*/
function StereoAudioRecorder(mediaStream, config) {
if (!mediaStream.getAudioTracks().length) {
throw 'Your stream has no audio tracks.';
}
config = config || {};
var self = this;
// variables
var leftchannel = [];
var rightchannel = [];
var recording = false;
var recordingLength = 0;
var jsAudioNode;
var numberOfAudioChannels = 2;
/**
* Set sample rates such as 8K or 16K. Reference: http://stackoverflow.com/a/28977136/552182
* @property {number} desiredSampRate - Desired sample-rate in Hz, e.g. 16000 for 16 kHz.
* @memberof StereoAudioRecorder
* @instance
* @example
* var recorder = StereoAudioRecorder(mediaStream, {
* desiredSampRate: 16 * 1000 // i.e. 16 kHz
* });
*/
var desiredSampRate = config.desiredSampRate;
// backward compatibility
if (config.leftChannel === true) {
numberOfAudioChannels = 1;
}
if (config.numberOfAudioChannels === 1) {
numberOfAudioChannels = 1;
}
if (!config.disableLogs) {
console.debug('StereoAudioRecorder will record this many channels:', numberOfAudioChannels);
}
function isMediaStreamActive() {
if ('active' in mediaStream) {
if (!mediaStream.active) {
return false;
}
} else if ('ended' in mediaStream) { // old hack
if (mediaStream.ended) {
return false;
}
}
return true;
}
/**
* This method records MediaStream.
* @method
* @memberof StereoAudioRecorder
* @example
* recorder.record();
*/
this.record = function() {
if (isMediaStreamActive() === false) {
throw 'Please make sure MediaStream is active.';
}
// reset the buffers for the new recording
leftchannel.length = rightchannel.length = 0;
recordingLength = 0;
if (audioInput) {
audioInput.connect(jsAudioNode);
}
// keep this line commented out to prevent the microphone's own audio
// from being played back through the speakers
// jsAudioNode.connect(context.destination);
isAudioProcessStarted = isPaused = false;
recording = true;
};
function mergeLeftRightBuffers(config, callback) {
function mergeAudioBuffers(config, cb) {
var numberOfAudioChannels = config.numberOfAudioChannels;
// todo: "slice(0)" --- is it causes loop? Should be removed?
var leftBuffers = config.leftBuffers.slice(0);
var rightBuffers = config.rightBuffers.slice(0);
var sampleRate = config.sampleRate;
var internalInterleavedLength = config.internalInterleavedLength;
var desiredSampRate = config.desiredSampRate;
if (numberOfAudioChannels === 2) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate);
}
}
if (numberOfAudioChannels === 1) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
}
}
// set sample rate as desired sample rate
if (desiredSampRate) {
sampleRate = desiredSampRate;
}
// for changing the sampling rate, reference:
// http://stackoverflow.com/a/28977136/552182
function interpolateArray(data, newSampleRate, oldSampleRate) {
var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
var newData = [];
var springFactor = (data.length - 1) / (fitCount - 1);
newData[0] = data[0]; // for new allocation
for (var i = 1; i < fitCount - 1; i++) {
var tmp = i * springFactor;
var before = Math.floor(tmp);
var after = Math.ceil(tmp);
var atPoint = tmp - before;
newData[i] = linearInterpolate(data[before], data[after], atPoint);
}
newData[fitCount - 1] = data[data.length - 1]; // for new allocation
return newData;
}
function linearInterpolate(before, after, atPoint) {
return before + (after - before) * atPoint;
}
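// Worked example (illustrative numbers only): downsampling one second of
// 44100 Hz audio to 16000 Hz gives fitCount = round(44100 * 16000 / 44100)
// = 16000 output samples; output sample i maps back to position
// i * springFactor (~ i * 2.756) in the source array and is linearly
// interpolated between the two nearest source samples.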
function mergeBuffers(channelBuffer, rLength) {
var result = new Float64Array(rLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float64Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
// interleave both channels together
var interleaved;
if (numberOfAudioChannels === 2) {
interleaved = interleave(leftBuffers, rightBuffers);
}
if (numberOfAudioChannels === 1) {
interleaved = leftBuffers;
}
var interleavedLength = interleaved.length;
// create wav file
var resultingBufferLength = 44 + interleavedLength * 2;
var buffer = new ArrayBuffer(resultingBufferLength);
var view = new DataView(buffer);
// RIFF chunk descriptor/identifier
writeUTFBytes(view, 0, 'RIFF');
// RIFF chunk length
view.setUint32(4, 44 + interleavedLength * 2, true);
// RIFF type
writeUTFBytes(view, 8, 'WAVE');
// format chunk identifier
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// format chunk length
view.setUint32(16, 16, true);
// sample format (raw)
view.setUint16(20, 1, true);
// channel count (1 = mono, 2 = stereo)
view.setUint16(22, numberOfAudioChannels, true);
// sample rate
view.setUint32(24, sampleRate, true);
// byte rate (sample rate * block align)
view.setUint32(28, sampleRate * numberOfAudioChannels * 2, true);
// block align (channel count * bytes per sample)
view.setUint16(32, numberOfAudioChannels * 2, true);
// bits per sample
view.setUint16(34, 16, true);
// data sub-chunk
// data chunk identifier
writeUTFBytes(view, 36, 'data');
// data chunk length
view.setUint32(40, interleavedLength * 2, true);
// write the PCM samples
var lng = interleavedLength;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
if (cb) {
return cb({
buffer: buffer,
view: view
});
}
postMessage({
buffer: buffer,
view: view
});
}
if (!isChrome) {
// it's Microsoft Edge
mergeAudioBuffers(config, function(data) {
callback(data.buffer, data.view);
});
return;
}
var webWorker = processInWebWorker(mergeAudioBuffers);
webWorker.onmessage = function(event) {
callback(event.data.buffer, event.data.view);
// release memory
URL.revokeObjectURL(webWorker.workerURL);
};
webWorker.postMessage(config);
}
function processInWebWorker(_function) {
var workerURL = URL.createObjectURL(new Blob([_function.toString(),
';this.onmessage = function (e) {' + _function.name + '(e.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(workerURL);
worker.workerURL = workerURL;
return worker;
}
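// Illustration (not part of the library): for mergeAudioBuffers, the blob-URL
// worker source generated above is the function's own text followed by
//     ;this.onmessage = function (e) { mergeAudioBuffers(e.data); }
// so webWorker.postMessage(config) runs the merge off the UI thread and the
// result comes back through postMessage({buffer: ..., view: ...}).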
/**
* This method stops recording MediaStream.
* @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
* @method
* @memberof StereoAudioRecorder
* @example
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
*/
this.stop = function(callback) {
// stop recording
recording = false;
// to make sure onaudioprocess stops firing
// audioInput.disconnect();
mergeLeftRightBuffers({
desiredSampRate: desiredSampRate,
sampleRate: sampleRate,
numberOfAudioChannels: numberOfAudioChannels,
internalInterleavedLength: recordingLength,
leftBuffers: leftchannel,
rightBuffers: numberOfAudioChannels === 1 ? [] : rightchannel
}, function(buffer, view) {
/**
* @property {Blob} blob - The recorded blob object.
* @memberof StereoAudioRecorder
* @example
* recorder.stop(function(){
* var blob = recorder.blob;
* });
*/
self.blob = new Blob([view], {
type: 'audio/wav'
});
/**
* @property {ArrayBuffer} buffer - The recorded buffer object.
* @memberof StereoAudioRecorder
* @example
* recorder.stop(function(){
* var buffer = recorder.buffer;
* });
*/
// expose the actual recorded bytes (not an empty buffer of the same size)
self.buffer = view.buffer;
/**
* @property {DataView} view - The recorded data-view object.
* @memberof StereoAudioRecorder
* @example
* recorder.stop(function(){
* var view = recorder.view;
* });
*/
self.view = view;
self.sampleRate = desiredSampRate || sampleRate;
self.bufferSize = bufferSize;
// recorded audio length
self.length = recordingLength;
if (callback) {
callback();
}
isAudioProcessStarted = false;
});
};
if (!Storage.AudioContextConstructor) {
Storage.AudioContextConstructor = new Storage.AudioContext();
}
var context = Storage.AudioContextConstructor;
// creates an audio node from the microphone incoming stream
var audioInput = context.createMediaStreamSource(mediaStream);
var legalBufferValues = [0, 256, 512, 1024, 2048, 4096, 8192, 16384];
/**
* From the spec: This value controls how frequently the audioprocess event is
* dispatched and how many sample-frames need to be processed each call.
* Lower values for buffer size will result in a lower (better) latency.
* Higher values will be necessary to avoid audio breakup and glitches.
* The size of the buffer (in sample-frames) which needs to
* be processed each time onprocessaudio is called.
* Legal values are (256, 512, 1024, 2048, 4096, 8192, 16384).
* @property {number} bufferSize - Buffer-size for how frequently the audioprocess event is dispatched.
* @memberof StereoAudioRecorder
* @example
* recorder = new StereoAudioRecorder(mediaStream, {
* bufferSize: 4096
* });
*/
// "0" means, let chrome decide the most accurate buffer-size for current platform.
var bufferSize = typeof config.bufferSize === 'undefined' ? 4096 : config.bufferSize;
if (legalBufferValues.indexOf(bufferSize) === -1) {
if (!config.disableLogs) {
console.warn('Legal values for buffer-size are ' + JSON.stringify(legalBufferValues, null, '\t'));
}
}
if (context.createJavaScriptNode) {
jsAudioNode = context.createJavaScriptNode(bufferSize, numberOfAudioChannels, numberOfAudioChannels);
} else if (context.createScriptProcessor) {
jsAudioNode = context.createScriptProcessor(bufferSize, numberOfAudioChannels, numberOfAudioChannels);
} else {
throw 'WebAudio API is not supported in this browser.';
}
// connect the microphone source to the script processor
audioInput.connect(jsAudioNode);
if (!config.bufferSize) {
bufferSize = jsAudioNode.bufferSize; // device buffer-size
}
/**
* The sample rate (in sample-frames per second) at which the
* AudioContext handles audio. It is assumed that all AudioNodes
* in the context run at this rate. In making this assumption,
* sample-rate converters or "varispeed" processors are not supported
* in real-time processing.
* The sampleRate parameter describes the sample-rate of the
* linear PCM audio data in the buffer in sample-frames per second.
* An implementation must support sample-rates in at least
* the range 22050 to 96000.
* @property {number} sampleRate - Sample-rate (in sample-frames per second) at which the AudioContext handles audio.
* @memberof StereoAudioRecorder
* @example
* recorder = new StereoAudioRecorder(mediaStream, {
* sampleRate: 44100
* });
*/
var sampleRate = typeof config.sampleRate !== 'undefined' ? config.sampleRate : context.sampleRate || 44100;
if (sampleRate < 22050 || sampleRate > 96000) {
// Ref: http://stackoverflow.com/a/26303918/552182
if (!config.disableLogs) {
console.warn('sample-rate must be in the range 22050 to 96000.');
}
}
if (!config.disableLogs) {
console.log('sample-rate', sampleRate);
console.log('buffer-size', bufferSize);
if (config.desiredSampRate) {
console.log('Desired sample-rate', config.desiredSampRate);
}
}
var isPaused = false;
/**
* This method pauses the recording process.
* @method
* @memberof StereoAudioRecorder
* @example
* recorder.pause();
*/
this.pause = function() {
isPaused = true;
};
/**
* This method resumes the recording process.
* @method
* @memberof StereoAudioRecorder
* @example
* recorder.resume();
*/
this.resume = function() {
if (isMediaStreamActive() === false) {
throw 'Please make sure MediaStream is active.';
}
if (!recording) {
if (!config.disableLogs) {
console.info('Looks like the recording has been restarted.');
}
this.record();
return;
}
isPaused = false;
};
/**
* This method resets currently recorded data.
* @method
* @memberof StereoAudioRecorder
* @example
* recorder.clearRecordedData();
*/
this.clearRecordedData = function() {
this.pause();
leftchannel.length = rightchannel.length = 0;
recordingLength = 0;
};
var isAudioProcessStarted = false;
function onAudioProcessDataAvailable(e) {
if (isPaused) {
return;
}
if (isMediaStreamActive() === false) {
if (!config.disableLogs) {
console.log('MediaStream seems stopped.');
}
jsAudioNode.disconnect();
recording = false;
}
if (!recording) {
audioInput.disconnect();
return;
}
/**
* This callback is invoked on the first "onaudioprocess" event.
* @method {function} onAudioProcessStarted
* @memberof StereoAudioRecorder
* @example
* var recorder = new StereoAudioRecorder(mediaStream, {
*     onAudioProcessStarted: function() { }
* });
*/
if (!isAudioProcessStarted) {
isAudioProcessStarted = true;
if (config.onAudioProcessStarted) {
config.onAudioProcessStarted();
}
if (config.initCallback) {
config.initCallback();
}
}
var left = e.inputBuffer.getChannelData(0);
// we clone the samples
leftchannel.push(new Float32Array(left));
if (numberOfAudioChannels === 2) {
var right = e.inputBuffer.getChannelData(1);
rightchannel.push(new Float32Array(right));
}
recordingLength += bufferSize;
}
jsAudioNode.onaudioprocess = onAudioProcessDataAvailable;
// the script processor must be connected to the destination for
// "onaudioprocess" to keep firing in Chrome; it outputs silence,
// so the microphone is not echoed through the speakers
jsAudioNode.connect(context.destination);
}
if (typeof RecordRTC !== 'undefined') {
RecordRTC.StereoAudioRecorder = StereoAudioRecorder;
}
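// A minimal usage sketch (not part of the library; `mediaStream` is assumed
// to be an active microphone stream): record mono PCM and resample the
// resulting WAV down to 16 kHz.
//
// var stereoRecorder = new StereoAudioRecorder(mediaStream, {
//     numberOfAudioChannels: 1,
//     desiredSampRate: 16000, // resampled via interpolateArray
//     bufferSize: 4096
// });
// stereoRecorder.record();
// stereoRecorder.stop(function() {
//     var wavBlob = stereoRecorder.blob; // audio/wav
// });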
// _________________
// CanvasRecorder.js
/**
* CanvasRecorder is a standalone class used by {@link RecordRTC} to bring HTML5-Canvas recording into video WebM. It uses the HTML2Canvas library and runs on top of {@link Whammy}.
* @summary HTML2Canvas recording into video WebM.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef CanvasRecorder
* @class
* @example
* var recorder = new CanvasRecorder(htmlElement, { disableLogs: true });
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {HTMLElement} htmlElement - querySelector/getElementById/getElementsByTagName[0]/etc.
* @param {object} config - {disableLogs:true, initCallback: function}
*/
function CanvasRecorder(htmlElement, config) {
if (typeof html2canvas === 'undefined' && htmlElement.nodeName.toLowerCase() !== 'canvas') {
throw 'Please link: https://cdn.webrtc-experiment.com/screenshot.js';
}
config = config || {};
if (!config.frameInterval) {
config.frameInterval = 10;
}
// via DetectRTC.js
var isCanvasSupportsStreamCapturing = false;
['captureStream', 'mozCaptureStream', 'webkitCaptureStream'].forEach(function(item) {
if (item in document.createElement('canvas')) {
isCanvasSupportsStreamCapturing = true;
}
});
var _isChrome = (!!window.webkitRTCPeerConnection || !!window.webkitGetUserMedia) && !!window.chrome;
var chromeVersion = 50;
var matchArray = navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./);
if (_isChrome && matchArray && matchArray[2]) {
chromeVersion = parseInt(matchArray[2], 10);
}
if (_isChrome && chromeVersion < 52) {
isCanvasSupportsStreamCapturing = false;
}
var globalCanvas, mediaStreamRecorder;
if (isCanvasSupportsStreamCapturing) {
if (!config.disableLogs) {
console.debug('Your browser supports both the MediaRecorder API and canvas.captureStream!');
}
if (htmlElement instanceof HTMLCanvasElement) {
globalCanvas = htmlElement;
} else if (htmlElement instanceof CanvasRenderingContext2D) {
globalCanvas = htmlElement.canvas;
} else {
throw 'Please pass either HTMLCanvasElement or CanvasRenderingContext2D.';
}
} else if (!!navigator.mozGetUserMedia) {
if (!config.disableLogs) {
console.error('Canvas recording is NOT supported in Firefox.');
}
}
var isRecording;
/**
* This method records Canvas.
* @method
* @memberof CanvasRecorder
* @example
* recorder.record();
*/
this.record = function() {
isRecording = true;
if (isCanvasSupportsStreamCapturing) {
// CanvasCaptureMediaStream
var canvasMediaStream;
if ('captureStream' in globalCanvas) {
canvasMediaStream = globalCanvas.captureStream(25); // 25 FPS
} else if ('mozCaptureStream' in globalCanvas) {
canvasMediaStream = globalCanvas.mozCaptureStream(25);
} else if ('webkitCaptureStream' in globalCanvas) {
canvasMediaStream = globalCanvas.webkitCaptureStream(25);
}
try {
var mdStream = new MediaStream();
mdStream.addTrack(canvasMediaStream.getVideoTracks()[0]);
canvasMediaStream = mdStream;
} catch (e) {}
if (!canvasMediaStream) {
throw 'captureStream API is NOT available.';
}
// Note: Jan 18, 2016 status is that,
// Firefox MediaRecorder API can't record CanvasCaptureMediaStream object.
mediaStreamRecorder = new MediaStreamRecorder(canvasMediaStream, {
mimeType: 'video/webm'
});
mediaStreamRecorder.record();
} else {
whammy.frames = [];
lastTime = new Date().getTime();
drawCanvasFrame();
}
if (config.initCallback) {
config.initCallback();
}
};
this.getWebPImages = function(callback) {
if (htmlElement.nodeName.toLowerCase() !== 'canvas') {
callback();
return;
}
var framesLength = whammy.frames.length;
whammy.frames.forEach(function(frame, idx) {
var framesRemaining = framesLength - idx;
if (!config.disableLogs) {
console.debug(framesRemaining + '/' + framesLength + ' frames remaining');
}
if (config.onEncodingCallback) {
config.onEncodingCallback(framesRemaining, framesLength);
}
var webp = frame.image.toDataURL('image/webp', 1);
whammy.frames[idx].image = webp;
});
if (!config.disableLogs) {
console.debug('Generating WebM');
}
callback();
};
/**
* This method stops recording Canvas.
* @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
* @method
* @memberof CanvasRecorder
* @example
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
*/
this.stop = function(callback) {
isRecording = false;
var that = this;
if (isCanvasSupportsStreamCapturing && mediaStreamRecorder) {
mediaStreamRecorder.stop(callback);
return;
}
this.getWebPImages(function() {
/**
* @property {Blob} blob - Recorded frames in video/webm blob.
* @memberof CanvasRecorder
* @example
* recorder.stop(function() {
* var blob = recorder.blob;
* });
*/
whammy.compile(function(blob) {
if (!config.disableLogs) {
console.debug('Recording finished!');
}
that.blob = blob;
if (that.blob.forEach) {
// the worker may hand back an array of buffers; wrap them in a Blob
that.blob = new Blob(that.blob, {
type: 'video/webm'
});
}
if (callback) {
callback(that.blob);
}
whammy.frames = [];
});
});
};
var isPausedRecording = false;
/**
* This method pauses the recording process.
* @method
* @memberof CanvasRecorder
* @example
* recorder.pause();
*/
this.pause = function() {
isPausedRecording = true;
if (mediaStreamRecorder instanceof MediaStreamRecorder) {
mediaStreamRecorder.pause();
return;
}
};
/**
* This method resumes the recording process.
* @method
* @memberof CanvasRecorder
* @example
* recorder.resume();
*/
this.resume = function() {
isPausedRecording = false;
if (mediaStreamRecorder instanceof MediaStreamRecorder) {
mediaStreamRecorder.resume();
return;
}
if (!isRecording) {
this.record();
}
};
/**
* This method resets currently recorded data.
* @method
* @memberof CanvasRecorder
* @example
* recorder.clearRecordedData();
*/
this.clearRecordedData = function() {
this.pause();
whammy.frames = [];
};
function cloneCanvas() {
//create a new canvas
var newCanvas = document.createElement('canvas');
var context = newCanvas.getContext('2d');
//set dimensions
newCanvas.width = htmlElement.width;
newCanvas.height = htmlElement.height;
//apply the old canvas to the new one
context.drawImage(htmlElement, 0, 0);
//return the new canvas
return newCanvas;
}
function drawCanvasFrame() {
if (isPausedRecording) {
lastTime = new Date().getTime();
return setTimeout(drawCanvasFrame, 500);
}
if (htmlElement.nodeName.toLowerCase() === 'canvas') {
var duration = new Date().getTime() - lastTime;
// via #206, by Jack i.e. @Seymourr
lastTime = new Date().getTime();
whammy.frames.push({
image: cloneCanvas(),
duration: duration
});
if (isRecording) {
setTimeout(drawCanvasFrame, config.frameInterval);
}
return;
}
html2canvas(htmlElement, {
grabMouse: typeof config.showMousePointer === 'undefined' || config.showMousePointer,
onrendered: function(canvas) {
var duration = new Date().getTime() - lastTime;
if (!duration) {
return setTimeout(drawCanvasFrame, config.frameInterval);
}
// via #206, by Jack i.e. @Seymourr
lastTime = new Date().getTime();
whammy.frames.push({
image: canvas.toDataURL('image/webp', 1),
duration: duration
});
if (isRecording) {
setTimeout(drawCanvasFrame, config.frameInterval);
}
}
});
}
var lastTime = new Date().getTime();
var whammy = new Whammy.Video(100);
}
if (typeof RecordRTC !== 'undefined') {
RecordRTC.CanvasRecorder = CanvasRecorder;
}
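// A minimal usage sketch (not part of the library; assumes a <canvas> element
// with id "my-canvas" that is repainted elsewhere, and a `video` element):
//
// var canvasRecorder = new CanvasRecorder(document.getElementById('my-canvas'), {
//     disableLogs: true,
//     frameInterval: 10
// });
// canvasRecorder.record();
// canvasRecorder.stop(function(blob) {
//     video.src = URL.createObjectURL(blob); // video/webm
// });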
// _________________
// WhammyRecorder.js
/**
* WhammyRecorder is a standalone class used by {@link RecordRTC} to bring video recording to Chrome. It runs on top of {@link Whammy}.
* @summary Video recording feature in Chrome.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef WhammyRecorder
* @class
* @example
* var recorder = new WhammyRecorder(mediaStream);
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {disableLogs: true, initCallback: function, video: HTMLVideoElement, etc.}
*/
function WhammyRecorder(mediaStream, config) {
config = config || {};
if (!config.frameInterval) {
config.frameInterval = 10;
}
if (!config.disableLogs) {
console.log('Using frames-interval:', config.frameInterval);
}
/**
* This method records video.
* @method
* @memberof WhammyRecorder
* @example
* recorder.record();
*/
this.record = function() {
if (!config.width) {
config.width = 320;
}
if (!config.height) {
config.height = 240;
}
if (!config.video) {
config.video = {
width: config.width,
height: config.height
};
}
if (!config.canvas) {
config.canvas = {
width: config.width,
height: config.height
};
}
canvas.width = config.canvas.width || 320;
canvas.height = config.canvas.height || 240;
context = canvas.getContext('2d');
// setting defaults
if (config.video && config.video instanceof HTMLVideoElement) {
video = config.video.cloneNode();
if (config.initCallback) {
config.initCallback();
}
} else {
video = document.createElement('video');
if (typeof video.srcObject !== 'undefined') {
video.srcObject = mediaStream;
} else {
video.src = URL.createObjectURL(mediaStream);
}
video.onloadedmetadata = function() { // "onloadedmetadata" may NOT work in FF?
if (config.initCallback) {
config.initCallback();
}
};
video.width = config.video.width;
video.height = config.video.height;
}
video.muted = true;
video.play();
lastTime = new Date().getTime();
whammy = new Whammy.Video();
if (!config.disableLogs) {
console.log('canvas resolutions', canvas.width, '*', canvas.height);
console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height);
}
drawFrames(config.frameInterval);
};
/**
* Draw and push frames to Whammy
* @param {integer} frameInterval - set minimum interval (in milliseconds) between each time we push a frame to Whammy
*/
function drawFrames(frameInterval) {
frameInterval = typeof frameInterval !== 'undefined' ? frameInterval : 10;
var duration = new Date().getTime() - lastTime;
if (!duration) {
return setTimeout(drawFrames, frameInterval, frameInterval);
}
if (isPausedRecording) {
lastTime = new Date().getTime();
return setTimeout(drawFrames, 100);
}
// via #206, by Jack i.e. @Seymourr
lastTime = new Date().getTime();
if (video.paused) {
// via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316
// Tweak for Android Chrome
video.play();
}
context.drawImage(video, 0, 0, canvas.width, canvas.height);
whammy.frames.push({
duration: duration,
image: canvas.toDataURL('image/webp')
});
if (!isStopDrawing) {
setTimeout(drawFrames, frameInterval, frameInterval);
}
}
function asyncLoop(o) {
var i = -1,
length = o.length;
(function loop() {
i++;
if (i === length) {
o.callback();
return;
}
// "setTimeout" added by Jim McLeod
setTimeout(function() {
o.functionToLoop(loop, i);
}, 1);
})();
}
/**
* remove black frames from the beginning to the specified frame
* @param {Array} _frames - array of frames to be checked
* @param {number} _framesToCheck - number of frames to check (-1 means keep dropping leading frames until the first non-matching frame is found)
* @param {number} _pixTolerance - 0 - very strict (only black pixel color) ; 1 - all
* @param {number} _frameTolerance - 0 - very strict (only black frame color) ; 1 - all
* @returns {Array} - array of frames
*/
// pull#293 by @volodalexey
function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance, callback) {
var localCanvas = document.createElement('canvas');
localCanvas.width = canvas.width;
localCanvas.height = canvas.height;
var context2d = localCanvas.getContext('2d');
var resultFrames = [];
var checkUntilNotBlack = _framesToCheck === -1;
var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
_framesToCheck : _frames.length;
var sampleColor = {
r: 0,
g: 0,
b: 0
};
var maxColorDifference = Math.sqrt(
Math.pow(255, 2) +
Math.pow(255, 2) +
Math.pow(255, 2)
);
var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
var doNotCheckNext = false;
asyncLoop({
length: endCheckFrame,
functionToLoop: function(loop, f) {
var matchPixCount, endPixCheck, maxPixCount;
var finishImage = function() {
if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
// console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
} else {
// console.log('frame is passed : ' + f);
if (checkUntilNotBlack) {
doNotCheckNext = true;
}
resultFrames.push(_frames[f]);
}
loop();
};
if (!doNotCheckNext) {
var image = new Image();
image.onload = function() {
context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
matchPixCount = 0;
endPixCheck = imageData.data.length;
maxPixCount = imageData.data.length / 4;
for (var pix = 0; pix < endPixCheck; pix += 4) {
var currentColor = {
r: imageData.data[pix],
g: imageData.data[pix + 1],
b: imageData.data[pix + 2]
};
var colorDifference = Math.sqrt(
Math.pow(currentColor.r - sampleColor.r, 2) +
Math.pow(currentColor.g - sampleColor.g, 2) +
Math.pow(currentColor.b - sampleColor.b, 2)
);
// the color difference is the Euclidean distance between the color vectors (r1,g1,b1) and (r2,g2,b2)
if (colorDifference <= maxColorDifference * pixTolerance) {
matchPixCount++;
}
}
finishImage();
};
image.src = _frames[f].image;
} else {
finishImage();
}
},
callback: function() {
resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
if (resultFrames.length <= 0) {
// at least one frame must remain available for further manipulation;
// if the total duration of all frames is < 1000 then ffmpeg doesn't work well...
resultFrames.push(_frames[_frames.length - 1]);
}
callback(resultFrames);
}
});
}
var isStopDrawing = false;
/**
* This method stops recording video.
* @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
* @method
* @memberof WhammyRecorder
* @example
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
*/
this.stop = function(callback) {
isStopDrawing = true;
var _this = this;
// analysing all frames takes some time!
setTimeout(function() {
// e.g. dropBlackFrames(frames, 10, 1, 1) - will cut all 10 frames
// e.g. dropBlackFrames(frames, 10, 0.5, 0.5) - will analyse 10 frames
// e.g. dropBlackFrames(frames, 10) === dropBlackFrames(frames, 10, 0, 0) - will analyse 10 frames with strict black color
dropBlackFrames(whammy.frames, -1, null, null, function(frames) {
whammy.frames = frames;
// to display advertisement images!
if (config.advertisement && config.advertisement.length) {
whammy.frames = config.advertisement.concat(whammy.frames);
}
/**
* @property {Blob} blob - Recorded frames in video/webm blob.
* @memberof WhammyRecorder
* @example
* recorder.stop(function() {
* var blob = recorder.blob;
* });
*/
whammy.compile(function(blob) {
_this.blob = blob;
if (_this.blob.forEach) {
// the worker may hand back an array of buffers; wrap them in a Blob
_this.blob = new Blob(_this.blob, {
type: 'video/webm'
});
}
if (callback) {
callback(_this.blob);
}
});
});
}, 10);
};
var isPausedRecording = false;
/**
* This method pauses the recording process.
* @method
* @memberof WhammyRecorder
* @example
* recorder.pause();
*/
this.pause = function() {
isPausedRecording = true;
};
/**
* This method resumes the recording process.
* @method
* @memberof WhammyRecorder
* @example
* recorder.resume();
*/
this.resume = function() {
isPausedRecording = false;
if (isStopDrawing) {
this.record();
}
};
/**
* This method resets currently recorded data.
* @method
* @memberof WhammyRecorder
* @example
* recorder.clearRecordedData();
*/
this.clearRecordedData = function() {
this.pause();
whammy.frames = [];
};
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
var video;
var lastTime;
var whammy;
}
if (typeof RecordRTC !== 'undefined') {
RecordRTC.WhammyRecorder = WhammyRecorder;
}
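// A minimal usage sketch (not part of the library; `mediaStream` is assumed
// to carry a video track, `video` is an assumed <video> element):
//
// var whammyRecorder = new WhammyRecorder(mediaStream, {
//     width: 320,
//     height: 240,
//     frameInterval: 10 // ms between canvas snapshots
// });
// whammyRecorder.record();
// whammyRecorder.stop(function(blob) {
//     video.src = URL.createObjectURL(blob);
// });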
// https://github.com/antimatter15/whammy/blob/master/LICENSE
// _________
// Whammy.js
// todo: Firefox now supports webp for webm containers!
// their MediaRecorder implementation works well!
// should we provide an option to record via Whammy.js, or is the MediaRecorder API a better solution?
/**
* Whammy is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It is written by {@link https://github.com/antimatter15|antimatter15}
* @summary A real time javascript webm encoder based on a canvas hack.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef Whammy
* @class
* @example
* var recorder = new Whammy.Video(15);
* recorder.add(context || canvas || dataURL);
* var output = recorder.compile();
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/
var Whammy = (function() {
// a more abstract-ish API
function WhammyVideo(duration) {
this.frames = [];
this.duration = duration || 1;
this.quality = 0.8;
}
/**
* Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
* @method
* @memberof Whammy
* @example
* recorder = new Whammy.Video(100);
* recorder.add(canvas || context || 'image/webp');
* @param {string} frame - Canvas || Context || image/webp
* @param {number} duration - Stick a duration (in milliseconds)
*/
WhammyVideo.prototype.add = function(frame, duration) {
if ('canvas' in frame) { //CanvasRenderingContext2D
frame = frame.canvas;
}
if ('toDataURL' in frame) {
frame = frame.toDataURL('image/webp', this.quality);
}
if (!(/^data:image\/webp;base64,/ig).test(frame)) {
throw 'Input must be formatted properly as a base64 encoded DataURI of type image/webp';
}
this.frames.push({
image: frame,
duration: duration || this.duration
});
};
function processInWebWorker(_function) {
var blob = URL.createObjectURL(new Blob([_function.toString(),
'this.onmessage = function (e) {' + _function.name + '(e.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(blob);
URL.revokeObjectURL(blob);
return worker;
}
function whammyInWebWorker(frames) {
function ArrayToWebM(frames) {
var info = checkFrames(frames);
if (!info) {
return [];
}
var clusterMaxDuration = 30000;
var EBML = [{
'id': 0x1a45dfa3, // EBML
'data': [{
'data': 1,
'id': 0x4286 // EBMLVersion
}, {
'data': 1,
'id': 0x42f7 // EBMLReadVersion
}, {
'data': 4,
'id': 0x42f2 // EBMLMaxIDLength
}, {
'data': 8,
'id': 0x42f3 // EBMLMaxSizeLength
}, {
'data': 'webm',
'id': 0x4282 // DocType
}, {
'data': 2,
'id': 0x4287 // DocTypeVersion
}, {
'data': 2,
'id': 0x4285 // DocTypeReadVersion
}]
}, {
'id': 0x18538067, // Segment
'data': [{
'id': 0x1549a966, // Info
'data': [{
'data': 1e6, // timecodes in milliseconds (1e6 nanoseconds per timecode unit)
'id': 0x2ad7b1 // TimecodeScale
}, {
'data': 'whammy',
'id': 0x4d80 // MuxingApp
}, {
'data': 'whammy',
'id': 0x5741 // WritingApp
}, {
'data': doubleToString(info.duration),
'id': 0x4489 // Duration
}]
}, {
'id': 0x1654ae6b, // Tracks
'data': [{
'id': 0xae, // TrackEntry
'data': [{
'data': 1,
'id': 0xd7 // TrackNumber
}, {
'data': 1,
'id': 0x73c5 // TrackUID
}, {
'data': 0,
'id': 0x9c // FlagLacing
}, {
'data': 'und',
'id': 0x22b59c // Language
}, {
'data': 'V_VP8',
'id': 0x86 // CodecID
}, {
'data': 'VP8',
'id': 0x258688 // CodecName
}, {
'data': 1,
'id': 0x83 // TrackType
}, {
'id': 0xe0, // Video
'data': [{
'data': info.width,
'id': 0xb0 // PixelWidth
}, {
'data': info.height,
'id': 0xba // PixelHeight
}]
}]
}]
}]
}];
//Generate clusters (max duration)
var frameNumber = 0;
var clusterTimecode = 0;
while (frameNumber < frames.length) {
var clusterFrames = [];
var clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
var clusterCounter = 0;
var cluster = {
'id': 0x1f43b675, // Cluster
'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
}; //Add cluster to segment
EBML[1].data.push(cluster);
clusterTimecode += clusterDuration;
}
return generateEBML(EBML);
}
function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
return [{
'data': clusterTimecode,
'id': 0xe7 // Timecode
}].concat(clusterFrames.map(function(webp) {
var block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}));
}
// sums the durations of all frames; width/height are taken from the first frame
function checkFrames(frames) {
if (!frames[0]) {
postMessage({
error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
});
return;
}
var width = frames[0].width,
height = frames[0].height,
duration = frames[0].duration;
for (var i = 1; i < frames.length; i++) {
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
function numToBuffer(num) {
var parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
function strToBuffer(str) {
return new Uint8Array(str.split('').map(function(e) {
return e.charCodeAt(0);
}));
}
function bitsToBuffer(bits) {
var data = [];
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
return new Uint8Array(data);
}
function generateEBML(json) {
var ebml = [];
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data === 'object') {
data = generateEBML(data);
}
if (typeof data === 'number') {
data = bitsToBuffer(data.toString(2));
}
if (typeof data === 'string') {
data = strToBuffer(data);
}
var len = data.size || data.byteLength || data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var sizeToString = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
return new Blob(ebml, {
type: 'video/webm'
});
}
function toBinStrOld(bits) {
var data = '';
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
}
return data;
}
function makeSimpleBlock(data) {
var flags = 0;
if (data.keyframe) {
flags |= 128;
}
if (data.invisible) {
flags |= 8;
}
if (data.lacing) {
flags |= (data.lacing << 1);
}
if (data.discardable) {
flags |= 1;
}
if (data.trackNum > 127) {
throw 'TrackNumber > 127 not supported';
}
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
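// Byte layout produced above (Matroska SimpleBlock): a one-byte VINT track
// number with its marker bit set (trackNum | 0x80), a big-endian 16-bit
// timecode relative to the cluster, one flags byte (keyframe/invisible/
// lacing/discardable), then the raw VP8 frame bytes.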
function parseWebP(riff) {
var VP8 = riff.RIFF[0].WEBP[0];
var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
for (var i = 0, c = []; i < 4; i++) {
c[i] = VP8.charCodeAt(frameStart + 3 + i);
}
var width, height, tmp;
//the code below is literally copied verbatim from the bitstream spec
tmp = (c[1] << 8) | c[0];
width = tmp & 0x3FFF;
tmp = (c[3] << 8) | c[2];
height = tmp & 0x3FFF;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
}
function getStrLength(string, offset) {
return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
var unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
}
function parseRIFF(string) {
var offset = 0;
var chunks = {};
while (offset < string.length) {
var id = string.substr(offset, 4);
var len = getStrLength(string, offset);
var data = string.substr(offset + 4 + 4, len);
offset += 4 + 4 + len;
chunks[id] = chunks[id] || [];
if (id === 'RIFF' || id === 'LIST') {
chunks[id].push(parseRIFF(data));
} else {
chunks[id].push(data);
}
}
return chunks;
}
function doubleToString(num) {
return [].slice.call(
new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
return String.fromCharCode(e);
}).reverse().join('');
}
var webm = ArrayToWebM(frames.map(function(frame) {
var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
webp.duration = frame.duration;
return webp;
}));
postMessage(webm);
}
/**
* Encodes frames into a WebM container. It uses a Web Worker to invoke the 'ArrayToWebM' method.
* @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
* @method
* @memberof Whammy
* @example
* recorder = new Whammy.Video(100);
* recorder.compile(function(blob) {
* // blob.size - blob.type
* });
*/
WhammyVideo.prototype.compile = function(callback) {
var webWorker = processInWebWorker(whammyInWebWorker);
webWorker.onmessage = function(event) {
if (event.data.error) {
console.error(event.data.error);
return;
}
callback(event.data);
};
webWorker.postMessage(this.frames);
};
return {
/**
* A more abstract-ish API.
* @method
* @memberof Whammy
* @example
* recorder = new Whammy.Video(100);
* @param {?number} duration - default per-frame duration in milliseconds
*/
Video: WhammyVideo
};
})();
if (typeof RecordRTC !== 'undefined') {
RecordRTC.Whammy = Whammy;
}
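// A minimal direct-usage sketch for Whammy (not part of the library; assumes
// a `canvas` that already holds a frame, and Chrome, because
// toDataURL('image/webp') must return real WebP data):
//
// var encoder = new Whammy.Video(100); // 100ms default per-frame duration
// encoder.add(canvas); // or a 2d context, or an 'image/webp' data-URI
// encoder.compile(function(webmBlob) {
//     video.src = URL.createObjectURL(webmBlob);
// });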
// ______________ (indexed-db)
// DiskStorage.js
/**
* DiskStorage is a standalone object used by {@link RecordRTC} to store recorded blobs in IndexedDB storage.
* @summary Writing blobs into IndexedDB.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @example
* DiskStorage.Store({
* audioBlob: yourAudioBlob,
* videoBlob: yourVideoBlob,
* gifBlob : yourGifBlob
* });
* DiskStorage.Fetch(function(blob, type) {
* if(type === 'audioBlob') { }
* if(type === 'videoBlob') { }
* if(type === 'gifBlob') { }
* });
* // DiskStorage.dataStoreName = 'recordRTC';
* // DiskStorage.onError = function(error) { };
* @property {function} init - This method must be called once to initialize the IndexedDB ObjectStore; it is invoked automatically by Fetch/Store.
* @property {function} Fetch - This method fetches stored blobs from IndexedDB.
* @property {function} Store - This method stores blobs in IndexedDB.
* @property {function} onError - This function is invoked for any known/unknown error.
* @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage.
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/
var DiskStorage = {
/**
* This method must be called once to initialize the IndexedDB ObjectStore; it is invoked automatically by Fetch/Store.
* @method
* @memberof DiskStorage
* @internal
* @example
* DiskStorage.init();
*/
init: function() {
var self = this;
if (typeof indexedDB === 'undefined' || typeof indexedDB.open === 'undefined') {
console.error('IndexedDB API is not available in this browser.');
return;
}
var dbVersion = 1;
var dbName = this.dbName || location.href.replace(/\/|:|#|%|\.|\[|\]/g, ''),
db;
var request = indexedDB.open(dbName, dbVersion);
function createObjectStore(dataBase) {
dataBase.createObjectStore(self.dataStoreName);
}
function putInDB() {
var transaction = db.transaction([self.dataStoreName], 'readwrite');
if (self.videoBlob) {
transaction.objectStore(self.dataStoreName).put(self.videoBlob, 'videoBlob');
}
if (self.gifBlob) {
transaction.objectStore(self.dataStoreName).put(self.gifBlob, 'gifBlob');
}
if (self.audioBlob) {
transaction.objectStore(self.dataStoreName).put(self.audioBlob, 'audioBlob');
}
function getFromStore(portionName) {
transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function(event) {
if (self.callback) {
self.callback(event.target.result, portionName);
}
};
}
getFromStore('audioBlob');
getFromStore('videoBlob');
getFromStore('gifBlob');
}
request.onerror = self.onError;
request.onsuccess = function() {
db = request.result;
db.onerror = self.onError;
if (db.setVersion) {
if (db.version !== dbVersion) {
var setVersion = db.setVersion(dbVersion);
setVersion.onsuccess = function() {
createObjectStore(db);
putInDB();
};
} else {
putInDB();
}
} else {
putInDB();
}
};
request.onupgradeneeded = function(event) {
createObjectStore(event.target.result);
};
},
/**
* This method fetches stored blobs from IndexedDB.
* @method
* @memberof DiskStorage
* @internal
* @example
* DiskStorage.Fetch(function(blob, type) {
* if(type === 'audioBlob') { }
* if(type === 'videoBlob') { }
* if(type === 'gifBlob') { }
* });
*/
Fetch: function(callback) {
this.callback = callback;
this.init();
return this;
},
/**
* This method stores blobs in IndexedDB.
* @method
* @memberof DiskStorage
* @internal
* @example
* DiskStorage.Store({
* audioBlob: yourAudioBlob,
* videoBlob: yourVideoBlob,
* gifBlob : yourGifBlob
* });
*/
Store: function(config) {
this.audioBlob = config.audioBlob;
this.videoBlob = config.videoBlob;
this.gifBlob = config.gifBlob;
this.init();
return this;
},
/**
* This function is invoked for any known/unknown error.
* @method
* @memberof DiskStorage
* @internal
* @example
* DiskStorage.onError = function(error){
* alert(JSON.stringify(error));
* };
*/
onError: function(error) {
console.error(JSON.stringify(error, null, '\t'));
},
/**
* @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage.
* @memberof DiskStorage
* @internal
* @example
* DiskStorage.dataStoreName = 'recordRTC';
*/
dataStoreName: 'recordRTC',
dbName: null
};
if (typeof RecordRTC !== 'undefined') {
RecordRTC.DiskStorage = DiskStorage;
}
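// A minimal usage sketch (not part of the library; `recordedBlob` is an
// assumed, previously recorded blob). Fetch passes back whatever value was
// stored, keyed by type:
//
// DiskStorage.Store({
//     audioBlob: recordedBlob
// });
// DiskStorage.Fetch(function(blob, type) {
//     if (type === 'audioBlob') {
//         audio.src = URL.createObjectURL(blob);
//     }
// });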
// ______________
// GifRecorder.js
/**
* GifRecorder is a standalone class used by {@link RecordRTC} to record video or canvas into an animated GIF.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef GifRecorder
* @class
* @example
* var recorder = new GifRecorder(mediaStream || canvas || context, { width: 1280, height: 720, frameRate: 200, quality: 10 });
* recorder.record();
* recorder.stop(function(blob) {
* img.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object or HTMLCanvasElement or CanvasRenderingContext2D.
* @param {object} config - {disableLogs:true, initCallback: function, width: 320, height: 240, frameRate: 200, quality: 10}
*/
function GifRecorder(mediaStream, config) {
if (typeof GIFEncoder === 'undefined') {
throw 'Please link: https://cdn.webrtc-experiment.com/gif-recorder.js';
}
config = config || {};
var isHTMLObject = mediaStream instanceof CanvasRenderingContext2D || mediaStream instanceof HTMLCanvasElement;
/**
* This method records MediaStream.
* @method
* @memberof GifRecorder
* @example
* recorder.record();
*/
this.record = function() {
if (!isHTMLObject) {
if (!config.width) {
config.width = video.offsetWidth || 320;
}
if (!config.height) {
config.height = video.offsetHeight || 240;
}
if (!config.video) {
config.video = {
width: config.width,
height: config.height
};
}
if (!config.canvas) {
config.canvas = {
width: config.width,
height: config.height
};
}
canvas.width = config.canvas.width || 320;
canvas.height = config.canvas.height || 240;
video.width = config.video.width || 320;
video.height = config.video.height || 240;
}
// external library to record as GIF images
gifEncoder = new GIFEncoder();
// void setRepeat(int iter)
// Sets the number of times the set of GIF frames should be played.
// Default is 1; 0 means play indefinitely.
gifEncoder.setRepeat(0);
// void setFrameRate(Number fps)
// Sets frame rate in frames per second.
// Equivalent to setDelay(1000/fps).
// Using "setDelay" instead of "setFrameRate"
gifEncoder.setDelay(config.frameRate || 200);
// void setQuality(int quality)
// Sets quality of color quantization (conversion of images to the
// maximum 256 colors allowed by the GIF specification).
// Lower values (minimum = 1) produce better colors,
// but slow processing significantly. 10 is the default,
// and produces good color mapping at reasonable speeds.
// Values greater than 20 do not yield significant improvements in speed.
gifEncoder.setQuality(config.quality || 10);
// Boolean start()
// This writes the GIF Header and returns false if it fails.
gifEncoder.start();
startTime = Date.now();
var self = this;
function drawVideoFrame(time) {
if (isPausedRecording) {
return setTimeout(function() {
drawVideoFrame(time);
}, 100);
}
lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
if (typeof lastFrameTime === 'undefined') {
lastFrameTime = time;
}
// ~10 fps
if (time - lastFrameTime < 90) {
return;
}
if (!isHTMLObject && video.paused) {
// via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316
// Tweak for Android Chrome
video.play();
}
if (!isHTMLObject) {
context.drawImage(video, 0, 0, canvas.width, canvas.height);
}
if (config.onGifPreview) {
config.onGifPreview(canvas.toDataURL('image/png'));
}
gifEncoder.addFrame(context);
lastFrameTime = time;
}
lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
if (config.initCallback) {
config.initCallback();
}
};
/**
* This method stops recording MediaStream.
* @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
* @method
* @memberof GifRecorder
* @example
* recorder.stop(function(blob) {
* img.src = URL.createObjectURL(blob);
* });
*/
this.stop = function(callback) {
if (lastAnimationFrame) {
cancelAnimationFrame(lastAnimationFrame);
}
endTime = Date.now();
/**
* @property {Blob} blob - The recorded blob object.
* @memberof GifRecorder
* @example
* recorder.stop(function(){
* var blob = recorder.blob;
* });
*/
this.blob = new Blob([new Uint8Array(gifEncoder.stream().bin)], {
type: 'image/gif'
});
// bug: find a way to clear old recorded blobs
gifEncoder.stream().bin = [];
// pass the blob back, as documented in the example above
if (callback) {
callback(this.blob);
}
};
var isPausedRecording = false;
/**
* This method pauses the recording process.
* @method
* @memberof GifRecorder
* @example
* recorder.pause();
*/
this.pause = function() {
isPausedRecording = true;
};
/**
* This method resumes the recording process.
* @method
* @memberof GifRecorder
* @example
* recorder.resume();
*/
this.resume = function() {
isPausedRecording = false;
};
/**
* This method resets currently recorded data.
* @method
* @memberof GifRecorder
* @example
* recorder.clearRecordedData();
*/
this.clearRecordedData = function() {
if (!gifEncoder) {
return;
}
this.pause();
gifEncoder.stream().bin = [];
};
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
if (isHTMLObject) {
if (mediaStream instanceof CanvasRenderingContext2D) {
context = mediaStream;
canvas = context.canvas;
} else if (mediaStream instanceof HTMLCanvasElement) {
context = mediaStream.getContext('2d');
canvas = mediaStream;
}
}
if (!isHTMLObject) {
var video = document.createElement('video');
video.muted = true;
video.autoplay = true;
if (typeof video.srcObject !== 'undefined') {
video.srcObject = mediaStream;
} else {
video.src = URL.createObjectURL(mediaStream);
}
video.play();
}
var lastAnimationFrame = null;
var startTime, endTime, lastFrameTime;
var gifEncoder;
}
if (typeof RecordRTC !== 'undefined') {
RecordRTC.GifRecorder = GifRecorder;
}
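// A minimal usage sketch (not part of the library; assumes gif-recorder.js
// i.e. GIFEncoder is linked, `mediaStream` carries video, and `img` is an
// assumed <img> element):
//
// var gifRecorder = new GifRecorder(mediaStream, {
//     width: 320,
//     height: 240,
//     quality: 10
// });
// gifRecorder.record();
// setTimeout(function() {
//     gifRecorder.stop(function(blob) {
//         img.src = URL.createObjectURL(blob); // image/gif
//     });
// }, 5000);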
// ______________________
// MultiStreamRecorder.js
/*
* Video conference recording, using the captureStream API along with the WebAudio and Canvas2D APIs.
*/
/**
* MultiStreamRecorder can record multiple videos into a single container.
* @summary Multi-videos recorder.
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef MultiStreamRecorder
* @class
* @example
* var options = {
* mimeType: 'video/webm'
* }
* var recorder = new MultiStreamRecorder(ArrayOfMediaStreams, options);
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
*
* // or
* var blob = recorder.blob;
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream[]} arrayOfMediaStreams - Array of MediaStreams.
* @param {object} options - {disableLogs:true, frameInterval: 1, mimeType: "video/webm"}
*/
function MultiStreamRecorder(arrayOfMediaStreams, options) {
var self = this;
options = options || {
mimeType: 'video/webm',
video: {
width: 360,
height: 240
}
};
if (!options.frameInterval) {
options.frameInterval = 10;
}
if (!options.video) {
options.video = {};
}
if (!options.video.width) {
options.video.width = 360;
}
if (!options.video.height) {
options.video.height = 240;
}
/**
* This method records all MediaStreams.
* @method
* @memberof MultiStreamRecorder
* @example
* recorder.record();
*/
this.record = function() {
isStoppedRecording = false;
var mixedVideoStream = getMixedVideoStream();
var mixedAudioStream = getMixedAudioStream();
if (mixedAudioStream) {
mixedAudioStream.getAudioTracks().forEach(function(track) {
mixedVideoStream.addTrack(track);
});
}
if (options.previewStream && typeof options.previewStream === 'function') {
options.previewStream(mixedVideoStream);
}
mediaRecorder = new MediaStreamRecorder(mixedVideoStream, options);
drawVideosToCanvas();
mediaRecorder.record();
};
/**
* This method stops recording MediaStream.
* @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
* @method
* @memberof MultiStreamRecorder
* @example
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
*/
this.stop = function(callback) {
isStoppedRecording = true;
if (!mediaRecorder) {
return;
}
mediaRecorder.stop(function(blob) {
callback(blob);
self.clearRecordedData();
});
};
function getMixedAudioStream() {
// via: @pehrsons
if (!Storage.AudioContextConstructor) {
Storage.AudioContextConstructor = new Storage.AudioContext();
}
self.audioContext = Storage.AudioContextConstructor;
self.audioSources = [];
self.gainNode = self.audioContext.createGain();
self.gainNode.connect(self.audioContext.destination);
self.gainNode.gain.value = 0; // don't hear self
var audioTracksLength = 0;
arrayOfMediaStreams.forEach(function(stream) {
if (!stream.getAudioTracks().length) {
return;
}
audioTracksLength++;
var audioSource = self.audioContext.createMediaStreamSource(stream);
audioSource.connect(self.gainNode);
self.audioSources.push(audioSource);
});
if (!audioTracksLength) {
return;
}
self.audioDestination = self.audioContext.createMediaStreamDestination();
self.audioSources.forEach(function(audioSource) {
audioSource.connect(self.audioDestination);
});
return self.audioDestination.stream;
}
var videos = [];
var mediaRecorder;
function getMixedVideoStream() {
// via: @adrian-ber
arrayOfMediaStreams.forEach(function(stream) {
if (!stream.getVideoTracks().length) {
return;
}
var video = getVideo(stream);
video.width = options.video.width;
video.height = options.video.height;
video.stream = stream;
videos.push(video);
});
var capturedStream;
if ('captureStream' in canvas) {
capturedStream = canvas.captureStream();
} else if ('mozCaptureStream' in canvas) {
capturedStream = canvas.mozCaptureStream();
} else if (!options.disableLogs) {
console.error('Upgrade to latest Chrome or otherwise enable this flag: chrome://flags/#enable-experimental-web-platform-features');
}
canvas.stream = capturedStream;
return capturedStream;
}
function getVideo(stream) {
var video = document.createElement('video');
// prefer srcObject; URL.createObjectURL(MediaStream) is deprecated
if (typeof video.srcObject !== 'undefined') {
video.srcObject = stream;
} else {
video.src = URL.createObjectURL(stream);
}
video.muted = true;
video.volume = 0;
video.play();
return video;
}
var isStoppedRecording = false;
function drawVideosToCanvas() {
if (isStoppedRecording) {
return;
}
var videosLength = videos.length;
var fullcanvas = false;
videos.forEach(function(video) {
if (!video.stream) {
video.stream = {};
}
if (video.stream.fullcanvas) {
fullcanvas = video.stream;
}
});
if (fullcanvas) {
canvas.width = fullcanvas.width;
canvas.height = fullcanvas.height;
} else {
canvas.width = videosLength > 1 ? videos[0].width * 2 : videos[0].width;
canvas.height = videosLength > 2 ? videos[0].height * 2 : videos[0].height;
}
videos.forEach(drawImage);
setTimeout(drawVideosToCanvas, options.frameInterval);
}
function drawImage(video, idx) {
if (isStoppedRecording) {
return;
}
var x = 0;
var y = 0;
var width = video.width;
var height = video.height;
if (idx === 1) {
x = video.width;
}
if (idx === 2) {
y = video.height;
}
if (idx === 3) {
x = video.width;
y = video.height;
}
if (typeof video.stream.left !== 'undefined') {
x = video.stream.left;
}
if (typeof video.stream.top !== 'undefined') {
y = video.stream.top;
}
if (typeof video.stream.width !== 'undefined') {
width = video.stream.width;
}
if (typeof video.stream.height !== 'undefined') {
height = video.stream.height;
}
context.drawImage(video, x, y, width, height);
if (typeof video.stream.onRender === 'function') {
video.stream.onRender(context, x, y, width, height, video.stream);
}
}
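// Layout sketch: with four equally-sized streams and no per-stream
// left/top/width/height overrides, drawImage tiles the canvas as
//     idx 0 -> (0, 0)    idx 1 -> (w, 0)
//     idx 2 -> (0, h)    idx 3 -> (w, h)
// matching the doubled canvas width/height chosen in drawVideosToCanvas.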
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
canvas.style = 'opacity:0;position:absolute;z-index:-1;top: -100000000;left:-1000000000;';
(document.body || document.documentElement).appendChild(canvas);
/**
* This method pauses the recording process.
* @method
* @memberof MultiStreamRecorder
* @example
* recorder.pause();
*/
this.pause = function() {
if (mediaRecorder) {
mediaRecorder.pause();
}
};
/**
* This method resumes the recording process.
* @method
* @memberof MultiStreamRecorder
* @example
* recorder.resume();
*/
this.resume = function() {
if (mediaRecorder) {
mediaRecorder.resume();
}
};
/**
* This method resets currently recorded data.
* @method
* @memberof MultiStreamRecorder
* @example
* recorder.clearRecordedData();
*/
this.clearRecordedData = function() {
videos = [];
isStoppedRecording = true;
if (mediaRecorder) {
mediaRecorder.clearRecordedData();
}
mediaRecorder = null;
if (self.gainNode) {
self.gainNode.disconnect();
self.gainNode = null;
}
if (self.audioSources.length) {
self.audioSources.forEach(function(source) {
source.disconnect();
});
self.audioSources = [];
}
if (self.audioDestination) {
self.audioDestination.disconnect();
self.audioDestination = null;
}
self.audioContext = null;
context.clearRect(0, 0, canvas.width, canvas.height);
if (canvas.stream) {
canvas.stream.stop();
canvas.stream = null;
}
};
/**
* Add extra media-streams to existing recordings.
* @method
* @memberof MultiStreamRecorder
* @example
* recorder.addStream(MediaStream);
*/
this.addStream = function(stream) {
if (stream instanceof Array && stream.length) {
stream.forEach(this.addStream);
return;
}
arrayOfMediaStreams.push(stream);
if (!mediaRecorder) {
return;
}
if (stream.getVideoTracks().length) {
var video = getVideo(stream);
video.width = options.video.width;
video.height = options.video.height;
video.stream = stream;
videos.push(video);
}
if (stream.getAudioTracks().length && self.audioContext) {
var audioSource = self.audioContext.createMediaStreamSource(stream);
audioSource.connect(self.audioDestination);
self.audioSources.push(audioSource);
}
};
}
if (typeof RecordRTC !== 'undefined') {
RecordRTC.MultiStreamRecorder = MultiStreamRecorder;
}
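// A minimal usage sketch (not part of the library; `cameraStream` and
// `screenStream` are assumed live MediaStreams, `video` an assumed element):
//
// var multiRecorder = new MultiStreamRecorder([cameraStream, screenStream], {
//     mimeType: 'video/webm',
//     video: { width: 360, height: 240 }
// });
// multiRecorder.record();
// // later, e.g. when another participant joins:
// // multiRecorder.addStream(anotherStream);
// multiRecorder.stop(function(blob) {
//     video.src = URL.createObjectURL(blob);
// });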
// _____________________
// RecordRTC.promises.js
/**
* RecordRTCPromisesHandler adds promises support in RecordRTC
* @summary Promises for RecordRTC
* @license {@link https://github.com/muaz-khan/RecordRTC#license|MIT}
* @author {@link http://www.MuazKhan.com|Muaz Khan}
* @typedef RecordRTCPromisesHandler
* @class
* @example
* var recorder = new RecordRTCPromisesHandler(mediaStream, options);
* recorder.startRecording().then(successCB).catch(errorCB);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
* @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
*/
function RecordRTCPromisesHandler(mediaStream, options) {
if (!this) {
throw 'Use "new RecordRTCPromisesHandler()"';
}
var self = this;
self.recordRTC = new RecordRTC(mediaStream, options);
this.startRecording = function() {
return new Promise(function(resolve, reject) {
try {
self.recordRTC.startRecording();
resolve();
} catch (e) {
reject(e);
}
});
};
this.stopRecording = function() {
return new Promise(function(resolve, reject) {
try {
self.recordRTC.stopRecording(function(url) {
self.blob = self.recordRTC.getBlob();
resolve(url);
});
} catch (e) {
reject(e);
}
});
};
this.getDataURL = function(callback) {
return new Promise(function(resolve, reject) {
try {
self.recordRTC.getDataURL(function(dataURL) {
resolve(dataURL);
});
} catch (e) {
reject(e);
}
});
};
this.getBlob = function() {
return self.recordRTC.getBlob();
};
this.blob = null;
}
if (typeof RecordRTC !== 'undefined') {
RecordRTC.RecordRTCPromisesHandler = RecordRTCPromisesHandler;
}
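// A minimal promise-chain sketch (not part of the library; `mediaStream` is
// assumed to come from getUserMedia, `audio` is an assumed <audio> element):
//
// var promisesRecorder = new RecordRTCPromisesHandler(mediaStream, {
//     type: 'audio',
//     recorderType: StereoAudioRecorder
// });
// promisesRecorder.startRecording()
//     .then(function() {
//         // record for five seconds
//         return new Promise(function(resolve) { setTimeout(resolve, 5000); });
//     })
//     .then(function() { return promisesRecorder.stopRecording(); })
//     .then(function(url) {
//         audio.src = url; // the blob itself is at promisesRecorder.blob
//     })
//     .catch(function(error) { console.error(error); });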