diff --git a/Gruntfile.js b/Gruntfile.js
index 5303f3ac..771434aa 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -70,7 +70,7 @@ module.exports = function(grunt) {
},
jsbeautifier: {
files: [
- 'RecordRTC.js',
+ // 'RecordRTC.js',
'dev/*.js',
'Gruntfile.js',
'./Canvas-Recording/*.html',
diff --git a/RecordRTC.js b/RecordRTC.js
index 28b54b87..16a0e6a8 100644
--- a/RecordRTC.js
+++ b/RecordRTC.js
@@ -1,4 +1,4 @@
-// Last time updated at Jan 21, 2014, 08:32:23
+// Last time updated at Jan 21, 2015, 08:32:23
// links:
// Open-Sourced: https://github.com/muaz-khan/RecordRTC
@@ -958,16 +958,16 @@ if (location.href.indexOf('file:') === 0) {
* bytesToSize(1024*1024*5) === '5 GB'
*/
function bytesToSize(bytes) {
- var k = 1000;
- var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
- if (bytes === 0) {
- return '0 Bytes';
- }
- var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
- return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
+ var k = 1000;
+ var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
+ if (bytes === 0) {
+ return '0 Bytes';
}
- // __________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129
- // Storage.js
+ var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
+ return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
+}
+// __________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129
+// Storage.js
/**
* Storage is a standalone object used by {@link RecordRTC} to store reusable objects e.g. "new AudioContext".
@@ -1015,123 +1015,123 @@ var Storage = {
*/
function MediaStreamRecorder(mediaStream) {
- var self = this;
+ var self = this;
- // if user chosen only audio option; and he tried to pass MediaStream with
- // both audio and video tracks;
- // using a dirty workaround to generate audio-only stream so that we can get audio/ogg output.
- if (self.mimeType && self.mimeType !== 'video/webm' && mediaStream.getVideoTracks && mediaStream.getVideoTracks().length) {
- var context = new AudioContext();
- var mediaStreamSource = context.createMediaStreamSource(mediaStream);
+    // if the user chose the audio-only option but passed a MediaStream with
+ // both audio and video tracks;
+ // using a dirty workaround to generate audio-only stream so that we can get audio/ogg output.
+ if (self.mimeType && self.mimeType !== 'video/webm' && mediaStream.getVideoTracks && mediaStream.getVideoTracks().length) {
+ var context = new AudioContext();
+ var mediaStreamSource = context.createMediaStreamSource(mediaStream);
- var destination = context.createMediaStreamDestination();
- mediaStreamSource.connect(destination);
+ var destination = context.createMediaStreamDestination();
+ mediaStreamSource.connect(destination);
- mediaStream = destination.stream;
- }
+ mediaStream = destination.stream;
+ }
- var dataAvailable = false;
+ var dataAvailable = false;
- /**
- * This method records MediaStream.
- * @method
- * @memberof MediaStreamRecorder
- * @example
- * recorder.record();
- */
- this.record = function() {
- // http://dxr.mozilla.org/mozilla-central/source/content/media/MediaRecorder.cpp
- // https://wiki.mozilla.org/Gecko:MediaRecorder
- // https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
-
- // starting a recording session; which will initiate "Reading Thread"
- // "Reading Thread" are used to prevent main-thread blocking scenarios
- mediaRecorder = new window.MediaRecorder(mediaStream);
-
- // Dispatching OnDataAvailable Handler
- mediaRecorder.ondataavailable = function(e) {
- if (dataAvailable) {
- return;
- }
-
- if (!e.data.size) {
- if (!self.disableLogs) {
- console.warn('Recording of', e.data.type, 'failed.');
- }
- return;
- }
+ /**
+ * This method records MediaStream.
+ * @method
+ * @memberof MediaStreamRecorder
+ * @example
+ * recorder.record();
+ */
+ this.record = function() {
+ // http://dxr.mozilla.org/mozilla-central/source/content/media/MediaRecorder.cpp
+ // https://wiki.mozilla.org/Gecko:MediaRecorder
+ // https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
- dataAvailable = true;
-
- /**
- * @property {Blob} blob - Recorded frames in video/webm blob.
- * @memberof MediaStreamRecorder
- * @example
- * recorder.stop(function() {
- * var blob = recorder.blob;
- * });
- */
- self.blob = new Blob([e.data], {
- type: e.data.type || self.mimeType || 'audio/ogg'
- });
+ // starting a recording session; which will initiate "Reading Thread"
+ // "Reading Thread" are used to prevent main-thread blocking scenarios
+ mediaRecorder = new window.MediaRecorder(mediaStream);
- if (self.callback) {
- self.callback();
- }
- };
+ // Dispatching OnDataAvailable Handler
+ mediaRecorder.ondataavailable = function(e) {
+ if (dataAvailable) {
+ return;
+ }
- mediaRecorder.onerror = function(error) {
+ if (!e.data.size) {
if (!self.disableLogs) {
- console.warn(error);
+ console.warn('Recording of', e.data.type, 'failed.');
}
+ return;
+ }
- mediaRecorder.stop();
- self.record(0);
- };
-
- // void start(optional long mTimeSlice)
- // The interval of passing encoded data from EncodedBufferCache to onDataAvailable
- // handler. "mTimeSlice < 0" means Session object does not push encoded data to
- // onDataAvailable, instead, it passive wait the client side pull encoded data
- // by calling requestData API.
- mediaRecorder.start(0);
+ dataAvailable = true;
- // Start recording. If timeSlice has been provided, mediaRecorder will
- // raise a dataavailable event containing the Blob of collected data on every timeSlice milliseconds.
- // If timeSlice isn't provided, UA should call the RequestData to obtain the Blob data, also set the mTimeSlice to zero.
+ /**
+ * @property {Blob} blob - Recorded frames in video/webm blob.
+ * @memberof MediaStreamRecorder
+ * @example
+ * recorder.stop(function() {
+ * var blob = recorder.blob;
+ * });
+ */
+ self.blob = new Blob([e.data], {
+ type: e.data.type || self.mimeType || 'audio/ogg'
+ });
- if (self.onAudioProcessStarted) {
- self.onAudioProcessStarted();
+ if (self.callback) {
+ self.callback();
}
};
- /**
- * This method stops recording MediaStream.
- * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
- * @method
- * @memberof MediaStreamRecorder
- * @example
- * recorder.stop(function(blob) {
- * video.src = URL.createObjectURL(blob);
- * });
- */
- this.stop = function(callback) {
- this.callback = callback;
- // mediaRecorder.state === 'recording' means that media recorder is associated with "session"
- // mediaRecorder.state === 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted.
-
- if (mediaRecorder.state === 'recording') {
- // "stop" method auto invokes "requestData"!
- // mediaRecorder.requestData();
- mediaRecorder.stop();
+ mediaRecorder.onerror = function(error) {
+ if (!self.disableLogs) {
+ console.warn(error);
}
+
+ mediaRecorder.stop();
+ self.record(0);
};
- // Reference to "MediaRecorder" object
- var mediaRecorder;
- }
- // _________________
- // StereoRecorder.js
+ // void start(optional long mTimeSlice)
+ // The interval of passing encoded data from EncodedBufferCache to onDataAvailable
+ // handler. "mTimeSlice < 0" means Session object does not push encoded data to
+    // onDataAvailable; instead, it passively waits for the client side to pull encoded data
+ // by calling requestData API.
+ mediaRecorder.start(0);
+
+ // Start recording. If timeSlice has been provided, mediaRecorder will
+ // raise a dataavailable event containing the Blob of collected data on every timeSlice milliseconds.
+ // If timeSlice isn't provided, UA should call the RequestData to obtain the Blob data, also set the mTimeSlice to zero.
+
+ if (self.onAudioProcessStarted) {
+ self.onAudioProcessStarted();
+ }
+ };
+
+ /**
+ * This method stops recording MediaStream.
+ * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
+ * @method
+ * @memberof MediaStreamRecorder
+ * @example
+ * recorder.stop(function(blob) {
+ * video.src = URL.createObjectURL(blob);
+ * });
+ */
+ this.stop = function(callback) {
+ this.callback = callback;
+ // mediaRecorder.state === 'recording' means that media recorder is associated with "session"
+ // mediaRecorder.state === 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted.
+
+ if (mediaRecorder.state === 'recording') {
+ // "stop" method auto invokes "requestData"!
+ // mediaRecorder.requestData();
+ mediaRecorder.stop();
+ }
+ };
+
+ // Reference to "MediaRecorder" object
+ var mediaRecorder;
+}
+// _________________
+// StereoRecorder.js
/**
* StereoRecorder is a standalone class used by {@link RecordRTC} to bring audio-recording in chrome. It runs top over {@link StereoAudioRecorder}.
@@ -1148,58 +1148,58 @@ function MediaStreamRecorder(mediaStream) {
*/
function StereoRecorder(mediaStream) {
- var self = this;
+ var self = this;
- /**
- * This method records MediaStream.
- * @method
- * @memberof StereoRecorder
- * @example
- * recorder.record();
- */
- this.record = function() {
- mediaRecorder = new StereoAudioRecorder(mediaStream, this);
- mediaRecorder.onAudioProcessStarted = function() {
- if (self.onAudioProcessStarted) {
- self.onAudioProcessStarted();
- }
- };
- mediaRecorder.record();
+ /**
+ * This method records MediaStream.
+ * @method
+ * @memberof StereoRecorder
+ * @example
+ * recorder.record();
+ */
+ this.record = function() {
+ mediaRecorder = new StereoAudioRecorder(mediaStream, this);
+ mediaRecorder.onAudioProcessStarted = function() {
+ if (self.onAudioProcessStarted) {
+ self.onAudioProcessStarted();
+ }
};
+ mediaRecorder.record();
+ };
- /**
- * This method stops recording MediaStream.
- * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
- * @method
- * @memberof StereoRecorder
- * @example
- * recorder.stop(function(blob) {
- * video.src = URL.createObjectURL(blob);
- * });
- */
- this.stop = function(callback) {
- if (!mediaRecorder) {
- return;
- }
+ /**
+ * This method stops recording MediaStream.
+ * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
+ * @method
+ * @memberof StereoRecorder
+ * @example
+ * recorder.stop(function(blob) {
+ * video.src = URL.createObjectURL(blob);
+ * });
+ */
+ this.stop = function(callback) {
+ if (!mediaRecorder) {
+ return;
+ }
- mediaRecorder.stop(function() {
- for (var item in mediaRecorder) {
- self[item] = mediaRecorder[item];
- }
+ mediaRecorder.stop(function() {
+ for (var item in mediaRecorder) {
+ self[item] = mediaRecorder[item];
+ }
- if (callback) {
- callback();
- }
- });
- };
+ if (callback) {
+ callback();
+ }
+ });
+ };
- // Reference to "StereoAudioRecorder" object
- var mediaRecorder;
- }
- // source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js
- // https://github.com/mattdiamond/Recorderjs#license-mit
- // ______________________
- // StereoAudioRecorder.js
+ // Reference to "StereoAudioRecorder" object
+ var mediaRecorder;
+}
+// source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js
+// https://github.com/mattdiamond/Recorderjs#license-mit
+// ______________________
+// StereoAudioRecorder.js
/**
* StereoAudioRecorder is a standalone class used by {@link RecordRTC} to bring "stereo" audio-recording in chrome.
@@ -1222,61 +1222,85 @@ function StereoRecorder(mediaStream) {
var __stereoAudioRecorderJavacriptNode;
function StereoAudioRecorder(mediaStream, config) {
- if (!mediaStream.getAudioTracks().length) {
- throw 'Your stream has no audio tracks.';
- }
+ if (!mediaStream.getAudioTracks().length) {
+ throw 'Your stream has no audio tracks.';
+ }
- // variables
- var leftchannel = [];
- var rightchannel = [];
- var recording = false;
- var recordingLength = 0;
+ var self = this;
- /**
- * This method records MediaStream.
- * @method
- * @memberof StereoAudioRecorder
- * @example
- * recorder.record();
- */
- this.record = function() {
- // reset the buffers for the new recording
- leftchannel.length = rightchannel.length = 0;
- recordingLength = 0;
+ // variables
+ var leftchannel = [];
+ var rightchannel = [];
+ var recording = false;
+ var recordingLength = 0;
- recording = true;
- };
+ /**
+ * This method records MediaStream.
+ * @method
+ * @memberof StereoAudioRecorder
+ * @example
+ * recorder.record();
+ */
+ this.record = function() {
+ // reset the buffers for the new recording
+ leftchannel.length = rightchannel.length = 0;
+ recordingLength = 0;
- /**
- * This method stops recording MediaStream.
- * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
- * @method
- * @memberof StereoAudioRecorder
- * @example
- * recorder.stop(function(blob) {
- * video.src = URL.createObjectURL(blob);
- * });
- */
- this.stop = function(callback) {
- // stop recording
- recording = false;
+ recording = true;
+ };
- // to make sure onaudioprocess stops firing
- audioInput.disconnect();
+ function mergeLeftRightBuffers(config, callback) {
+ var webWorker = processInWebWorker(function mergeAudioBuffers(config) {
+ var leftBuffers = config.leftBuffers;
+ var rightBuffers = config.rightBuffers;
+ var sampleRate = config.sampleRate;
+
+ leftBuffers = mergeBuffers(leftBuffers[0], leftBuffers[1]);
+ rightBuffers = mergeBuffers(rightBuffers[0], rightBuffers[1]);
- // flat the left and right channels down
- var leftBuffer = mergeBuffers(leftchannel, recordingLength);
- var rightBuffer = mergeBuffers(rightchannel, recordingLength);
+ function mergeBuffers(channelBuffer, rLength) {
+ var result = new Float64Array(rLength);
+ var offset = 0;
+ var lng = channelBuffer.length;
+
+ for (var i = 0; i < lng; i++) {
+ var buffer = channelBuffer[i];
+ result.set(buffer, offset);
+ offset += buffer.length;
+ }
+
+ return result;
+ }
+
+ function interleave(leftChannel, rightChannel) {
+ var length = leftChannel.length + rightChannel.length;
+
+ var result = new Float64Array(length);
+
+ var inputIndex = 0;
+
+ for (var index = 0; index < length;) {
+ result[index++] = leftChannel[inputIndex];
+ result[index++] = rightChannel[inputIndex];
+ inputIndex++;
+ }
+ return result;
+ }
+
+ function writeUTFBytes(view, offset, string) {
+ var lng = string.length;
+ for (var i = 0; i < lng; i++) {
+ view.setUint8(offset + i, string.charCodeAt(i));
+ }
+ }
// interleave both channels together
- var interleaved = interleave(leftBuffer, rightBuffer);
+ var interleaved = interleave(leftBuffers, rightBuffers);
+
var interleavedLength = interleaved.length;
- // create our wav file
+ // create wav file
var resultingBufferLength = 44 + interleavedLength * 2;
- if (!config.disableLogs) {
- console.log('Resulting Buffer Length', resultingBufferLength);
- }
var buffer = new ArrayBuffer(resultingBufferLength);
@@ -1341,6 +1365,54 @@ function StereoAudioRecorder(mediaStream, config) {
}
}
+ postMessage({
+ buffer: buffer,
+ view: view
+ });
+ });
+
+ webWorker.onmessage = function(event) {
+ callback(event.data.buffer, event.data.view);
+ };
+
+ webWorker.postMessage(config);
+ }
+
+ function processInWebWorker(_function) {
+ var blob = URL.createObjectURL(new Blob([_function.toString(),
+ 'this.onmessage = function (e) {mergeAudioBuffers(e.data);}'
+ ], {
+ type: 'application/javascript'
+ }));
+
+ var worker = new Worker(blob);
+ URL.revokeObjectURL(blob);
+ return worker;
+ }
+
+ /**
+ * This method stops recording MediaStream.
+ * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
+ * @method
+ * @memberof StereoAudioRecorder
+ * @example
+ * recorder.stop(function(blob) {
+ * video.src = URL.createObjectURL(blob);
+ * });
+ */
+ this.stop = function(callback) {
+ // stop recording
+ recording = false;
+
+ // to make sure onaudioprocess stops firing
+ audioInput.disconnect();
+
+ mergeLeftRightBuffers({
+ sampleRate: sampleRate,
+ leftChannel: config.leftChannel,
+ leftBuffers: [leftchannel, recordingLength],
+ rightBuffers: [rightchannel, recordingLength]
+ }, function(buffer, view) {
/**
* @property {Blob} blob - The recorded blob object.
* @memberof StereoAudioRecorder
@@ -1349,7 +1421,7 @@ function StereoAudioRecorder(mediaStream, config) {
* var blob = recorder.blob;
* });
*/
- this.blob = new Blob([view], {
+ self.blob = new Blob([view], {
type: 'audio/wav'
});
@@ -1361,7 +1433,7 @@ function StereoAudioRecorder(mediaStream, config) {
* var buffer = recorder.buffer;
* });
*/
- this.buffer = new ArrayBuffer(view);
+ self.buffer = new ArrayBuffer(view);
/**
* @property {DataView} view - The recorded data-view object.
@@ -1371,185 +1443,146 @@ function StereoAudioRecorder(mediaStream, config) {
* var view = recorder.view;
* });
*/
- this.view = view;
+ self.view = view;
- this.sampleRate = sampleRate;
- this.bufferSize = bufferSize;
+ self.sampleRate = sampleRate;
+ self.bufferSize = bufferSize;
// recorded audio length
- this.length = recordingLength;
+ self.length = recordingLength;
if (callback) {
callback();
}
isAudioProcessStarted = false;
- };
-
- function interleave(leftChannel, rightChannel) {
- var length = leftChannel.length + rightChannel.length;
+ });
+ };
- if (!config.disableLogs) {
- console.log('Buffers length:', length);
- }
+ if (!Storage.AudioContextConstructor) {
+ Storage.AudioContextConstructor = new Storage.AudioContext();
+ }
- var result = new Float64Array(length);
+ var context = Storage.AudioContextConstructor;
- var inputIndex = 0;
+ // creates an audio node from the microphone incoming stream
+ var audioInput = context.createMediaStreamSource(mediaStream);
- for (var index = 0; index < length;) {
- result[index++] = leftChannel[inputIndex];
- result[index++] = rightChannel[inputIndex];
- inputIndex++;
- }
- return result;
- }
+ var legalBufferValues = [0, 256, 512, 1024, 2048, 4096, 8192, 16384];
- function mergeBuffers(channelBuffer, rLength) {
- var result = new Float64Array(rLength);
- var offset = 0;
- var lng = channelBuffer.length;
+ /**
+ * From the spec: This value controls how frequently the audioprocess event is
+ * dispatched and how many sample-frames need to be processed each call.
+ * Lower values for buffer size will result in a lower (better) latency.
+ * Higher values will be necessary to avoid audio breakup and glitches
+ * The size of the buffer (in sample-frames) which needs to
+ * be processed each time onprocessaudio is called.
+ * Legal values are (256, 512, 1024, 2048, 4096, 8192, 16384).
+ * @property {number} bufferSize - Buffer-size for how frequently the audioprocess event is dispatched.
+ * @memberof StereoAudioRecorder
+ * @example
+ * recorder = new StereoAudioRecorder(mediaStream, {
+ * bufferSize: 4096
+ * });
+ */
- for (var i = 0; i < lng; i++) {
- var buffer = channelBuffer[i];
- result.set(buffer, offset);
- offset += buffer.length;
- }
+ // "0" means, let chrome decide the most accurate buffer-size for current platform.
+ var bufferSize = typeof config.bufferSize === 'undefined' ? 4096 : config.bufferSize;
- return result;
+ if (legalBufferValues.indexOf(bufferSize) === -1) {
+ if (!config.disableLogs) {
+ console.warn('Legal values for buffer-size are ' + JSON.stringify(legalBufferValues, null, '\t'));
}
+ }
- function writeUTFBytes(view, offset, string) {
- var lng = string.length;
- for (var i = 0; i < lng; i++) {
- view.setUint8(offset + i, string.charCodeAt(i));
- }
- }
- if (!Storage.AudioContextConstructor) {
- Storage.AudioContextConstructor = new Storage.AudioContext();
- }
+ /**
+ * The sample rate (in sample-frames per second) at which the
+ * AudioContext handles audio. It is assumed that all AudioNodes
+ * in the context run at this rate. In making this assumption,
+ * sample-rate converters or "varispeed" processors are not supported
+ * in real-time processing.
+ * The sampleRate parameter describes the sample-rate of the
+ * linear PCM audio data in the buffer in sample-frames per second.
+ * An implementation must support sample-rates in at least
+ * the range 22050 to 96000.
+ * @property {number} sampleRate - Buffer-size for how frequently the audioprocess event is dispatched.
+ * @memberof StereoAudioRecorder
+ * @example
+ * recorder = new StereoAudioRecorder(mediaStream, {
+ * sampleRate: 44100
+ * });
+ */
+ var sampleRate = typeof config.sampleRate !== 'undefined' ? config.sampleRate : context.sampleRate || 44100;
- var context = Storage.AudioContextConstructor;
+ if (sampleRate < 22050 || sampleRate > 96000) {
+ // Ref: http://stackoverflow.com/a/26303918/552182
+ if (!config.disableLogs) {
+ console.warn('sample-rate must be under range 22050 and 96000.');
+ }
+ }
- // creates an audio node from the microphone incoming stream
- var audioInput = context.createMediaStreamSource(mediaStream);
+ if (context.createJavaScriptNode) {
+ __stereoAudioRecorderJavacriptNode = context.createJavaScriptNode(bufferSize, 2, 2);
+ } else if (context.createScriptProcessor) {
+ __stereoAudioRecorderJavacriptNode = context.createScriptProcessor(bufferSize, 2, 2);
+ } else {
+ throw 'WebAudio API has no support on this browser.';
+ }
- var legalBufferValues = [0, 256, 512, 1024, 2048, 4096, 8192, 16384];
+ // connect the stream to the gain node
+ audioInput.connect(__stereoAudioRecorderJavacriptNode);
- /**
- * From the spec: This value controls how frequently the audioprocess event is
- * dispatched and how many sample-frames need to be processed each call.
- * Lower values for buffer size will result in a lower (better) latency.
- * Higher values will be necessary to avoid audio breakup and glitches
- * The size of the buffer (in sample-frames) which needs to
- * be processed each time onprocessaudio is called.
- * Legal values are (256, 512, 1024, 2048, 4096, 8192, 16384).
- * @property {number} bufferSize - Buffer-size for how frequently the audioprocess event is dispatched.
- * @memberof StereoAudioRecorder
- * @example
- * recorder = new StereoAudioRecorder(mediaStream, {
- * bufferSize: 4096
- * });
- */
+ bufferSize = __stereoAudioRecorderJavacriptNode.bufferSize;
- // "0" means, let chrome decide the most accurate buffer-size for current platform.
- var bufferSize = typeof config.bufferSize === 'undefined' ? 4096 : config.bufferSize;
+ if (!config.disableLogs) {
+ console.log('sample-rate', sampleRate);
+ console.log('buffer-size', bufferSize);
+ }
- if (legalBufferValues.indexOf(bufferSize) === -1) {
- if (!config.disableLogs) {
- console.warn('Legal values for buffer-size are ' + JSON.stringify(legalBufferValues, null, '\t'));
- }
+ var isAudioProcessStarted = false;
+
+ __stereoAudioRecorderJavacriptNode.onaudioprocess = function(e) {
+ // if MediaStream().stop() or MediaStreamTrack.stop() is invoked.
+ if (mediaStream.ended) {
+ __stereoAudioRecorderJavacriptNode.onaudioprocess = function() {};
+ return;
}
+ if (!recording) {
+ audioInput.disconnect();
+ return;
+ }
/**
- * The sample rate (in sample-frames per second) at which the
- * AudioContext handles audio. It is assumed that all AudioNodes
- * in the context run at this rate. In making this assumption,
- * sample-rate converters or "varispeed" processors are not supported
- * in real-time processing.
- * The sampleRate parameter describes the sample-rate of the
- * linear PCM audio data in the buffer in sample-frames per second.
- * An implementation must support sample-rates in at least
- * the range 22050 to 96000.
- * @property {number} sampleRate - Buffer-size for how frequently the audioprocess event is dispatched.
+ * This method is called on "onaudioprocess" event's first invocation.
+ * @method {function} onAudioProcessStarted
* @memberof StereoAudioRecorder
* @example
- * recorder = new StereoAudioRecorder(mediaStream, {
- * sampleRate: 44100
- * });
+ * recorder.onAudioProcessStarted = function() { };
*/
- var sampleRate = typeof config.sampleRate !== 'undefined' ? config.sampleRate : context.sampleRate || 44100;
-
- if (sampleRate < 22050 || sampleRate > 96000) {
- // Ref: http://stackoverflow.com/a/26303918/552182
- if (!config.disableLogs) {
- console.warn('sample-rate must be under range 22050 and 96000.');
+ if (!isAudioProcessStarted) {
+ isAudioProcessStarted = true;
+ if (self.onAudioProcessStarted) {
+ self.onAudioProcessStarted();
}
}
- if (context.createJavaScriptNode) {
- __stereoAudioRecorderJavacriptNode = context.createJavaScriptNode(bufferSize, 2, 2);
- } else if (context.createScriptProcessor) {
- __stereoAudioRecorderJavacriptNode = context.createScriptProcessor(bufferSize, 2, 2);
- } else {
- throw 'WebAudio API has no support on this browser.';
- }
+ var left = e.inputBuffer.getChannelData(0);
+ var right = e.inputBuffer.getChannelData(1);
- // connect the stream to the gain node
- audioInput.connect(__stereoAudioRecorderJavacriptNode);
+ // we clone the samples
+ leftchannel.push(new Float32Array(left));
+ rightchannel.push(new Float32Array(right));
- bufferSize = __stereoAudioRecorderJavacriptNode.bufferSize;
-
- if (!config.disableLogs) {
- console.log('sample-rate', sampleRate);
- console.log('buffer-size', bufferSize);
- }
-
- var isAudioProcessStarted = false,
- self = this;
- __stereoAudioRecorderJavacriptNode.onaudioprocess = function(e) {
- // if MediaStream().stop() or MediaStreamTrack.stop() is invoked.
- if (mediaStream.ended) {
- __stereoAudioRecorderJavacriptNode.onaudioprocess = function() {};
- return;
- }
-
- if (!recording) {
- audioInput.disconnect();
- return;
- }
-
- /**
- * This method is called on "onaudioprocess" event's first invocation.
- * @method {function} onAudioProcessStarted
- * @memberof StereoAudioRecorder
- * @example
- * recorder.onAudioProcessStarted: function() { };
- */
- if (!isAudioProcessStarted) {
- isAudioProcessStarted = true;
- if (self.onAudioProcessStarted) {
- self.onAudioProcessStarted();
- }
- }
-
- var left = e.inputBuffer.getChannelData(0);
- var right = e.inputBuffer.getChannelData(1);
-
- // we clone the samples
- leftchannel.push(new Float32Array(left));
- rightchannel.push(new Float32Array(right));
-
- recordingLength += bufferSize;
- };
+ recordingLength += bufferSize;
+ };
- // to prevent self audio to be connected with speakers
- __stereoAudioRecorderJavacriptNode.connect(context.destination);
- }
- // _________________
- // CanvasRecorder.js
+ // to prevent self audio to be connected with speakers
+ __stereoAudioRecorderJavacriptNode.connect(context.destination);
+}
+// _________________
+// CanvasRecorder.js
/**
* CanvasRecorder is a standalone class used by {@link RecordRTC} to bring HTML5-Canvas recording into video WebM. It uses HTML2Canvas library and runs top over {@link Whammy}.
@@ -1566,82 +1599,82 @@ function StereoAudioRecorder(mediaStream, config) {
*/
function CanvasRecorder(htmlElement) {
- if (!window.html2canvas) {
- throw 'Please link: //cdn.webrtc-experiment.com/screenshot.js';
- }
+ if (!window.html2canvas) {
+ throw 'Please link: //cdn.webrtc-experiment.com/screenshot.js';
+ }
- var isRecording;
+ var isRecording;
- /**
- * This method records Canvas.
- * @method
- * @memberof CanvasRecorder
- * @example
- * recorder.record();
- */
- this.record = function() {
- isRecording = true;
- whammy.frames = [];
- drawCanvasFrame();
- };
+ /**
+ * This method records Canvas.
+ * @method
+ * @memberof CanvasRecorder
+ * @example
+ * recorder.record();
+ */
+ this.record = function() {
+ isRecording = true;
+ whammy.frames = [];
+ drawCanvasFrame();
+ };
+
+ /**
+ * This method stops recording Canvas.
+ * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
+ * @method
+ * @memberof CanvasRecorder
+ * @example
+ * recorder.stop(function(blob) {
+ * video.src = URL.createObjectURL(blob);
+ * });
+ */
+ this.stop = function(callback) {
+ isRecording = false;
/**
- * This method stops recording Canvas.
- * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
- * @method
+ * @property {Blob} blob - Recorded frames in video/webm blob.
* @memberof CanvasRecorder
* @example
- * recorder.stop(function(blob) {
- * video.src = URL.createObjectURL(blob);
+ * recorder.stop(function() {
+ * var blob = recorder.blob;
* });
*/
- this.stop = function(callback) {
- isRecording = false;
+ this.blob = whammy.compile();
- /**
- * @property {Blob} blob - Recorded frames in video/webm blob.
- * @memberof CanvasRecorder
- * @example
- * recorder.stop(function() {
- * var blob = recorder.blob;
- * });
- */
- this.blob = whammy.compile();
-
- if (callback) {
- callback(this.blob);
- }
- };
+ if (callback) {
+ callback(this.blob);
+ }
+ };
- function drawCanvasFrame() {
- window.html2canvas(htmlElement, {
- onrendered: function(canvas) {
- var duration = new Date().getTime() - lastTime;
- if (!duration) {
- return drawCanvasFrame();
- }
+ function drawCanvasFrame() {
+ window.html2canvas(htmlElement, {
+ onrendered: function(canvas) {
+ var duration = new Date().getTime() - lastTime;
+ if (!duration) {
+ return drawCanvasFrame();
+ }
- // via #206, by Jack i.e. @Seymourr
- lastTime = new Date().getTime();
+ // via #206, by Jack i.e. @Seymourr
+ lastTime = new Date().getTime();
- whammy.frames.push({
- duration: duration,
- image: canvas.toDataURL('image/webp')
- });
+ whammy.frames.push({
+ duration: duration,
+ image: canvas.toDataURL('image/webp')
+ });
- if (isRecording) {
- requestAnimationFrame(drawCanvasFrame);
- }
+ if (isRecording) {
+ requestAnimationFrame(drawCanvasFrame);
}
- });
- }
+ }
+ });
+ }
- var lastTime = new Date().getTime();
+ var lastTime = new Date().getTime();
- var whammy = new Whammy.Video(100);
- }
- // _________________
- // WhammyRecorder.js
+ var whammy = new Whammy.Video(100);
+}
+// _________________
+// WhammyRecorder.js
/**
* WhammyRecorder is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It runs top over {@link Whammy}.
@@ -1658,231 +1691,231 @@ function CanvasRecorder(htmlElement) {
*/
function WhammyRecorder(mediaStream) {
- /**
- * This method records video.
- * @method
- * @memberof WhammyRecorder
- * @example
- * recorder.record();
- */
- this.record = function() {
- if (!this.width) {
- this.width = 320;
- }
+ /**
+ * This method records video.
+ * @method
+ * @memberof WhammyRecorder
+ * @example
+ * recorder.record();
+ */
+ this.record = function() {
+ if (!this.width) {
+ this.width = 320;
+ }
- if (!this.height) {
- this.height = 240;
- }
+ if (!this.height) {
+ this.height = 240;
+ }
- if (!this.video) {
- this.video = {
- width: this.width,
- height: this.height
- };
- }
+ if (!this.video) {
+ this.video = {
+ width: this.width,
+ height: this.height
+ };
+ }
- if (!this.canvas) {
- this.canvas = {
- width: this.width,
- height: this.height
- };
- }
+ if (!this.canvas) {
+ this.canvas = {
+ width: this.width,
+ height: this.height
+ };
+ }
- canvas.width = this.canvas.width;
- canvas.height = this.canvas.height;
+ canvas.width = this.canvas.width;
+ canvas.height = this.canvas.height;
- context = canvas.getContext('2d');
+ context = canvas.getContext('2d');
- // setting defaults
- if (this.video && this.video instanceof HTMLVideoElement) {
- video = this.video.cloneNode();
- } else {
- video = document.createElement('video');
- video.src = URL.createObjectURL(mediaStream);
+ // setting defaults
+ if (this.video && this.video instanceof HTMLVideoElement) {
+ video = this.video.cloneNode();
+ } else {
+ video = document.createElement('video');
+ video.src = URL.createObjectURL(mediaStream);
- video.width = this.video.width;
- video.height = this.video.height;
- }
+ video.width = this.video.width;
+ video.height = this.video.height;
+ }
- video.muted = true;
- video.play();
+ video.muted = true;
+ video.play();
- lastTime = new Date().getTime();
- whammy = new Whammy.Video();
+ lastTime = new Date().getTime();
+ whammy = new Whammy.Video();
- if (!this.disableLogs) {
- console.log('canvas resolutions', canvas.width, '*', canvas.height);
- console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height);
- }
+ if (!this.disableLogs) {
+ console.log('canvas resolutions', canvas.width, '*', canvas.height);
+ console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height);
+ }
- drawFrames();
- };
+ drawFrames();
+ };
- function drawFrames() {
- var duration = new Date().getTime() - lastTime;
- if (!duration) {
- return drawFrames();
- }
+ function drawFrames() {
+ var duration = new Date().getTime() - lastTime;
+ if (!duration) {
+ return drawFrames();
+ }
- // via #206, by Jack i.e. @Seymourr
- lastTime = new Date().getTime();
+ // via #206, by Jack i.e. @Seymourr
+ lastTime = new Date().getTime();
- context.drawImage(video, 0, 0, canvas.width, canvas.height);
- whammy.frames.push({
- duration: duration,
- image: canvas.toDataURL('image/webp')
- });
+ context.drawImage(video, 0, 0, canvas.width, canvas.height);
+ whammy.frames.push({
+ duration: duration,
+ image: canvas.toDataURL('image/webp')
+ });
- if (!isStopDrawing) {
- setTimeout(drawFrames, 10);
- }
+ if (!isStopDrawing) {
+ setTimeout(drawFrames, 10);
}
+ }
- /**
- * remove black frames from the beginning to the specified frame
- * @param {Array} _frames - array of frames to be checked
- * @param {number} _framesToCheck - number of frame until check will be executed (-1 - will drop all frames until frame not matched will be found)
- * @param {number} _pixTolerance - 0 - very strict (only black pixel color) ; 1 - all
- * @param {number} _frameTolerance - 0 - very strict (only black frame color) ; 1 - all
- * @returns {Array} - array of frames
- */
- // pull#293 by @volodalexey
- function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance) {
- var localCanvas = document.createElement('canvas');
- localCanvas.width = canvas.width;
- localCanvas.height = canvas.height;
- var context2d = localCanvas.getContext('2d');
- var resultFrames = [];
-
- var checkUntilNotBlack = _framesToCheck === -1;
- var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
- _framesToCheck : _frames.length;
- var sampleColor = {
- r: 0,
- g: 0,
- b: 0
- };
- var maxColorDifference = Math.sqrt(
- Math.pow(255, 2) +
- Math.pow(255, 2) +
- Math.pow(255, 2)
- );
- var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
- var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
- var doNotCheckNext = false;
-
- for (var f = 0; f < endCheckFrame; f++) {
- var matchPixCount, endPixCheck, maxPixCount;
-
- if (!doNotCheckNext) {
- var image = new Image();
- image.src = _frames[f].image;
- context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
- var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
- matchPixCount = 0;
- endPixCheck = imageData.data.length;
- maxPixCount = imageData.data.length / 4;
-
- for (var pix = 0; pix < endPixCheck; pix += 4) {
- var currentColor = {
- r: imageData.data[pix],
- g: imageData.data[pix + 1],
- b: imageData.data[pix + 2]
- };
- var colorDifference = Math.sqrt(
- Math.pow(currentColor.r - sampleColor.r, 2) +
- Math.pow(currentColor.g - sampleColor.g, 2) +
- Math.pow(currentColor.b - sampleColor.b, 2)
- );
- // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
- if (colorDifference <= maxColorDifference * pixTolerance) {
- matchPixCount++;
- }
+ /**
+ * remove black frames from the beginning to the specified frame
+ * @param {Array} _frames - array of frames to be checked
+ * @param {number} _framesToCheck - number of frame until check will be executed (-1 - will drop all frames until frame not matched will be found)
+ * @param {number} _pixTolerance - 0 - very strict (only black pixel color) ; 1 - all
+ * @param {number} _frameTolerance - 0 - very strict (only black frame color) ; 1 - all
+ * @returns {Array} - array of frames
+ */
+ // pull#293 by @volodalexey
+ function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance) {
+ var localCanvas = document.createElement('canvas');
+ localCanvas.width = canvas.width;
+ localCanvas.height = canvas.height;
+ var context2d = localCanvas.getContext('2d');
+ var resultFrames = [];
+
+ var checkUntilNotBlack = _framesToCheck === -1;
+ var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
+ _framesToCheck : _frames.length;
+ var sampleColor = {
+ r: 0,
+ g: 0,
+ b: 0
+ };
+ var maxColorDifference = Math.sqrt(
+ Math.pow(255, 2) +
+ Math.pow(255, 2) +
+ Math.pow(255, 2)
+ );
+ var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
+ var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
+ var doNotCheckNext = false;
+
+ for (var f = 0; f < endCheckFrame; f++) {
+ var matchPixCount, endPixCheck, maxPixCount;
+
+ if (!doNotCheckNext) {
+ var image = new Image();
+ image.src = _frames[f].image;
+ context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
+ var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
+ matchPixCount = 0;
+ endPixCheck = imageData.data.length;
+ maxPixCount = imageData.data.length / 4;
+
+ for (var pix = 0; pix < endPixCheck; pix += 4) {
+ var currentColor = {
+ r: imageData.data[pix],
+ g: imageData.data[pix + 1],
+ b: imageData.data[pix + 2]
+ };
+ var colorDifference = Math.sqrt(
+ Math.pow(currentColor.r - sampleColor.r, 2) +
+ Math.pow(currentColor.g - sampleColor.g, 2) +
+ Math.pow(currentColor.b - sampleColor.b, 2)
+ );
+ // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
+ if (colorDifference <= maxColorDifference * pixTolerance) {
+ matchPixCount++;
}
}
+ }
- if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
- // console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
- } else {
- // console.log('frame is passed : ' + f);
- if (checkUntilNotBlack) {
- doNotCheckNext = true;
- }
- resultFrames.push(_frames[f]);
+ if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
+ // console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
+ } else {
+ // console.log('frame is passed : ' + f);
+ if (checkUntilNotBlack) {
+ doNotCheckNext = true;
}
+ resultFrames.push(_frames[f]);
}
+ }
- resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
-
- if (resultFrames.length <= 0) {
- // at least one last frame should be available for next manipulation
- // if total duration of all frames will be < 1000 than ffmpeg doesn't work well...
- resultFrames.push(_frames[_frames.length - 1]);
- }
+ resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
- return resultFrames;
+ if (resultFrames.length <= 0) {
+ // at least one last frame should be available for next manipulation
+ // if total duration of all frames will be < 1000 than ffmpeg doesn't work well...
+ resultFrames.push(_frames[_frames.length - 1]);
}
- var isStopDrawing = false;
+ return resultFrames;
+ }
- /**
- * This method stops recording video.
- * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
- * @method
- * @memberof WhammyRecorder
- * @example
- * recorder.stop(function(blob) {
- * video.src = URL.createObjectURL(blob);
- * });
- */
- this.stop = function(callback) {
- isStopDrawing = true;
+ var isStopDrawing = false;
- var _this = this;
- // analyse of all frames takes some time!
- setTimeout(function() {
- // e.g. dropBlackFrames(frames, 10, 1, 1) - will cut all 10 frames
- // e.g. dropBlackFrames(frames, 10, 0.5, 0.5) - will analyse 10 frames
- // e.g. dropBlackFrames(frames, 10) === dropBlackFrames(frames, 10, 0, 0) - will analyse 10 frames with strict black color
- whammy.frames = dropBlackFrames(whammy.frames, -1);
-
- // to display advertisement images!
- if (this.advertisement && this.advertisement.length) {
- whammy.frames = this.advertisement.concat(whammy.frames);
- }
+ /**
+ * This method stops recording video.
+ * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
+ * @method
+ * @memberof WhammyRecorder
+ * @example
+ * recorder.stop(function(blob) {
+ * video.src = URL.createObjectURL(blob);
+ * });
+ */
+ this.stop = function(callback) {
+ isStopDrawing = true;
+
+ var _this = this;
+ // analyse of all frames takes some time!
+ setTimeout(function() {
+ // e.g. dropBlackFrames(frames, 10, 1, 1) - will cut all 10 frames
+ // e.g. dropBlackFrames(frames, 10, 0.5, 0.5) - will analyse 10 frames
+ // e.g. dropBlackFrames(frames, 10) === dropBlackFrames(frames, 10, 0, 0) - will analyse 10 frames with strict black color
+ whammy.frames = dropBlackFrames(whammy.frames, -1);
+
+ // to display advertisement images!
+ if (this.advertisement && this.advertisement.length) {
+ whammy.frames = this.advertisement.concat(whammy.frames);
+ }
- /**
- * @property {Blob} blob - Recorded frames in video/webm blob.
- * @memberof WhammyRecorder
- * @example
- * recorder.stop(function() {
- * var blob = recorder.blob;
- * });
- */
- _this.blob = whammy.compile();
-
- if (_this.blob.forEach) {
- _this.blob = new Blob([], {
- type: 'video/webm'
- });
- }
+ /**
+ * @property {Blob} blob - Recorded frames in video/webm blob.
+ * @memberof WhammyRecorder
+ * @example
+ * recorder.stop(function() {
+ * var blob = recorder.blob;
+ * });
+ */
+ _this.blob = whammy.compile();
- if (callback) {
- callback(_this.blob);
- }
- }, 10);
- };
+ if (_this.blob.forEach) {
+ _this.blob = new Blob([], {
+ type: 'video/webm'
+ });
+ }
+
+ if (callback) {
+ callback(_this.blob);
+ }
+ }, 10);
+ };
- var canvas = document.createElement('canvas');
- var context = canvas.getContext('2d');
+ var canvas = document.createElement('canvas');
+ var context = canvas.getContext('2d');
- var video;
- var lastTime;
- var whammy;
- }
- // https://github.com/antimatter15/whammy/blob/master/LICENSE
- // _________
- // Whammy.js
+ var video;
+ var lastTime;
+ var whammy;
+}
+// https://github.com/antimatter15/whammy/blob/master/LICENSE
+// _________
+// Whammy.js
// todo: Firefox now supports webp for webm containers!
// their MediaRecorder implementation works well!
diff --git a/RecordRTC.min.js b/RecordRTC.min.js
index 812cb9a2..6bc99e26 100644
--- a/RecordRTC.min.js
+++ b/RecordRTC.min.js
@@ -1 +1 @@
-"use strict";function RecordRTC(mediaStream,config){function startRecording(){config.disableLogs||console.debug("started recording "+config.type+" stream.");var Recorder=isChrome?window.StereoRecorder:window.MediaStreamRecorder;return"video"===config.type&&isChrome&&(Recorder=window.WhammyRecorder),"gif"===config.type&&(Recorder=window.GifRecorder),"canvas"===config.type&&(Recorder=window.CanvasRecorder),mediaRecorder=new Recorder(mediaStream),mediaRecorder=mergeProps(mediaRecorder,config),mediaRecorder.onAudioProcessStarted=function(){config.onAudioProcessStarted&&config.onAudioProcessStarted()},mediaRecorder.onGifPreview=function(gif){config.onGifPreview&&config.onGifPreview(gif)},mediaRecorder.record(),self}function stopRecording(callback){function _callback(){for(var item in mediaRecorder)self&&(self[item]=mediaRecorder[item]),recordRTC&&(recordRTC[item]=mediaRecorder[item]);var blob=mediaRecorder.blob;if(callback){var url=URL.createObjectURL(blob);callback(url)}config.disableLogs||console.debug(blob.type,"->",bytesToSize(blob.size)),config.autoWriteToDisk&&getDataURL(function(dataURL){var parameter={};parameter[config.type+"Blob"]=dataURL,DiskStorage.Store(parameter)})}if(!mediaRecorder)return console.warn(WARNING);var recordRTC=this;config.disableLogs||console.warn("Stopped recording "+config.type+" stream."),"gif"!==config.type?mediaRecorder.stop(_callback):(mediaRecorder.stop(),_callback())}function getDataURL(callback,_mediaRecorder){function processInWebWorker(_function){var blob=URL.createObjectURL(new Blob([_function.toString(),"this.onmessage = function (e) {readFile(e.data);}"],{type:"application/javascript"})),worker=new Worker(blob);return URL.revokeObjectURL(blob),worker}if(!callback)throw"Pass a callback function over getDataURL.";var blob=_mediaRecorder?_mediaRecorder.blob:mediaRecorder.blob;if(!blob)return config.disableLogs||console.warn("Blob encoder did not yet finished its job."),void 
setTimeout(function(){getDataURL(callback,_mediaRecorder)},1e3);if(window.Worker){var webWorker=processInWebWorker(function(_blob){postMessage((new FileReaderSync).readAsDataURL(_blob))});webWorker.onmessage=function(event){callback(event.data)},webWorker.postMessage(blob)}else{var reader=new FileReader;reader.readAsDataURL(blob),reader.onload=function(event){callback(event.target.result)}}}if(config=config||{},!mediaStream)throw"MediaStream is mandatory.";config.type||(config.type="audio");var mediaRecorder,self=this,WARNING='It seems that "startRecording" is not invoked for '+config.type+" recorder.",returnObject={startRecording:startRecording,stopRecording:stopRecording,getBlob:function(){return mediaRecorder?mediaRecorder.blob:console.warn(WARNING)},getDataURL:getDataURL,toURL:function(){return mediaRecorder?URL.createObjectURL(mediaRecorder.blob):console.warn(WARNING)},save:function(fileName){if(!mediaRecorder){var that=this;return setTimeout(function(){that.save(fileName)},2e3),console.warn(WARNING)}var hyperlink=document.createElement("a");hyperlink.href=URL.createObjectURL(mediaRecorder.blob),hyperlink.target="_blank",hyperlink.download=(fileName||Math.round(9999999999*Math.random())+888888888)+"."+mediaRecorder.blob.type.split("/")[1];var evt=new MouseEvent("click",{view:window,bubbles:!0,cancelable:!0});hyperlink.dispatchEvent(evt),(window.URL||window.webkitURL).revokeObjectURL(hyperlink.href)},getFromDisk:function(callback){return mediaRecorder?void RecordRTC.getFromDisk(config.type,callback):console.warn(WARNING)},setAdvertisementArray:function(arrayOfWebPImages){this.advertisement=[];for(var length=arrayOfWebPImages.length,i=0;length>i;i++)this.advertisement.push({duration:i,image:arrayOfWebPImages[i]})},blob:null,bufferSize:0,sampleRate:0,buffer:null,view:null};if(!this)return returnObject;for(var prop in returnObject)this[prop]=returnObject[prop];return returnObject}function 
MRecordRTC(mediaStream){this.addStream=function(_mediaStream){_mediaStream&&(mediaStream=_mediaStream)},this.mediaType={audio:!0,video:!0},this.startRecording=function(){!isChrome&&mediaStream&&mediaStream.getAudioTracks&&mediaStream.getAudioTracks().length&&mediaStream.getVideoTracks().length&&(this.mediaType.audio=!1),this.mediaType.audio&&(this.audioRecorder=new RecordRTC(mediaStream,{type:"audio",bufferSize:this.bufferSize,sampleRate:this.sampleRate}),this.audioRecorder.startRecording()),this.mediaType.video&&(this.videoRecorder=new RecordRTC(mediaStream,{type:"video",video:this.video,canvas:this.canvas}),this.videoRecorder.startRecording()),this.mediaType.gif&&(this.gifRecorder=new RecordRTC(mediaStream,{type:"gif",frameRate:this.frameRate||200,quality:this.quality||10}),this.gifRecorder.startRecording())},this.stopRecording=function(callback){callback=callback||function(){},this.audioRecorder&&this.audioRecorder.stopRecording(function(blobURL){callback(blobURL,"audio")}),this.videoRecorder&&this.videoRecorder.stopRecording(function(blobURL){callback(blobURL,"video")}),this.gifRecorder&&this.gifRecorder.stopRecording(function(blobURL){callback(blobURL,"gif")})},this.getBlob=function(callback){var output={};this.audioRecorder&&(output.audio=this.audioRecorder.getBlob()),this.videoRecorder&&(output.video=this.videoRecorder.getBlob()),this.gifRecorder&&(output.gif=this.gifRecorder.getBlob()),callback&&callback(output)},this.getDataURL=function(callback){function getDataURL(blob,callback00){if(window.Worker){var webWorker=processInWebWorker(function(_blob){postMessage((new FileReaderSync).readAsDataURL(_blob))});webWorker.onmessage=function(event){callback00(event.data)},webWorker.postMessage(blob)}else{var reader=new FileReader;reader.readAsDataURL(blob),reader.onload=function(event){callback00(event.target.result)}}}function processInWebWorker(_function){var blob=URL.createObjectURL(new Blob([_function.toString(),"this.onmessage = function (e) 
{readFile(e.data);}"],{type:"application/javascript"})),worker=new Worker(blob);return URL.revokeObjectURL(blob),worker}this.getBlob(function(blob){getDataURL(blob.audio,function(_audioDataURL){getDataURL(blob.video,function(_videoDataURL){callback({audio:_audioDataURL,video:_videoDataURL})})})})},this.writeToDisk=function(){RecordRTC.writeToDisk({audio:this.audioRecorder,video:this.videoRecorder,gif:this.gifRecorder})},this.save=function(args){args=args||{audio:!0,video:!0,gif:!0},args.audio&&this.audioRecorder&&this.audioRecorder.save("string"==typeof args.audio?args.audio:""),args.video&&this.videoRecorder&&this.videoRecorder.save("string"==typeof args.video?args.video:""),args.gif&&this.gifRecorder&&this.gifRecorder.save("string"==typeof args.gif?args.gif:"")}}function mergeProps(mergein,mergeto){mergeto=reformatProps(mergeto);for(var t in mergeto)"function"!=typeof mergeto[t]&&(mergein[t]=mergeto[t]);return mergein}function reformatProps(obj){var output={};for(var o in obj)if(-1!==o.indexOf("-")){var splitted=o.split("-"),name=splitted[0]+splitted[1].split("")[0].toUpperCase()+splitted[1].substr(1);output[name]=obj[o]}else output[o]=obj[o];return output}function bytesToSize(bytes){var k=1e3,sizes=["Bytes","KB","MB","GB","TB"];if(0===bytes)return"0 Bytes";var i=parseInt(Math.floor(Math.log(bytes)/Math.log(k)),10);return(bytes/Math.pow(k,i)).toPrecision(3)+" "+sizes[i]}function MediaStreamRecorder(mediaStream){var self=this;if(self.mimeType&&"video/webm"!==self.mimeType&&mediaStream.getVideoTracks&&mediaStream.getVideoTracks().length){var context=new AudioContext,mediaStreamSource=context.createMediaStreamSource(mediaStream),destination=context.createMediaStreamDestination();mediaStreamSource.connect(destination),mediaStream=destination.stream}var dataAvailable=!1;this.record=function(){mediaRecorder=new window.MediaRecorder(mediaStream),mediaRecorder.ondataavailable=function(e){if(!dataAvailable){if(!e.data.size)return 
void(self.disableLogs||console.warn("Recording of",e.data.type,"failed."));dataAvailable=!0,self.blob=new Blob([e.data],{type:e.data.type||self.mimeType||"audio/ogg"}),self.callback&&self.callback()}},mediaRecorder.onerror=function(error){self.disableLogs||console.warn(error),mediaRecorder.stop(),self.record(0)},mediaRecorder.start(0),self.onAudioProcessStarted&&self.onAudioProcessStarted()},this.stop=function(callback){this.callback=callback,"recording"===mediaRecorder.state&&mediaRecorder.stop()};var mediaRecorder}function StereoRecorder(mediaStream){var self=this;this.record=function(){mediaRecorder=new StereoAudioRecorder(mediaStream,this),mediaRecorder.onAudioProcessStarted=function(){self.onAudioProcessStarted&&self.onAudioProcessStarted()},mediaRecorder.record()},this.stop=function(callback){mediaRecorder&&mediaRecorder.stop(function(){for(var item in mediaRecorder)self[item]=mediaRecorder[item];callback&&callback()})};var mediaRecorder}function StereoAudioRecorder(mediaStream,config){function interleave(leftChannel,rightChannel){var length=leftChannel.length+rightChannel.length;config.disableLogs||console.log("Buffers length:",length);for(var result=new Float64Array(length),inputIndex=0,index=0;length>index;)result[index++]=leftChannel[inputIndex],result[index++]=rightChannel[inputIndex],inputIndex++;return result}function mergeBuffers(channelBuffer,rLength){for(var result=new Float64Array(rLength),offset=0,lng=channelBuffer.length,i=0;lng>i;i++){var buffer=channelBuffer[i];result.set(buffer,offset),offset+=buffer.length}return result}function writeUTFBytes(view,offset,string){for(var lng=string.length,i=0;lng>i;i++)view.setUint8(offset+i,string.charCodeAt(i))}if(!mediaStream.getAudioTracks().length)throw"Your stream has no audio tracks.";var 
leftchannel=[],rightchannel=[],recording=!1,recordingLength=0;this.record=function(){leftchannel.length=rightchannel.length=0,recordingLength=0,recording=!0},this.stop=function(callback){recording=!1,audioInput.disconnect();var leftBuffer=mergeBuffers(leftchannel,recordingLength),rightBuffer=mergeBuffers(rightchannel,recordingLength),interleaved=interleave(leftBuffer,rightBuffer),interleavedLength=interleaved.length,resultingBufferLength=44+2*interleavedLength;config.disableLogs||console.log("Resulting Buffer Length",resultingBufferLength);var buffer=new ArrayBuffer(resultingBufferLength),view=new DataView(buffer);writeUTFBytes(view,0,"RIFF");var blockAlign=4;view.setUint32(blockAlign,44+2*interleavedLength,!0),writeUTFBytes(view,8,"WAVE"),writeUTFBytes(view,12,"fmt "),view.setUint32(16,16,!0),view.setUint16(20,1,!0),view.setUint16(22,2,!0),view.setUint32(24,sampleRate,!0),view.setUint32(28,sampleRate*blockAlign,!0),view.setUint16(32,blockAlign,!0),view.setUint16(34,16,!0),writeUTFBytes(view,36,"data"),view.setUint32(40,2*interleavedLength,!0);for(var leftChannel,offset=44,i=0;interleavedLength>i;i++,offset+=2){var size=Math.max(-1,Math.min(1,interleaved[i])),currentChannel=0>size?32768*size:32767*size;config.leftChannel?(currentChannel!==leftChannel&&view.setInt16(offset,currentChannel,!0),leftChannel=currentChannel):view.setInt16(offset,currentChannel,!0)}this.blob=new Blob([view],{type:"audio/wav"}),this.buffer=new ArrayBuffer(view),this.view=view,this.sampleRate=sampleRate,this.bufferSize=bufferSize,this.length=recordingLength,callback&&callback(),isAudioProcessStarted=!1},Storage.AudioContextConstructor||(Storage.AudioContextConstructor=new Storage.AudioContext);var context=Storage.AudioContextConstructor,audioInput=context.createMediaStreamSource(mediaStream),legalBufferValues=[0,256,512,1024,2048,4096,8192,16384],bufferSize="undefined"==typeof 
config.bufferSize?4096:config.bufferSize;-1===legalBufferValues.indexOf(bufferSize)&&(config.disableLogs||console.warn("Legal values for buffer-size are "+JSON.stringify(legalBufferValues,null," ")));var sampleRate="undefined"!=typeof config.sampleRate?config.sampleRate:context.sampleRate||44100;if((22050>sampleRate||sampleRate>96e3)&&(config.disableLogs||console.warn("sample-rate must be under range 22050 and 96000.")),context.createJavaScriptNode)__stereoAudioRecorderJavacriptNode=context.createJavaScriptNode(bufferSize,2,2);else{if(!context.createScriptProcessor)throw"WebAudio API has no support on this browser.";__stereoAudioRecorderJavacriptNode=context.createScriptProcessor(bufferSize,2,2)}audioInput.connect(__stereoAudioRecorderJavacriptNode),bufferSize=__stereoAudioRecorderJavacriptNode.bufferSize,config.disableLogs||(console.log("sample-rate",sampleRate),console.log("buffer-size",bufferSize));var isAudioProcessStarted=!1,self=this;__stereoAudioRecorderJavacriptNode.onaudioprocess=function(e){if(mediaStream.ended)return void(__stereoAudioRecorderJavacriptNode.onaudioprocess=function(){});if(!recording)return void audioInput.disconnect();isAudioProcessStarted||(isAudioProcessStarted=!0,self.onAudioProcessStarted&&self.onAudioProcessStarted());var left=e.inputBuffer.getChannelData(0),right=e.inputBuffer.getChannelData(1);leftchannel.push(new Float32Array(left)),rightchannel.push(new Float32Array(right)),recordingLength+=bufferSize},__stereoAudioRecorderJavacriptNode.connect(context.destination)}function CanvasRecorder(htmlElement){function drawCanvasFrame(){window.html2canvas(htmlElement,{onrendered:function(canvas){var duration=(new Date).getTime()-lastTime;return duration?(lastTime=(new Date).getTime(),whammy.frames.push({duration:duration,image:canvas.toDataURL("image/webp")}),void(isRecording&&requestAnimationFrame(drawCanvasFrame))):drawCanvasFrame()}})}if(!window.html2canvas)throw"Please link: //cdn.webrtc-experiment.com/screenshot.js";var 
isRecording;this.record=function(){isRecording=!0,whammy.frames=[],drawCanvasFrame()},this.stop=function(callback){isRecording=!1,this.blob=whammy.compile(),callback&&callback(this.blob)};var lastTime=(new Date).getTime(),whammy=new Whammy.Video(100)}function WhammyRecorder(mediaStream){function drawFrames(){var duration=(new Date).getTime()-lastTime;return duration?(lastTime=(new Date).getTime(),context.drawImage(video,0,0,canvas.width,canvas.height),whammy.frames.push({duration:duration,image:canvas.toDataURL("image/webp")}),void(isStopDrawing||setTimeout(drawFrames,10))):drawFrames()}function dropBlackFrames(_frames,_framesToCheck,_pixTolerance,_frameTolerance){var localCanvas=document.createElement("canvas");localCanvas.width=canvas.width,localCanvas.height=canvas.height;for(var context2d=localCanvas.getContext("2d"),resultFrames=[],checkUntilNotBlack=-1===_framesToCheck,endCheckFrame=_framesToCheck&&_framesToCheck>0&&_framesToCheck<=_frames.length?_framesToCheck:_frames.length,sampleColor={r:0,g:0,b:0},maxColorDifference=Math.sqrt(Math.pow(255,2)+Math.pow(255,2)+Math.pow(255,2)),pixTolerance=_pixTolerance&&_pixTolerance>=0&&1>=_pixTolerance?_pixTolerance:0,frameTolerance=_frameTolerance&&_frameTolerance>=0&&1>=_frameTolerance?_frameTolerance:0,doNotCheckNext=!1,f=0;endCheckFrame>f;f++){var matchPixCount,endPixCheck,maxPixCount;if(!doNotCheckNext){var image=new Image;image.src=_frames[f].image,context2d.drawImage(image,0,0,canvas.width,canvas.height);var imageData=context2d.getImageData(0,0,canvas.width,canvas.height);matchPixCount=0,endPixCheck=imageData.data.length,maxPixCount=imageData.data.length/4;for(var pix=0;endPixCheck>pix;pix+=4){var 
currentColor={r:imageData.data[pix],g:imageData.data[pix+1],b:imageData.data[pix+2]},colorDifference=Math.sqrt(Math.pow(currentColor.r-sampleColor.r,2)+Math.pow(currentColor.g-sampleColor.g,2)+Math.pow(currentColor.b-sampleColor.b,2));maxColorDifference*pixTolerance>=colorDifference&&matchPixCount++}}!doNotCheckNext&&maxPixCount*frameTolerance>=maxPixCount-matchPixCount||(checkUntilNotBlack&&(doNotCheckNext=!0),resultFrames.push(_frames[f]))}return resultFrames=resultFrames.concat(_frames.slice(endCheckFrame)),resultFrames.length<=0&&resultFrames.push(_frames[_frames.length-1]),resultFrames}this.record=function(){this.width||(this.width=320),this.height||(this.height=240),this.video||(this.video={width:this.width,height:this.height}),this.canvas||(this.canvas={width:this.width,height:this.height}),canvas.width=this.canvas.width,canvas.height=this.canvas.height,context=canvas.getContext("2d"),this.video&&this.video instanceof HTMLVideoElement?video=this.video.cloneNode():(video=document.createElement("video"),video.src=URL.createObjectURL(mediaStream),video.width=this.video.width,video.height=this.video.height),video.muted=!0,video.play(),lastTime=(new Date).getTime(),whammy=new Whammy.Video,this.disableLogs||(console.log("canvas resolutions",canvas.width,"*",canvas.height),console.log("video width/height",video.width||canvas.width,"*",video.height||canvas.height)),drawFrames()};var isStopDrawing=!1;this.stop=function(callback){isStopDrawing=!0;var _this=this;setTimeout(function(){whammy.frames=dropBlackFrames(whammy.frames,-1),this.advertisement&&this.advertisement.length&&(whammy.frames=this.advertisement.concat(whammy.frames)),_this.blob=whammy.compile(),_this.blob.forEach&&(_this.blob=new Blob([],{type:"video/webm"})),callback&&callback(_this.blob)},10)};var video,lastTime,whammy,canvas=document.createElement("canvas"),context=canvas.getContext("2d")}function GifRecorder(mediaStream){if(!window.GIFEncoder)throw"Please link: 
https://cdn.webrtc-experiment.com/gif-recorder.js";this.record=function(){function drawVideoFrame(time){lastAnimationFrame=requestAnimationFrame(drawVideoFrame),void 0===typeof lastFrameTime&&(lastFrameTime=time),90>time-lastFrameTime||(context.drawImage(video,0,0,canvas.width,canvas.height),self.onGifPreview&&self.onGifPreview(canvas.toDataURL("image/png")),gifEncoder.addFrame(context),lastFrameTime=time)}this.width||(this.width=video.offsetWidth||320),this.height||(this.height=video.offsetHeight||240),this.video||(this.video={width:this.width,height:this.height}),this.canvas||(this.canvas={width:this.width,height:this.height}),canvas.width=this.canvas.width,canvas.height=this.canvas.height,video.width=this.video.width,video.height=this.video.height,gifEncoder=new window.GIFEncoder,gifEncoder.setRepeat(0),gifEncoder.setDelay(this.frameRate||200),gifEncoder.setQuality(this.quality||10),gifEncoder.start(),startTime=Date.now();var self=this;lastAnimationFrame=requestAnimationFrame(drawVideoFrame)},this.stop=function(){lastAnimationFrame&&cancelAnimationFrame(lastAnimationFrame),endTime=Date.now(),this.blob=new Blob([new Uint8Array(gifEncoder.stream().bin)],{type:"image/gif"}),gifEncoder.stream().bin=[]};var canvas=document.createElement("canvas"),context=canvas.getContext("2d"),video=document.createElement("video");video.muted=!0,video.autoplay=!0,video.src=URL.createObjectURL(mediaStream),video.play();var startTime,endTime,lastFrameTime,gifEncoder,lastAnimationFrame=null}RecordRTC.getFromDisk=function(type,callback){if(!callback)throw"callback is mandatory.";console.log("Getting recorded "+("all"===type?"blobs":type+" blob ")+" from disk!"),DiskStorage.Fetch(function(dataURL,_type){"all"!==type&&_type===type+"Blob"&&callback&&callback(dataURL),"all"===type&&callback&&callback(dataURL,_type.replace("Blob",""))})},RecordRTC.writeToDisk=function(options){console.log("Writing recorded blob(s) to 
disk!"),options=options||{},options.audio&&options.video&&options.gif?options.audio.getDataURL(function(audioDataURL){options.video.getDataURL(function(videoDataURL){options.gif.getDataURL(function(gifDataURL){DiskStorage.Store({audioBlob:audioDataURL,videoBlob:videoDataURL,gifBlob:gifDataURL})})})}):options.audio&&options.video?options.audio.getDataURL(function(audioDataURL){options.video.getDataURL(function(videoDataURL){DiskStorage.Store({audioBlob:audioDataURL,videoBlob:videoDataURL})})}):options.audio&&options.gif?options.audio.getDataURL(function(audioDataURL){options.gif.getDataURL(function(gifDataURL){DiskStorage.Store({audioBlob:audioDataURL,gifBlob:gifDataURL})})}):options.video&&options.gif?options.video.getDataURL(function(videoDataURL){options.gif.getDataURL(function(gifDataURL){DiskStorage.Store({videoBlob:videoDataURL,gifBlob:gifDataURL})})}):options.audio?options.audio.getDataURL(function(audioDataURL){DiskStorage.Store({audioBlob:audioDataURL})}):options.video?options.video.getDataURL(function(videoDataURL){DiskStorage.Store({videoBlob:videoDataURL})}):options.gif&&options.gif.getDataURL(function(gifDataURL){DiskStorage.Store({gifBlob:gifDataURL})})},MRecordRTC.getFromDisk=RecordRTC.getFromDisk,MRecordRTC.writeToDisk=RecordRTC.writeToDisk,window.requestAnimationFrame||(window.requestAnimationFrame=window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame),window.cancelAnimationFrame||(window.cancelAnimationFrame=window.webkitCancelAnimationFrame||window.mozCancelAnimationFrame),window.AudioContext||(window.AudioContext=window.webkitAudioContext||window.mozAudioContext),window.URL=window.URL||window.webkitURL,navigator.getUserMedia=navigator.webkitGetUserMedia||navigator.mozGetUserMedia,window.webkitMediaStream&&(window.MediaStream=window.webkitMediaStream);var isChrome=!!navigator.webkitGetUserMedia;0===location.href.indexOf("file:")&&console.error("Please load this HTML file on HTTP or HTTPS.");var 
Storage={AudioContext:window.AudioContext||window.webkitAudioContext},__stereoAudioRecorderJavacriptNode,Whammy=function(){function ArrayToWebM(frames){var info=checkFrames(frames);if(!info)return[];for(var clusterMaxDuration=3e4,EBML=[{id:440786851,data:[{data:1,id:17030},{data:1,id:17143},{data:4,id:17138},{data:8,id:17139},{data:"webm",id:17026},{data:2,id:17031},{data:2,id:17029}]},{id:408125543,data:[{id:357149030,data:[{data:1e6,id:2807729},{data:"whammy",id:19840},{data:"whammy",id:22337},{data:doubleToString(info.duration),id:17545}]},{id:374648427,data:[{id:174,data:[{data:1,id:215},{data:1,id:25541},{data:0,id:156},{data:"und",id:2274716},{data:"V_VP8",id:134},{data:"VP8",id:2459272},{data:1,id:131},{id:224,data:[{data:info.width,id:176},{data:info.height,id:186}]}]}]}]}],frameNumber=0,clusterTimecode=0;frameNumber
@@ -72,8 +72,8 @@
- RecordRTC: WebRTC audio/video recording ®
+ RecordRTC: WebRTC audio/video recording ®
Muaz Khan
Muaz Khan .
@WebRTCWeb .
Github .
- Latest issues .
- What's New?
+ Latest issues .
+ What's New?
+// you can even use URL parameters: +?bufferSize=0&sampleRate=44100 +?leftChannel=false&disableLogs=false +
+// you can even use URL parameters: +?canvas_width=1280&canvas_height=720 +