Chrome extension to capture video of the last few minutes of the active tab

I am trying to write a Chrome plugin to capture a video of the active tab. My code is based on this post.
When my page action is invoked, I start the recording:
var recordedChunks = null;
var mediaRecorder = null;
var captureOptions = { audio: false, video: true };

chrome.tabCapture.capture(captureOptions, function(stream) {
    if (stream) {
        recordedChunks = [];
        var options = { mimeType: "video/webm" };
        mediaRecorder = new MediaRecorder(stream, options);
        mediaRecorder.ondataavailable = function(event) {
            if (event.data.size > 0) {
                recordedChunks.push(event.data);
            }
        };
        mediaRecorder.start();
    }
});
When the page action is invoked again, I stop the recording and download a file as:
mediaRecorder.stop();
var blob = new Blob(recordedChunks, {
    type: 'video/webm'
});
var url = URL.createObjectURL(blob);
var a = document.createElement('a');
document.body.appendChild(a);
a.style = 'display: none';
a.href = url;
a.download = 'test.webm';
a.click();
window.URL.revokeObjectURL(url);
stream.getVideoTracks()[0].stop(); // stream is the MediaStream saved from the capture callback
This works great - I am able to play the downloaded test.webm video.
But I only want to record the last few minutes of video of the active tab. I do not want the recordedChunks array to grow unbounded. So, I tried something like this in the start recording action:
chrome.tabCapture.capture(captureOptions, function(stream) {
    // ...
    mediaRecorder.ondataavailable = function(event) {
        if (event.data.size > 0) {
            recordedChunks.push(event.data);
            // CHANGE HERE: keep only the last 1000 blobs
            while (recordedChunks.length > 1000) {
                recordedChunks.shift();
            }
        }
    };
});
But with this modification, the downloaded test.webm video is not playable. How do I capture just the tail of the blob output from MediaRecorder?
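One likely culprit, given how MediaRecorder structures its WebM output: only the first chunk it emits contains the container header (the EBML/initialization data), and later chunks are continuations that are only meaningful when appended after everything before them. Once shift() discards the first chunk, the assembled file no longer starts with a header and will not play. A common workaround, sketched below, is to restart the recorder on a timer so each completed segment is a self-contained, playable WebM blob, and then keep only the last few segments (names like SEGMENT_MS and KEEP are illustrative, not from the original code). Note that the retained segments must be downloaded or played individually; concatenating complete WebM files does not yield one valid file.

// Hedged sketch: approximate a rolling "last few minutes" buffer by
// restarting MediaRecorder periodically. Each finished segment is a
// complete WebM recording, so old segments can be dropped safely.
var segments = [];          // each entry: one complete, playable recording
var SEGMENT_MS = 60 * 1000; // length of one segment (illustrative value)
var KEEP = 3;               // keep roughly the last 3 minutes

function startSegment(stream) {
    var chunks = [];
    var recorder = new MediaRecorder(stream, { mimeType: "video/webm" });
    recorder.ondataavailable = function(event) {
        if (event.data.size > 0) {
            chunks.push(event.data);
        }
    };
    recorder.onstop = function() {
        segments.push(new Blob(chunks, { type: "video/webm" }));
        while (segments.length > KEEP) {
            segments.shift(); // dropping a whole segment keeps the rest playable
        }
    };
    recorder.start();
    setTimeout(function() {
        recorder.stop();
        if (stream.active) {
            startSegment(stream); // roll over to the next segment
        }
    }, SEGMENT_MS);
}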

Related

MediaRecorder: How to stop Video recording and play back in same video element?

I am using a <video> element with MediaRecorder:
function getUserMediaSuccess(stream) {
    $videoElement[0].srcObject = stream;
    $videoElement[0].autoplay = true;
    $videoElement[0].muted = true;
    $videoElement[0].controls = false;
    mediaRecorder = new MediaRecorder(stream, settings.recorderOptions);
}
Once recording is finished, I want to play the recorded chunks.
I tried with:
const blob = new Blob(chunks, { 'type' : settings.recorderOptions.mimeType});
$videoElement[0].src = window.URL.createObjectURL(blob);
and also with
$videoElement[0].pause();
$videoElement[0].removeAttribute('src');
$videoElement[0].load();
$videoElement[0].src = settings.filename;
$videoElement[0].controls = true;
I cannot stop the video element from showing the real-time webcam.
I can play back the recorded video in ANOTHER video element, but I want to use the SAME <video> element that is used to display the webcam.
I also tried:
localStream.getTracks().forEach(function(track) {
    track.stop();
});
This gives a black screen, but then I am unable to play back the recorded video.
I think you have done most things correctly.
Based on the Mozilla MediaRecorder example and some more research, I think the magic is to switch between using srcObject and src:
srcObject for the MediaStream (getUserMedia live preview)
src with window.URL.createObjectURL for the recorded blob
This snippet works if run on localhost or over https (maybe the embedding here does not fulfill all the security requirements to allow access to getUserMedia..):
const videoEl = document.getElementById('theVideoElement');
let mediaRecorder = null;
let mediaChunks = [];
let mediaBlobURL = null;

function recordStart() {
    console.log('recordStart..');
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices.getUserMedia({
            audio: true,
            video: true,
        })
        .then(stream => {
            // live preview: attach the stream via srcObject
            videoEl.srcObject = stream;
            mediaRecorder = new MediaRecorder(stream);
            mediaRecorder.addEventListener('dataavailable', event => {
                mediaChunks.push(event.data);
            });
            mediaRecorder.addEventListener('stop', event => {
                console.log("recorder stopped");
                const blob = new Blob(mediaChunks, { 'type': 'video/webm' });
                mediaBlobURL = window.URL.createObjectURL(blob);
                mediaChunks = [];
                // playback: switch from srcObject to src
                videoEl.src = mediaBlobURL;
            });
            mediaRecorder.start();
            console.log("recorder started", mediaRecorder.state);
            window.setTimeout(event => {
                console.log("time is over.");
                mediaRecorder.stop();
                console.log(mediaRecorder.state);
                console.log("recorder stopped");
                // stop the getUserMedia stream - this way the cam and mic get released.
                for (const track of stream.getTracks()) {
                    track.stop();
                }
                videoEl.srcObject = null;
                console.log("stream stopped.");
            }, 3000);
        })
        .catch(err => {
            console.error(`The following getUserMedia error occurred:\n ${err}`);
        });
    } else {
        console.error('getUserMedia not supported on your browser!');
    }
}

console.info('******************************************');

window.addEventListener('load', (event) => {
    console.info('All resources finished loading.');
    const buttonEl = document.getElementById('button_start');
    buttonEl.addEventListener('click', (event) => {
        console.info('click!');
        recordStart();
    });
});
button {
    display: block;
}

video {
    display: block;
    border: solid 1px black;
}
<button id="button_start">
    start
</button>

<video
    id="theVideoElement"
    autoplay
    controls
>
</video>

Upload audio blob after conversion from wav to mp3

function init() {
    var cfg = {};
    audio = new Recorder(cfg);
}

function toggle( btn ){ // audio && audio.record();
    if( audio instanceof Recorder ){
        var btnLabel = btn.firstChild.nodeValue;
        if( btnLabel === 'Record' ){
            audio.record();
        } else {
            audio.stop();
            createPreview( 'recordings' );
            audio.clear();
        }
        btn.firstChild.nodeValue = (btnLabel === 'Record') ? 'Stop' : 'Record';
        btn.setAttribute('class', (btn.getAttribute('class') === 'btn btn-primary') ? 'btn btn-danger' : 'btn btn-primary');
    } else {
        init();
        toggle( btn );
    }
}
function createPreview( containerId ) {
    // audio && audio.exportWAV( function(blob) {
    var targetContainer = document.getElementById( containerId );
    var timestamp = new Date().getTime();
    var filename = 'recording_' + timestamp;
    var div = document.createElement('div');
    var linkMP3 = document.createElement('a');
    linkMP3.setAttribute('id', 'MP3-' + timestamp);
    var iconMP3 = document.createElement('img');
    iconMP3.setAttribute('src', 'images/i-mp3.jpeg');
    var linkWAV = document.createElement('a');
    linkWAV.setAttribute('id', 'WAV-' + timestamp);
    var iconWAV = document.createElement('img');
    iconWAV.setAttribute('src', 'images/i-wav.jpeg');
    var player = document.createElement('audio');
    player.setAttribute('id', 'PLAYER-' + timestamp);
    player.controls = true;
    div.appendChild(player);
    div.appendChild(linkWAV);
    div.appendChild(linkMP3);
    targetContainer.appendChild(div);
    audio.export( function( mediaObj ) {
        if( mediaObj.blob.type == 'audio/mp3' ){
            var url = mediaObj.url;
            targetLink = document.getElementById( 'MP3-' + timestamp );
            targetLink.href = url;
            targetLink.download = filename + '.mp3';
            targetLink.innerHTML = targetLink.download;
            saveAudio( url, filename );
        } else { // 'audio/wav'
            var url = URL.createObjectURL( mediaObj.blob );
            targetPlayer = document.getElementById( 'PLAYER-' + timestamp );
            targetLink = document.getElementById( 'WAV-' + timestamp );
            targetPlayer.src = url;
            targetLink.href = url;
            targetLink.download = filename + '.wav';
            targetLink.innerHTML = targetLink.download;
        }
    });
}
function saveAudio( url, filename ){
    var firebaseUrl = 'your_firebase_url';
    if( firebaseUrl !== 'your_firebase_url' ){
        console.info('>> saving audio: url');
        console.log( url );
        ref = new Firebase( firebaseUrl );
        ref.set({
            filetype: 'audio/mp3',
            base64Str: url,
            filename: filename + '.mp3'
        });
    } else {
        console.warn('Audio not saved to firebase because firebaseUrl is undefined.');
    }
}
I need to record audio in the browser (short clips, spoken voice, mono) and upload it in mp3 format. This script by Chris Geirman has almost everything that I need, except that instead of using Firebase, I'd like to use jQuery to upload the audio blobs to a folder on my server. I'm fairly new to all of this, but I'm guessing that I need to replace the saveAudio() function with my own uploadAudio() jQuery(?) function (something like this), which will POST to a script at /upload.php. So far so good (?), but I can't figure out from Chris's script exactly what it is that I should be uploading / passing to /upload.php. I'm planning to implement the script here.
OK, just in case it helps anyone: I managed to get it working using this approach from Soumen Basak.
function uploadAudio( blob ) {
    var reader = new FileReader();
    reader.onload = function(event){
        var fd = {};
        fd["fname"] = "test.wav";
        fd["data"] = event.target.result;
        $.ajax({
            type: 'POST',
            url: 'upload.php',
            data: fd,
            dataType: 'text'
        }).done(function(data) {
            console.log(data);
        });
    };
    reader.readAsDataURL(blob);
}
Replace test.wav with whatever applies - in my case BlahBlah.mp3. Then to reference the blob from Chris Geirman's script, change uploadAudio( blob ); to uploadAudio( mediaObj.blob );.
Be aware that with this setup on localhost, 2 mins of audio took 1'40" to convert from wav to mp3 and move to the uploads directory. Next job: create progress bars, etc.!
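For context, the call site in Chris Geirman's createPreview() would then look roughly like this (a sketch assuming the export callback shape shown above; uploadAudio simply takes the place of the Firebase-based saveAudio):

audio.export( function( mediaObj ) {
    if( mediaObj.blob.type == 'audio/mp3' ){
        // ... existing MP3 link setup as above ...
        uploadAudio( mediaObj.blob ); // instead of saveAudio( url, filename )
    }
});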
Upload.php (Thanks again Soumen Basak):
<?php
// pull the raw base64 payload out of the data URL in the POST array
$data = substr($_POST['data'], strpos($_POST['data'], ",") + 1);
// decode it
$decodedData = base64_decode($data);
// echo the filename back to the client
$filename = $_POST['fname'];
echo $filename;
// write the data out to the file
$fp = fopen($filename, 'wb');
fwrite($fp, $decodedData);
fclose($fp);
?>

Unable to play .aac audio file using the Media Source Extensions API

I want to play a .aac audio file using the Media Source Extensions API. I replaced the mime type in the sample code with mime = "audio/aac" and used a .aac file. Is there any other parameter that needs to be specified?
var vidElement = document.querySelector('audio');

if (window.MediaSource) {
    var mediaSource = new MediaSource();
    vidElement.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener('sourceopen', sourceOpen);
} else {
    console.log("The Media Source Extensions API is not supported.");
}

function sourceOpen(e) {
    URL.revokeObjectURL(vidElement.src);
    var mime = 'audio/aac';
    var mediaSource = e.target;
    var sourceBuffer = mediaSource.addSourceBuffer(mime);
    var videoUrl = 'example.aac';
    fetch(videoUrl)
        .then(function(response) {
            return response.arrayBuffer();
        })
        .then(function(arrayBuffer) {
            sourceBuffer.addEventListener('updateend', function(e) {
                if (!sourceBuffer.updating && mediaSource.readyState === 'open') {
                    mediaSource.endOfStream();
                }
            });
            sourceBuffer.appendBuffer(arrayBuffer);
        });
}
Converting to .mp4 and setting the mimetype to audio/mp4; codecs="mp4a.40.2" worked for me. I couldn't find a way to do it without converting.
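If you want to probe what a given browser's MSE implementation will accept before converting anything, MediaSource.isTypeSupported() can be queried with candidate mime strings (a quick diagnostic sketch; the exact results vary by browser, and addSourceBuffer throws for unsupported types):

// Log which of these audio mime types this browser's MSE accepts.
var candidates = [
    'audio/aac',
    'audio/mpeg',
    'audio/mp4; codecs="mp4a.40.2"',
    'audio/webm; codecs="opus"'
];
candidates.forEach(function(mime) {
    console.log(mime, MediaSource.isTypeSupported(mime));
});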

Video stream from Blob NodeJS

I am recording a MediaStream on the client side in this way:
handleStream(stream) {
    const ws = new WebSocket('ws://localhost:5432/binary');
    var recorder = new MediaRecorder(stream);
    recorder.ondataavailable = function(event) {
        ws.send(event.data);
    };
    recorder.start();
}
The data is accepted on the server side like this:
const wss = new WebSocket.Server({ port: 5432 });

wss.on('connection', function connection(ws) {
    ws.on('message', function incoming(message) {
        writeToDisk(message, 'video.webm');
    });
});

function writeToDisk(dataURL, fileName) {
    var fileBuffer = new Buffer(dataURL, 'base64');
    fs.writeFileSync(fileName, fileBuffer);
}
It works like a charm, but now I want to take the Buffer and serve a live video stream from the server side. Is there any way to do it?
Thanks for your help.
I have already done this here.
You can use the MediaRecorder class to split the video into chunks and send them to the server for broadcast.
this._mediaRecorder = new MediaRecorder(this._stream, this._streamOptions);
this._mediaRecorder.ondataavailable = e => this._videoStreamer.pushChunk(e.data);
this._mediaRecorder.start();
...
this._mediaRecorder.requestData()
Do not forget to restart the recording at intervals, so that new clients do not have to download the entire video so far just to join the stream (a sketch of the restart follows the poster snippet below). Also, while swapping chunks you should replace the <video> with an <image>, or update the video's poster, so that the transition goes smoothly:
async function imageBitmapToBlob(img) {
    return new Promise(res => {
        const canvas = document.createElement('canvas');
        canvas.width = img.width;
        canvas.height = img.height;
        canvas.getContext('2d').drawImage(img, 0, 0);
        canvas.toBlob(res);
    });
}
...
const stream = document.querySelector('video').captureStream();
if (stream.active == true) {
    const track = stream.getVideoTracks()[0];
    const capturer = new ImageCapture(track);
    const bitmap = await imageBitmapToBlob(await capturer.grabFrame());
    URL.revokeObjectURL(this._oldPosterUrl);
    this._video.poster = this._oldPosterUrl = URL.createObjectURL(bitmap);
    track.stop();
}
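The restart-at-intervals step mentioned above is not shown in the answer's snippets; a minimal sketch of the idea might look like this (the 10-second interval and the reuse of the _mediaRecorder/_videoStreamer names are assumptions, not code from the answer):

// Hypothetical sketch: periodically stop and recreate the recorder so each
// new recording starts with fresh container headers.
setInterval(() => {
    this._mediaRecorder.stop(); // flushes a final dataavailable event
    this._mediaRecorder = new MediaRecorder(this._stream, this._streamOptions);
    this._mediaRecorder.ondataavailable = e => this._videoStreamer.pushChunk(e.data);
    this._mediaRecorder.start();
}, 10000);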
You can glue Blob objects together through their constructor. When a new chunk arrives, do not forget to free the memory for the old video with URL.revokeObjectURL() and restore the video's current time:
_updateVideo = async (newBlob = false) => {
    const stream = this._video.captureStream();
    if (stream.active == true) {
        const track = stream.getVideoTracks()[0];
        const capturer = new ImageCapture(track);
        const bitmap = await imageBitmapToBlob(await capturer.grabFrame());
        URL.revokeObjectURL(this._oldPosterUrl);
        this._video.poster = this._oldPosterUrl = URL.createObjectURL(bitmap);
        track.stop();
    }
    let data = null;
    if (newBlob === true) {
        const index = this._recordedChunks.length - 1;
        data = [this._recordedChunks[index]];
    } else {
        data = this._recordedChunks;
    }
    const blob = new Blob(data, this._options);
    const time = this._video.currentTime;
    URL.revokeObjectURL(this._oldVideoUrl);
    const url = this._oldVideoUrl = URL.createObjectURL(blob);
    if (newBlob === true) {
        this._recordedChunks = [blob];
    }
    this._size = blob.size;
    this._video.src = url;
    this._video.currentTime = time;
}
You should use two WebSockets on the server for broadcasting and two on the client for listening: one WebSocket transfers only video chunks, the other only new blobs with video headers (produced by restarting the recording at intervals).
const blobWebSocket = new WebSocket(`ws://127.0.0.1:${blobPort}/`);
blobWebSocket.onmessage = (e) => {
    console.log({blob: e.data});
    this._videoWorker.pushBlob(e.data);
};

const chunkWebSocket = new WebSocket(`ws://127.0.0.1:${chunkPort}/`);
chunkWebSocket.onmessage = (e) => {
    console.log({chunk: e.data});
    this._videoWorker.pushChunk(e.data);
};
After connecting, the server sends the client the entire current video blob, then begins to dynamically send new chunks to the client.
const wss = new WebSocket.Server({ port });
let buffer = Buffer.alloc(0);

function chunkHandler(buf, isBlob = false) {
    console.log({buf, isBlob});
    if (isBlob === true) {
        //broadcast(wss, buf);
        buffer = buf;
    } else {
        const totalLength = buffer.length + buf.length;
        buffer = Buffer.concat([buffer, buf], totalLength);
        broadcast(wss, buf);
    }
}

wss.on('connection', function connection(ws) {
    // send the accumulated video (header blob + chunks so far) to the new client
    if (buffer.length !== 0) {
        ws.send(buffer);
    }
});
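The broadcast() helper referenced above is not included in the answer; a minimal version, assuming the ws library's wss.clients set, might be:

// Hypothetical helper: send a payload to every connected, open client.
function broadcast(wss, data) {
    for (const client of wss.clients) {
        if (client.readyState === WebSocket.OPEN) {
            client.send(data);
        }
    }
}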

Secondary tile web url

I have to pin a secondary tile in my Windows Phone 8.1 application.
I followed the MSDN tutorial: http://code.msdn.microsoft.com/windowsapps/secondary-tiles-sample-edf2a178/
It works with an internal image (ms-appx://..) but not with a web URL (http://).
working sample:
var logo = new Windows.Foundation.Uri("ms-appx:///Images/square30x30Tile-sdk.png");
var currentTime = new Date();
var TileActivationArguments = data.ad_id + " WasPinnedAt=" + currentTime;
var tile = new Windows.UI.StartScreen.SecondaryTile(data.ad_id,
    data.subject,
    TileActivationArguments,
    logo,
    Windows.UI.StartScreen.TileSize.square150x150);
tile.visualElements.foregroundText = Windows.UI.StartScreen.ForegroundText.light;
tile.visualElements.square30x30Logo = logo;
tile.visualElements.showNameOnSquare150x150Logo = true;
var selectionRect = this.element.getBoundingClientRect();
// Now let's try to pin the tile.
// We'll make the same fundamental call as we did in pinByElement, but this time we'll return a promise.
return new WinJS.Promise(function (complete, error, progress) {
    tile.requestCreateForSelectionAsync({ x: selectionRect.left, y: selectionRect.top, width: selectionRect.width, height: selectionRect.height }, Windows.UI.Popups.Placement.above).done(function (isCreated) {
        if (isCreated) {
            complete(true);
        } else {
            complete(false);
        }
    });
});
And if I use
    var logo = new Windows.Foundation.Uri(data.images[0]);
I get an invalid parameter exception.
You can take a look at the documentation for the SecondaryTile.Logo property. In it you'll see this:

The location of the image. This can be expressed as one of these schemes:
ms-appx:///
ms-appdata:///local/

You can download the image first and then set it using the ms-appdata:///local/ scheme. I'm not sure that changing the logo to something from the Internet is a good idea, though. This should be the app's logo, so it should be in the package.
I found the solution
fileExists: function (fileName) {
    var applicationData = Windows.Storage.ApplicationData.current;
    var folder = applicationData.localFolder;
    return folder.getFileAsync(fileName).then(function (file) {
        return file;
    }, function (err) {
        return null;
    });
},

download: function (imgUrl, imgName) {
    return WinJS.xhr({ url: imgUrl, responseType: "blob" }).then(function (result) {
        var blob = result.response;
        var applicationData = Windows.Storage.ApplicationData.current;
        var folder = applicationData.localFolder;
        return folder.createFileAsync(imgName, Windows.Storage.CreationCollisionOption.replaceExisting).then(function (file) {
            // Open the returned file in order to copy the data
            return file.openAsync(Windows.Storage.FileAccessMode.readWrite).then(function (stream) {
                // Copy the stream from the blob to the file stream
                return Windows.Storage.Streams.RandomAccessStream.copyAsync(blob.msDetachStream(), stream).then(function () {
                    return stream.flushAsync().then(function () {
                        stream.close();
                    });
                });
            });
        });
    }, function (e) {
        //var msg = new Windows.UI.Popups.MessageDialog(e.message);
        //msg.showAsync();
    });
},
var self = this;
this.download(data.images[0], data.ad_id).then(function () {
    self.fileExists(data.ad_id).then(function (file) {
        var logo = new Windows.Foundation.Uri("ms-appdata:///Local/" + data.ad_id);
        ....
I needed to download the image and store it locally; only then can I use the ms-appdata:///Local scheme.
