desktopCapturer example... how to make it work for specific application - node.js

I'm trying to follow this tutorial:
https://www.tutorialspoint.com/electron/electron_audio_and_video_capturing.htm
The first part of the tutorial worked fine... I can stream av from my pc camera and mic... into my electron app. But now I'm trying to do is stream audio and video from a specific application running on my windows desktop via the desktopCapturer object.
Problem
I'm not getting any errors. But the electron app's video html tag is not showing the stream from myappthatstreamsAV.
Code
I changed my index.html code to look like this: (just changed stuff inside the tag)
<!DOCTYPE html>
<html>
<head>
<meta charset = "UTF-8">
<title>Audio and Video</title>
</head>
<body>
<video autoplay></video>
<script type = "text/javascript">
var desktopCapturer = require('electron').desktopCapturer;
desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
if (error) throw error
desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
if (error) throw error
for (let i = 0; i < sources.length; ++i) {
if (sources[i].name === 'myappthatstreamsAV') {
navigator.webkitGetUserMedia({
audio: true,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: sources[i].id,
minWidth: 1280,
maxWidth: 1280,
minHeight: 720,
maxHeight: 720
}
}
}, handleStream, handleError)
return
}
}
})
function handleStream (stream) {
document.querySelector('video').src = URL.createObjectURL(stream)
}
function handleError (e) {
console.log(e)
}
</script>
</body>
</html>
and the index.js looks like this:
const {app, BrowserWindow} = require('electron')
const url = require('url')
const path = require('path')
let win
// Set the path where recordings will be saved
app.setPath("userData", __dirname + "/saved_recordings")
function createWindow() {
win = new BrowserWindow({width: 800, height: 600,
webPreferences: {
nodeIntegration: true
}
})
win.loadURL(url.format({
pathname: path.join(__dirname, 'index.html'),
protocol: 'file:',
slashes: true
}))
}
app.on('ready', createWindow)
What I've tried so far:
I added some debug statements like this:
<script type = "text/javascript">
var desktopCapturer = require('electron').desktopCapturer;
console.log("1")
console.log(desktopCapturer)
desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
console.log("2")
if (error) throw error
for (let i = 0; i < sources.length; ++i) {
console.log((sources[i].name));
console.log("3")
and basically, it executes only the first two console.logs:
console.log("1")
console.log(desktopCapturer)
It never gets to 2 or 3.

Changed my code to look like this:
var desktopCapturer = require('electron').desktopCapturer;
console.log("are you here?")
console.log(desktopCapturer)
desktopCapturer.getSources({ types: ['window', 'screen'] }).then(async sources => {
for (const source of sources) {
if (source.name === 'mystreamApp') {
try {
const stream = await navigator.mediaDevices.getUserMedia({
audio: true,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: source.id,
minWidth: 1280,
maxWidth: 1280,
minHeight: 720,
maxHeight: 720
}
}
})
handleStream(stream)
} catch (e) {
handleError(e)
}
return
}
}
})
function handleStream (stream) {
const video = document.querySelector('video')
video.srcObject = stream
video.onloadedmetadata = (e) => video.play()
}
function handleError (e) {
console.log(e)
}
and now I see the video stream.
Audio is still not working. But i'll open another question for that.

Related

How to block the url chrome://extension and new tab in my extension with manifest v3?

My google chrome extension has a background.js and content script that communicate with port.OnMessage but I have noticed that when I run my extension in Chrome://extension it throws an error because it is not a url and the same happens with a new google tab chrome which has no url. How could I block them?
On the internet I got information that said that they were blocked with
"exclude_matches": [
"chrome://extensions/"
]
however, this doesn't work for the version 3 manifest. Also how could it tell you not to run the extension in a new tab (no url)
this is my manifest v3
"name":"Certified Records Full Certificate",
"description":"Esta extensión permite grabar la pantalla o hacer capturas de pantalla",
"version": "1.0",
"manifest_version":3,
"background":{
"service_worker":"background.js"
},
"content_scripts": [
{
"matches": ["<all_urls>"],
"js": ["content-script.js"],
"exclude_matches": [
"chrome://extensions/"
]
}],
"permissions":["storage","activeTab","scripting","tabs","desktopCapture"],
"action":{
"default_popup":"popup.html",
"default_icon":{
"16":"/images/logo-16.png",
"32":"/images/logo-32.png",
"48": "/images/logo-48.png",
"128": "/images/logo-128.png"
}
},
"icons":{
"16":"/images/logo-16.png",
"32":"/images/logo-32.png",
"48": "/images/logo-48.png",
"128": "/images/logo-128.png"
} }
this is my background.js
chrome.runtime.onConnect.addListener(function (port) {
port.onMessage.addListener(function(msg){
if (msg.type === 'SS_UI_REQUEST') {
requestScreenSharing(port,msg);
}
});
});
function requestScreenSharing(port, msg) {
const sources = ['window'];
const tab = port.sender.tab;
desktopMediaRequestId = chrome.desktopCapture.chooseDesktopMedia(
sources,
port.sender.tab,
streamId => {
if (streamId) {
msg.type = 'SS_DIALOG_SUCCESS';
msg.streamId = streamId;
msg.text ="sharing";
} else {
msg.type = 'SS_DIALOG_CANCEL';
msg.text ="cancel";
}
var tab = getTabId();
tab.then((value) => {
const respuesta = chrome.tabs.connect(value.id, {
name: "respuesta",
});
respuesta.postMessage(msg);
});
}
);
}
async function getTabId() {
let queryOptions = { active: true, currentWindow: true };
let [tab] = await chrome.tabs.query(queryOptions);
return tab;
}
this is my content-script.js
chrome.runtime.onConnect.addListener(function (port) {
port.onMessage.addListener(function(msg){
if (msg.type === 'SS_UI_REQUEST') {
console.log(msg);
var background = chrome.runtime.connect();
background.postMessage(msg);
}
if (msg.type === 'SS_DIALOG_SUCCESS') {
console.log(msg);
startScreenStreamFrom(msg.streamId);
}
if (msg.type === 'SS_DIALOG_CANCEL') {
console.log(msg);
}
if(msg.type === "SS_UI_TAKESCREENSHOT")
{
console.log("tomar screenshot");
TakeScreenShot();
}
});
});
function startScreenStreamFrom(streamId) {
console.log("compartiendo pantalla");
navigator.mediaDevices
.getUserMedia({
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: streamId
}
}
})
.then(stream => {
window.stream = stream;
});
}
async function TakeScreenShot(){
setTimeout(async () => {
const screen = window.stream;
const track = screen.getVideoTracks()[0];
const imageCapture = new ImageCapture(track);
await imageCapture.grabFrame()
.then(function(bitmap) {
track.stop();
var canvas = document.createElement('canvas');
canvas.width = bitmap.width
canvas.height = bitmap.height
const context = canvas.getContext('2d')
context.drawImage(bitmap, 0, 0, bitmap.width, bitmap.height)
const image = canvas.toDataURL()
var link = document.createElement('a');
link.download = 'FullCertificateCaptureScreen.png';
link.href = image
link.click();
})
.catch(function(error) {
track.stop();
console.log('grabFrame() error: ', error);
});
}, 1000);
}
this is the popup script
document.getElementById("btn-share").addEventListener("click", function(){
var tab = getTabId();
tab.then((value) => {
chrome.storage.local.set({'pestaña': value.id});
const port = chrome.tabs.connect(value.id, {
name: "conexion",
});
port.postMessage({ type: 'SS_UI_REQUEST', text: 'start' }, '*');
}); //fin de tab.then()
})//fin de click addEventListener
document.getElementById("btn-capture").addEventListener("click", async function(){
chrome.storage.local.get('pestaña', function (result) {
const port = chrome.tabs.connect(result.pestaña, {
name: "tomarScreenShot",
});
port.postMessage({ type: 'SS_UI_TAKESCREENSHOT', text: 'takescreenshot' }, '*');
window.close();
});
});
async function getTabId() {
let queryOptions = { active: true, currentWindow: true };
let [tab] = await chrome.tabs.query(queryOptions);
return tab;
}

Upload Recorded Screen Via Socket.io Not Working

i'm trying to make a zoom/google meet clone.. and i'm using RecordRTC to record the screen and then send it the Node Server via socket.io ...sometimes i get data , and sometimes i don't,..
however i tried to do the same code with websocket ... i didn't get any problem .. always work ... that even made me wonder more,
Please help Me figure the problem where and why ... thank you..
Server Side [Node] :
const express = require('express');
const chalk = require('chalk');
const socketio = require('socket.io')
require('dotenv').config();
const PORT = process.env.PORT || 5000;
const app = express();
app.use(express.static(__dirname + '/public'))
const server = app.listen(PORT, () => {
console.log(chalk.yellowBright.inverse.bold(`Server is Running on PORT ${PORT}`))
})
function writeToDisk(dataURL, fileName) {
var fileExtension = fileName.split('.').pop(),
fileRootNameWithBase = './uploads/' + fileName,
filePath = fileRootNameWithBase,
fileID = 2,
fileBuffer;
// #todo return the new filename to client
while (fs.existsSync(filePath)) {
filePath = fileRootNameWithBase + '(' + fileID + ').' + fileExtension;
fileID += 1;
}
dataURL = dataURL.split(',').pop();
fileBuffer = new Buffer(dataURL, 'base64');
fs.writeFileSync(filePath, fileBuffer);
console.log('filePath', filePath);
}
const io = socketio(server)
io.on('connect', (socket) => {
console.log("Client Has Been Connected")
socket.emit('messageFromServer', { text:'You Are Connected To The Server!'})
socket.on('fromClient',(data)=>{
console.log(chalk.red.bold(data))
if (data.data.video) {
console.log(chalk.red.bold("Video Found"))
writeToDisk(data.data.video.dataURL, fileName + '.webm');
}
})
})
Client Side [Javascript]
var recordButton = document.getElementById('start-recording');
var stopButton = document.getElementById('stop-recording');
var local_video = document.querySelector("#local-video")
const socketio = io('http://localhost:3000/')
console.log('Hello World')
socketio.on('connect', () => {
console.log(socketio.id)
})
function invokeGetDisplayMedia(success, error) {
var displaymediastreamconstraints = {
video: {
displaySurface: 'monitor', // monitor, window, application, browser
logicalSurface: true,
cursor: 'always' // never, always, motion
}
};
displaymediastreamconstraints = {
video: true
};
if (navigator.mediaDevices.getDisplayMedia) {
navigator.mediaDevices.getDisplayMedia(displaymediastreamconstraints).then(success).catch(error);
}
else {
navigator.getDisplayMedia(displaymediastreamconstraints).then(success).catch(error);
}
}
function captureScreen(callback) {
invokeGetDisplayMedia(function (screen) {
callback(screen);
}, function (error) {
console.error(error);
alert('Unable to capture your screen. Please check console logs.\n' + error);
});
}
function startRecording() {
captureScreen(function (stream) {
mediaStream = stream;
local_video.srcObject = stream;
var videoOnlyStream = new MediaStream();
stream.getVideoTracks().forEach(function (track) {
videoOnlyStream.addTrack(track);
});
recordVideo = RecordRTC(videoOnlyStream, {
type: 'video/webm',
canvas: {
width: 1280,
height: 720
},
mandatory: {
// chromeMediaSource: 'screen',
minWidth: 1280,
minHeight: 720,
maxWidth: 1920,
maxHeight: 1080,
minAspectRatio: 1.77
},
recorderType: !!navigator.mozGetUserMedia ? MediaStreamRecorder : WhammyRecorder
});
recordVideo.startRecording();
stopButton.disabled = false;
});
}
function stopRecording() {
recordButton.disabled = false;
stopButton.disabled = true;
// stop video recorder
recordVideo.stopRecording(function () {
recordVideo.getDataURL(function (videoDataURL) {
var files = {
video: {
type: recordVideo.getBlob().type || 'video/webm',
dataURL: videoDataURL
}
};
const data = JSON.stringify(files)
console.log(data)
socketio.emit('fromClient', { "message": "Sent from client!", "data": data });
console.log('EMIT: fromClient');
if (mediaStream) mediaStream.stop();
});
});
}
recordButton.onclick = function () {
recordButton.disabled = true;
startRecording();
}
stopButton.onclick = function () {
stopButton.disabled = true;
stopRecording();
}
HTML :
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>RealTime Record</title>
</head>
<body>
<center>
<h1>Testing Recording</h1>
</center>
<center>
<div class="record-action">
<button id="start-recording">Start Recording</button>
<button id="stop-recording" disabled>Stop Recording</button>
<button id="fromClient">From Client</button>
</div>
<video id="local-video" autoplay style="border: 1px solid rgb(15, 158, 238);"></video>
</center>
<script src="RecordRTC.js"></script>
<script src="/socket.io/socket.io.js"></script>
<script src="client.js"></script>
</body>
</html>

Is there a problem with dialog.showOpenDialog in Electron on Windows?

I'm working on an example out of a book and can't seem to get past this. When I hit Ctrl-o it shows the dialog to open a file, but it never loads in the file into the markup editor. However, if I run it using the debugger in VSCode it works fine.
I believe the problem is with this section:
dialog.showOpenDialog(window, options, paths => {
if (paths && paths.length > 0) {
const content = fs.readFileSync(paths[0]).toString();
window.webContents.send('load', content);
}
});
This is my menu.js file:
const {
app,
Menu,
shell,
ipcMain,
BrowserWindow,
globalShortcut,
dialog
} = require('electron');
const fs = require('fs');
function saveFile() {
console.log('Saving the file');
const window = BrowserWindow.getFocusedWindow();
window.webContents.send('editor-event', 'save');
}
function loadFile() {
console.log('loadFile confirmation');
const window = BrowserWindow.getFocusedWindow();
const options = {
title: 'Pick a markdown file',
filters: [
{ name: 'Markdown files', extensions: ['md'] },
{ name: 'Text files', extensions: ['txt'] }
]
};
dialog.showOpenDialog(window, options, paths => {
if (paths && paths.length > 0) {
const content = fs.readFileSync(paths[0]).toString();
window.webContents.send('load', content);
}
});
}
app.on('ready', () => {
globalShortcut.register('CommandOrControl+S', () => {
saveFile();
});
globalShortcut.register('CommandorControl+O', () => {
console.log('Ctrl-O received');
loadFile();
});
});
ipcMain.on('save', (event, arg) => {
console.log(`Saving content of the file`);
console.log(arg);
const window = BrowserWindow.getFocusedWindow();
const options = {
title: 'Save markdown file',
filters: [
{
name: 'MyFile',
extensions: ['md']
}
]
};
//Broken code from book apparently: dialog.showSaveDialog(window, options, filename => {
let filename = dialog.showSaveDialogSync(window, options);
console.log(filename);
if (filename) {
console.log(`Saving content to the file: ${filename}`);
fs.writeFileSync(filename, arg);
}
//Broken code from book apparently });
});
ipcMain.on('editor-reply', (event, arg) => {
console.log(`Receieved reply from web page: ${arg}`);
});
const template = [
{
label: 'Format',
submenu: [
{
label: 'Toggle Bold',
click() {
const window = BrowserWindow.getFocusedWindow();
window.webContents.send('editor-event',
'toggle-bold'
);
}
}
]
}
];
if (process.env.DEBUG) {
template.push({
label: 'Debugging',
submenu: [
{
label: 'Dev Tools',
role: 'toggleDevTools'
},
{type: 'separator' },
{
role: 'reload',
accelerator: 'Alt+R'
}
]
});
}
const menu = Menu.buildFromTemplate(template);
module.exports = menu;
My index.html:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta
http-equiv="Content-Security-Policy"
content="script-src 'self' 'unsafe-inline';" />
<style>
html, body {
height: 100%;
display: flex;
flex: 1;
flex-direction: column;
}
.CodeMirror {
flex: 1;
}
</style>
<title>Document</title>
<link rel="stylesheet" href="./node_modules/simplemde/dist/simplemde.min.css">
<script src="./node_modules/simplemde/dist/simplemde.min.js"></script>
</head>
<body>
<textarea id="editor"></textarea>
<script>
var editor = new SimpleMDE({
element: document.getElementById('editor')
});
const { ipcRenderer } = require('electron');
ipcRenderer.on('editor-event', (event, arg) => {
console.log(arg);
event.sender.send('editor-reply', `Received ${arg}`);
if (arg === 'toggle-bold') {
editor.toggleBold();
}
if (arg === 'save') {
event.sender.send('save', editor.value());
}
});
ipcRenderer.on('load', (event, content) => {
if (content) {
editor.value(content);
}
});
ipcRenderer.send('editor-reply', 'Page Loaded');
</script>
</body>
</html>
In recent versions of Electron, as stated in the relevant documentation: dialog.showOpenDialog () is no longer making use of a callback function, but is now returning a promise, so the .then syntax must be used instead:
function loadFile() {
console.log('loadFile confirmation');
const window = BrowserWindow.getFocusedWindow();
const options = {
title: 'Pick a markdown file',
filters: [
{ name: 'Markdown files', extensions: ['md'] },
{ name: 'Text files', extensions: ['txt'] }
]
};
dialog.showOpenDialog(window, options).then
(
result => {
if (!result.canceled)
{
let paths = result.filePaths;
if (paths && paths.length > 0) {
const content = fs.readFileSync(paths[0]).toString();
console.log (content);
// window.webContents.send('load', content);
}
}
}
);
}
loadFile();
Alternatively, you can use the dialog.showOpenDialogSync () function, which directly returns an array of file paths, or undefined if the dialog has been cancelled by the user...
TLDR: change the callback for dialog.showOpenDialog inside loadFile to:
dialog.showOpenDialog(window, options, (canceled, paths) => {
Instead of:
dialog.showOpenDialog(window, options, paths => {
Long version:
The callback for dialog.showOpenDialog passes in 3 arguments:
canceled
filePaths
And only on mac: bookmarks
You wanted the 2nd argument filePaths, although if your callback was just: paths => { expecting only one argument Electron would pass in the canceled argument because it's the first and you only said you wanted one argument.
So this means you need to pass in an argument before paths like: (canceled, paths) => {
See the docs

Getting error on video calling application build with simple-peer and react

Recently I made a video calling website using simple-peer,react.js and socket.io. Its working fine between laptop web browsers but I am getting below error on video calling from mobile web browser to laptop web browser. Can someone please advise what is causing this error and how to rectify it.
code-
function VideoComponent(props) {
//const [yourID, setYourID] = useState("");
//const [users, setUsers] = useState({});
const [stream, setStream] = useState();
const [receivingCall, setReceivingCall] = useState(props.receivingCall);
const [caller, setCaller] = useState(props.caller);
const [callerSignal, setCallerSignal] = useState(props.callerSignal);
const [callAccepted, setCallAccepted] = useState(props.callAccepted);
const [open, setOpen] = useState(false)
const [calling, setCalling] = useState(false)
const userVideo = useRef();
const partnerVideo = useRef();
const socket = props.socket
//const ENDPOINT = '/'
useEffect(() => {
if(props.useAudio && props.useVideo){
navigator.mediaDevices.getUserMedia({ video: props.useVideo, audio: props.useAudio }).then(stream => {
setStream(stream);
if (userVideo.current && props.useAudio && props.useVideo) {
userVideo.current.srcObject = stream;
}
})
}
//socket = io(ENDPOINT);
/*socket.on("hey", (data) => {
setReceivingCall(true);
setCaller(data.from);
setCallerSignal(data.signal);
})*/
}, []);
const callPeer=()=> {
setCalling(true)
if(props.selectedUser[0]['status'] !== 'online'){
setOpen(true)
}
if(props.useAudio && props.useVideo){
const peer = new Peer({
initiator: true,
trickle: false,
stream: stream,
});
peer.on("signal", data => {
socket.emit("callUser", { userToCall: props.selectedUser[0]['_id'],
signalData: data, from: props.userDetail[0]['_id']})
})
peer.on("stream", stream => {
if (partnerVideo.current) {
console.log('receiving stream from partner')
partnerVideo.current.srcObject = stream;
}
});
socket.on("callAccepted", signal => {
setCallAccepted(true);
peer.signal(signal);
})
}
}
function acceptCall() {
setCallAccepted(true);
if(props.useAudio && props.useVideo){
const peer = new Peer({
initiator: false,
trickle: false,
stream: stream,
});
peer.on("signal", data => {
socket.emit("acceptCall", { signal: data, receiverID: caller })
})
peer.on("stream", stream => {
partnerVideo.current.srcObject = stream;
});
peer.signal(callerSignal);
}
}
let UserVideo;
if (stream) {
UserVideo = (
<video className='newVideo1' playsInline muted ref={userVideo} autoPlay />
);
}
let PartnerVideo;
if (callAccepted) {
PartnerVideo = (
<video className='newVideo' playsInline ref={partnerVideo} autoPlay />
);
}
let incomingCall;
if (receivingCall && !callAccepted) {
incomingCall = (
<div className='incomingCall'>
<h1>{caller} is calling you</h1>
<Button
variant="contained"
color="secondary"
onClick={acceptCall}
className='acceptButton'
>
Accept call
</Button>
</div>
)
}
Error while calling from mobile web browser(Chrome) to laptop web browser(Chrome)
index.js:17 Uncaught Error: Connection failed.
at h (index.js:17)
at f.value (index.js:654)
at RTCPeerConnection.t._pc.onconnectionstatechange (index.js:119)
It could be related to TURN server. You need to set up one if you are planning to deploy this app.
based on your code, it looks inspired by https://github.com/coding-with-chaim/react-video-chat/blob/master/client/src/App.js
Use the same TURN settings given in his code and see if it works. It worked for me.

Screen Sharing and video/audio calling using WebRTC and Electron on Mac OS

I am trying to create an electron application which can share the desktop with the system audio using webrtc and if I set the constraints :
const constraints = {
audio: {
mandatory: {
chromeMediaSource: 'desktop'
}
},
video: {
mandatory: {
chromeMediaSource: 'desktop'
}
}
}
I got this issue Mac OS audio:
ERROR:adm_helpers.cc(73)] Failed to query stereo recording. and then " NotFoundError: Requested device not found "
You need to use electron's desktopCapturer api.
Example -
// In the renderer process.
const {desktopCapturer} = require('electron')
desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
if (error) throw error
for (let i = 0; i < sources.length; ++i) {
if (sources[i].name === 'Electron') {
navigator.mediaDevices.getUserMedia({
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: sources[i].id,
minWidth: 1280,
maxWidth: 1280,
minHeight: 720,
maxHeight: 720
}
}
})
.then((stream) => handleStream(stream))
.catch((e) => handleError(e))
return
}
}
})
function handleStream (stream) {
const video = document.querySelector('video')
video.srcObject = stream
video.onloadedmetadata = (e) => video.play()
}
function handleError (e) {
console.log(e)
}
And use the audio flag above for getting the audio while screen sharing.
More details here - https://electronjs.org/docs/api/desktop-capturer
For MacOS users you need to get audio and video streams separately, then merge the streams like so:
const stream = await navigator.mediaDevices.getUserMedia({
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: source.id
}
}
});
navigator.mediaDevices.getUserMedia({
audio: {
mandatory: {
chromeMediaSource: 'desktop'
}
},
video: false
}).then(function(audioStream) {
var audioTracks = audioStream.getAudioTracks();
// merge audio and video tracks
if(audioTracks.length > 0) {
stream.addTrack(audioTracks[0]);
}
recorder = new MediaRecorder(stream, {
mimeType: 'YOUR MIME TYPE'
});
recorder.ondataavailable = yourDataHandler;
recorder.onstop = yourStopHandler;
recorder.start();
}).catch(function(err) {
console.error('audioTrackError', err);
});

Resources