Get camera stream on embedded system - node.js

I have an embedded system with a camera and GStreamer, and I'm trying to get the stream of my camera. I have a web application built with Aurelia and Electron.
I tried mediaDevices.getUserMedia but I get a NotFoundError, yet using enumerateDevices I do get the devices I need.
Could the problem be that getUserMedia doesn't work properly with GStreamer? If I run the same project on my PC it works perfectly.
Here is my HTML:
<video ref="videoPlayer" hide.bind="screenSharing" id="videoPlayer" autoplay muted></video>
And this is my js:
let j = 0;
navigator.mediaDevices.enumerateDevices()
  .then((deviceInfos) => {
    for (var i = 0; i !== deviceInfos.length; ++i) {
      console.log(deviceInfos[i]);
      if (deviceInfos[i].kind === 'videoinput') {
        this.deviceInfo[j] = deviceInfos[i];
        j++;
      }
    }
    if (this.deviceInfo.length > 1) {
      console.log(this.deviceInfo.length);
      this.constraints = {
        audio: true,
        video: {
          deviceId: { exact: this.deviceInfo[1].deviceId }
        }
      };
    } else {
      console.log("Only one camera");
      this.constraints = {
        video: {
          deviceId: { exact: this.deviceInfo[0].deviceId }
        },
        audio: true
      };
      console.log(this.constraints);
    }
  })
  .then(() => {
    navigator.mediaDevices.getUserMedia(this.constraints)
      .then((stream) => {
        console.log('Got mic+video stream', stream);
        this.localStream = stream;
        this.videoPlayer.srcObject = this.localStream;
      })
      .catch((err) => {
        console.error(err);
      });
  })
}
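One way to narrow this down is to first call getUserMedia without the exact deviceId constraint: if plain video: true also throws NotFoundError, Chromium's capture backend simply does not expose the GStreamer camera, whereas if it succeeds, the constraint itself is the problem. A minimal sketch (the logging and the reuse of this.constraints are just for illustration):

// Hedged sketch: try the loosest constraints first, then the exact device.
navigator.mediaDevices.getUserMedia({ video: true, audio: false })
  .then((stream) => {
    console.log('Generic video capture works; the problem is likely the constraints');
    stream.getTracks().forEach((t) => t.stop());
    // Now retry with the exact deviceId from enumerateDevices()
    return navigator.mediaDevices.getUserMedia(this.constraints);
  })
  .then((stream) => {
    this.videoPlayer.srcObject = stream;
  })
  .catch((err) => {
    // NotFoundError here means Chromium sees no usable video capture device at all
    console.error(err.name, err.message);
  });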
I've seen on the internet that there are packages like livecam, but I have no idea how to use them.
Here is the output of mediaDevices.enumerateDevices:
console.log(navigator.mediaDevices.enumerateDevices())
Promise {<resolved>}
  [[PromiseValue]]: Array(5)
    0: MediaDeviceInfo { deviceId: "default", groupId: "6dbae3b74e14f5e239133b5feea86e5ae7a9741a3e3fd21a86eab9273fe135aa", kind: "audioinput", label: "Default" }
    1: MediaDeviceInfo { deviceId: "d415346fe3db142f8daa611ad3dedb298b5d94b70f4221c38e7e6582f45c3008", groupId: "8d82cc2495eebb4c40bb77a5e0287d4b365ac1de8205684eae39cb605a703f11", kind: "audioinput", label: "Built-in Audio Stereo" }
    2: MediaDeviceInfo { deviceId: "82378e03eff67ac471305e50ac95e629ebf441c1ab1819d6a36aca137e37e89d", groupId: "", kind: "videoinput", label: "" }
    3: MediaDeviceInfo { deviceId: "default", groupId: "default", kind: "audiooutput", label: "Default" }
    4: MediaDeviceInfo { deviceId: "31a7efff94b610d3fce02b21a319cc43e2541d56d98b4138b6e3fe854b0df38c", groupId: "391b1de381c11ab437d507abc0543f288dd29d999717dbb0e949c006ef120935", kind: "audiooutput", label: "Built-in Audio Stereo" }
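Note that the videoinput entry above has an empty label and groupId; labels generally stay empty until the page has been granted a capture permission. A small sketch of re-enumerating after a successful getUserMedia call (same page context assumed), which usually yields readable labels:

// Hedged sketch: device labels are typically only populated after permission is granted.
navigator.mediaDevices.getUserMedia({ video: true })
  .then((stream) => {
    stream.getTracks().forEach((t) => t.stop());
    return navigator.mediaDevices.enumerateDevices();
  })
  .then((devices) => {
    devices
      .filter((d) => d.kind === 'videoinput')
      .forEach((d) => console.log(d.label, d.deviceId));
  })
  .catch((err) => console.error(err));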

Related

getUserMedia NotAllowedError: permission denied

Um...
When I call getUserMedia on my phone, I get an error
from alert(e): NotAllowedError: permission denied
What should I do?
This is part of the index.js code (where the video & chatting happens)...
async function getMedia(deviceId) {
  const initialConstraints = {
    audio: true,
    video: { facingMode: 'user' },
  };
  const cameraConstraints = {
    audio: true,
    video: { deviceId: { exact: deviceId } },
  };
  try {
    myStream = await navigator.mediaDevices.getUserMedia(
      deviceId ? cameraConstraints : initialConstraints
    );
    myFace.srcObject = myStream;
    if (!deviceId) {
      await getCameras();
    }
  } catch (e) {
    console.log(e);
    alert(e);
  }
}
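getCameras isn't shown in the snippet above; for context, it typically looks something like this (a hypothetical sketch based on enumerateDevices, names borrowed from the code above):

// Hypothetical sketch of getCameras(): list the video inputs so the user can switch cameras.
async function getCameras() {
  try {
    const devices = await navigator.mediaDevices.enumerateDevices();
    const cameras = devices.filter((device) => device.kind === 'videoinput');
    cameras.forEach((camera) => {
      console.log(camera.label, camera.deviceId);
      // e.g. append an <option> per camera to a <select> element here
    });
  } catch (e) {
    console.log(e);
  }
}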
I'm using a Rails app (web), React Native (WebView app), and Node.js (for realtime chatting and video calls, like Zoom, via socket.io and WebRTC).
I ran it in the WebView app (hybrid), and it isn't working... (it works well in the browser, though).
So I googled around and added these options:
<video autoplay="" webkit-playsinline="webkit-playsinline" playsinline="playsinline" muted="true" id="myFace" width="350" height="400"></video>
The video tag's parent is an iframe, and it has attributes like allow="camera;microphone;autoplay" and so on...
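For reference, camera and microphone access inside an iframe has to be delegated explicitly via the allow attribute, and the embedded page still needs to be served over HTTPS. A minimal sketch of the embedding markup (the URL is a placeholder):

<!-- Hedged sketch: the iframe must delegate the permissions the embedded page wants to use. -->
<iframe
  src="https://example.com/call"
  allow="camera; microphone; autoplay">
</iframe>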
I also added the Expo WebView options like:
return (
  <WebView
    useWebkit
    allowInlineMediaPlayback={true}
    mediaPlaybackRequiresUserAction={false}
    javaScriptEnabled={true}
    javaScriptEnabledAndroid
    geolocationEnabled={true}
  />
);
I'm a total beginner... can you help me out? Thanks!!
If you get this error on localhost or on any site, you have to go to the browser/site settings, search for the microphone or webcam permission, and enable it for the site manually.
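If you want to detect that state from code rather than sending users to the settings page, the Permissions API can report whether camera/microphone access is already denied before calling getUserMedia. A hedged sketch; the 'camera' and 'microphone' permission names are not supported in every browser, so the query is wrapped in try/catch:

// Hedged sketch: check the current permission state before prompting.
async function checkMediaPermissions() {
  try {
    const cam = await navigator.permissions.query({ name: 'camera' });
    const mic = await navigator.permissions.query({ name: 'microphone' });
    console.log('camera:', cam.state, 'microphone:', mic.state); // 'granted' | 'denied' | 'prompt'
    if (cam.state === 'denied' || mic.state === 'denied') {
      alert('Camera or microphone is blocked for this site; enable it in the browser settings.');
    }
  } catch (e) {
    // Some browsers don't support these permission names; fall back to just calling getUserMedia.
    console.log(e);
  }
}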

Basic broadcast example using agora.io not working

I'm trying to get agora.io's audio broadcasting working on a webpage, following this example.
Everything works if I also broadcast video. If I broadcast audio only, nothing shows and nothing is heard, but no errors appear in the console.
Here's my HTML:
<div class="video-grid" id="video">
<div class="video-view">
<div id="local_stream" class="video-placeholder"></div>
<div id="local_video_info" class="video-profile hide"></div>
<div id="video_autoplay_local" class="autoplay-fallback hide"></div>
</div>
</div>
Here's my Agora code:
var rtc = {
  client: null,
  joined: false,
  published: false,
  localStream: null,
  remoteStreams: [],
  params: {}
};

// Options for joining a channel
// Self-generated token
var option = {
  appID: "{{myappid}}",
  channel: "event-2123",
  uid: "1",
  token: "{{mytoken}}"
}

rtc.client = AgoraRTC.createClient({ mode: "live", codec: "h264" });

// Initialize the client
rtc.client.init(option.appID, function () {
  console.log("init success");

  // Join a channel
  rtc.client.join(option.token ? option.token : null, option.channel, option.uid ? +option.uid : null, function (uid) {
    console.log("join channel: " + option.channel + " success, uid: " + uid);
    rtc.params.uid = uid;
    rtc.client.setClientRole("host");

    rtc.localStream = AgoraRTC.createStream({
      streamID: rtc.params.uid,
      audio: true,
      video: true,
      screen: false,
    })

    // Initialize the local stream
    rtc.localStream.init(function () {
      console.log("--------");
      console.log("init local stream success");
      // play stream with html element id "local_stream"
      rtc.localStream.play("local_stream");
      // Publish the local stream
      rtc.client.publish(rtc.localStream, function (err) {
        console.log("publish failed");
        console.error(err);
      })
    }, function (err) {
      console.error("init local stream failed ", err);
    });
  }, function (err) {
    console.error("client join failed", err)
  })
}, (err) => {
  console.error(err);
});
This works (though not, it seems, on Safari). But if I change the stream parameters to this, nothing works:
rtc.localStream = AgoraRTC.createStream({
  streamID: rtc.params.uid,
  audio: true,
  video: false,
  screen: false,
})
I've noticed that in some browsers the media element is muted by default. So if no interface elements are showing and muting is on, perhaps that is the source of the problem?
How can I make this work?
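That muted/autoplay theory is plausible: most browsers block audible autoplay until the user interacts with the page, and an audio-only stream has no visible element to hint that playback is blocked. A generic sketch (not Agora-specific) of resuming playback from a user gesture, assuming the stream has been attached to a media element with a hypothetical id "local_audio":

// Hedged sketch: retry playback on a user gesture if autoplay was blocked.
const audioEl = document.getElementById('local_audio'); // hypothetical element
const playPromise = audioEl.play();
if (playPromise !== undefined) {
  playPromise.catch((err) => {
    console.warn('Autoplay blocked, waiting for a user gesture', err);
    document.addEventListener('click', () => audioEl.play(), { once: true });
  });
}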

How to set up node socket.io

I'm attempting to complete this tutorial and I'm getting this error in the browser console.
index.js:16 Uncaught TypeError: this.io.on is not a function
at index.js:16
This is the full contents of my index.js.
navigator.getUserMedia(
  { video: true, audio: true },
  stream => {
    const localVideo = document.getElementById("local-video");
    if (localVideo) {
      localVideo.srcObject = stream;
    }
  },
  error => {
    console.warn(error.message);
  }
);

this.io.on("connection", socket => {
  const existingSocket = this.activeSockets.find(
    existingSocket => existingSocket === socket.id
  );
  if (!existingSocket) {
    this.activeSockets.push(socket.id);
    socket.emit("update-user-list", {
      users: this.activeSockets.filter(
        existingSocket => existingSocket !== socket.id
      )
    });
    socket.broadcast.emit("update-user-list", {
      users: [socket.id]
    });
  }
})
What am I missing? I know that 'this' should refer to some enclosing object but what?
Check out the tutorial's file list.
The part you are referring to belongs in the socket-connection.ts file, not index.js as the author implies.
Or it may be some TypeScript abracadabra :)
P.S. You should also remember that "this" only gets its own binding inside a regular function (not inside an arrow function, which inherits it from the enclosing scope).
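In other words, this.io only makes sense inside a class (or object) that owns the socket.io server instance on the Node side; the browser-side index.js has no io at all. A rough sketch of what that server-side class typically looks like (a hedged reconstruction in plain JS, socket.io v4 style, not the tutorial's exact code):

// Hedged sketch of a server-side class where "this.io" is meaningful.
const http = require("http");
const { Server } = require("socket.io");

class SocketConnection {
  constructor(io) {
    this.io = io;               // socket.io server instance
    this.activeSockets = [];
    this.handleSocketConnection();
  }

  handleSocketConnection() {
    this.io.on("connection", (socket) => {
      const existingSocket = this.activeSockets.find((id) => id === socket.id);
      if (!existingSocket) {
        this.activeSockets.push(socket.id);
        socket.emit("update-user-list", {
          users: this.activeSockets.filter((id) => id !== socket.id),
        });
        socket.broadcast.emit("update-user-list", { users: [socket.id] });
      }
    });
  }
}

// Usage: create the HTTP + socket.io server on the Node side, then hand io to the class.
const server = http.createServer();
const io = new Server(server);
new SocketConnection(io);
server.listen(3000);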

Expo Audio is not working on iOS. It's stuck on the recording button itself

github.com/expo/audio-recording-example
You can check that out for the code.
I am using Audio from expo-av.
It works fine on Android devices, even on the emulator. On an Android device it first asks for the audio permission, then starts recording audio on click, and on stop it provides the playback audio.
But when testing on iOS it doesn't ask for permission at all: it goes straight to the audio recording page, and when I click the record button, recording doesn't start.
I can't tell whether it's a problem with the iOS audio permission or with the syntax of Audio.Recording.
I've tried to set the permission manually to true:
this.recordingSettings = JSON.parse(JSON.stringify(Audio.RECORDING_OPTIONS_PRESET_LOW_QUALITY));
// // UNCOMMENT THIS TO TEST maxFileSize:
// this.recordingSettings.android['maxFileSize'] = 12000;
}

_askForPermissions = async () => {
  const response = await Permissions.askAsync(Permissions.AUDIO_RECORDING);
  this.setState({
    haveRecordingPermissions: response.status === 'granted',
  });
};

async _stopPlaybackAndBeginRecording() {
  this.setState({
    isLoading: true,
  });
  if (this.sound !== null) {
    await this.sound.unloadAsync();
    this.sound.setOnPlaybackStatusUpdate(null);
    this.sound = null;
  }
  await Audio.setAudioModeAsync({
    allowsRecordingIOS: true,
    interruptionModeIOS: Audio.INTERRUPTION_MODE_IOS_DO_NOT_MIX,
    playsInSilentModeIOS: true,
    shouldDuckAndroid: true,
    interruptionModeAndroid: Audio.INTERRUPTION_MODE_ANDROID_DO_NOT_MIX,
    playThroughEarpieceAndroid: false,
    staysActiveInBackground: true,
  });
  if (this.recording !== null) {
    this.recording.setOnRecordingStatusUpdate(null);
    this.recording = null;
  }
  const recording = new Audio.Recording();
  await recording.prepareToRecordAsync(this.recordingSettings);
  recording.setOnRecordingStatusUpdate(this._updateScreenForRecordingStatus);
  this.recording = recording;
  await this.recording.startAsync(); // Will call this._updateScreenForRecordingStatus to update the screen.
  this.setState({
    isLoading: false,
  });
}

_onRecordPressed = () => {
  if (this.state.isRecording) {
    this._stopRecordingAndEnablePlayback();
  } else {
    this._stopPlaybackAndBeginRecording();
  }
};
I expect audio recording on iOS, but it gets stuck on isRecording.
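One thing worth checking before touching the recording settings: iOS won't record until the microphone permission has actually been requested and granted, so the permission request usually has to run when the screen mounts, and the record button should be gated on it. A small sketch of that wiring (assuming the same class component as above):

// Hedged sketch: request the recording permission up front and gate the record button on it.
componentDidMount() {
  this._askForPermissions(); // triggers the iOS permission prompt
}

_onRecordPressed = () => {
  if (!this.state.haveRecordingPermissions) {
    // Nothing will record on iOS until the permission is granted.
    this._askForPermissions();
    return;
  }
  if (this.state.isRecording) {
    this._stopRecordingAndEnablePlayback();
  } else {
    this._stopPlaybackAndBeginRecording();
  }
};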
I changed my recording settings and now everything is good on both devices, Android and iOS.
My updated settings:
// Based on Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY (instead of the LOW_QUALITY preset)
this.recordingSettings = {
  android: {
    extension: '.m4a',
    outputFormat: Audio.RECORDING_OPTION_ANDROID_OUTPUT_FORMAT_MPEG_4,
    audioEncoder: Audio.RECORDING_OPTION_ANDROID_AUDIO_ENCODER_AAC,
    sampleRate: 44100,
    numberOfChannels: 2,
    bitRate: 128000,
  },
  ios: {
    extension: '.m4a',
    outputFormat: Audio.RECORDING_OPTION_IOS_OUTPUT_FORMAT_MPEG4AAC,
    audioQuality: Audio.RECORDING_OPTION_IOS_AUDIO_QUALITY_MIN,
    sampleRate: 44100,
    numberOfChannels: 2,
    bitRate: 128000,
    linearPCMBitDepth: 16,
    linearPCMIsBigEndian: false,
    linearPCMIsFloat: false,
  },
};
I used these settings according to my requirements; you can use other setting options too, but keep Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY as the base.

CPU usage gets higher as more WebRTC peers are added?

I'm streaming video/audio from a server, using an Electron app to capture the desktop. When one user is connected, CPU usage on both cores is 30-50%. As more users join, the usage gets higher; with ~6 users it was a constant 100% on both cores, and the video becomes laggy and poor.
It's as if it's encoding the video separately for each user that joins. How can I make it encode once and send that stream to everyone? That's my only guess as to why CPU usage would climb so much; maybe I'm wrong about the cause. Thank you for any help you can give! I'm open to other ways of doing this as well, since only the server needs to send video out.
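For context, that guess is essentially right for mesh-style setups: each RTCPeerConnection runs its own encoder, so per-viewer cost grows roughly linearly (an SFU is the usual way to truly encode once and fan out). Short of restructuring, one way to cap the per-connection cost is to lower each video sender's encoding parameters. A hedged sketch using the standard RTCRtpSender API; the pc variable is an assumption (PeerJS calls typically expose the underlying connection as call.peerConnection):

// Hedged sketch: cap bitrate/resolution per video sender to reduce encoding load.
// `pc` is assumed to be the RTCPeerConnection for one viewer.
async function limitVideoSender(pc) {
  const sender = pc.getSenders().find((s) => s.track && s.track.kind === 'video');
  if (!sender) return;
  const params = sender.getParameters();
  if (!params.encodings || params.encodings.length === 0) {
    params.encodings = [{}];
  }
  params.encodings[0].maxBitrate = 1000000;          // ~1 Mbps
  params.encodings[0].scaleResolutionDownBy = 1.5;   // send at reduced resolution
  await sender.setParameters(params);
}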
Getting the video and audio:
function getAudio(audioID) {
  navigator.mediaDevices.getUserMedia({
    video: false,
    audio: {
      deviceId: { exact: audioID },
      autoGainControl: false,
      channelCount: 2,
      echoCancellation: false,
      noiseSuppression: false,
      sampleRate: 44100,
      sampleSize: 16
    }
  })
  .then(function(stream) {
    console.log("audio got??");
    var audio = stream.getAudioTracks()[0];
    mediaStream.addTrack(audio);
  })
  .catch(function(err) {
    console.log(err.message);
  });
}

desktopCapturer.getSources({ types: ['screen'] })
  .then(async sources => {
    console.log(sources);
    let constraints2 = {
      audio: false,
      video: {
        mandatory: {
          chromeMediaSource: 'desktop',
          maxWidth: 1280,
          maxHeight: 720
        }
      }
    }
    let constraints3 = {
      frameRate: { max: 24 }
    }
    navigator.mediaDevices.getUserMedia(constraints2)
      .then(function(stream) {
        mediaStream = stream;
        let track = stream.getVideoTracks()[0];
        track.applyConstraints(constraints3);
        setTimeout(function() {
          getAudio(audioID, 0);
        }, 2000);
      })
      .catch(console.error);
  })
  .catch(console.error);
Calling the peers that join:
peer = new Peer(myPeerID, {
  host: 'selfhostedpeerjs.com',
  port: 9000,
  path: '/',
  key: 'peerjs',
  config: {
    'iceServers': [
      { url: 'stun:stun.l.google.com:19302' },
      { url: 'stun:stun1.l.google.com:19302' },
      { url: 'stun:stun2.l.google.com:19302' },
      { url: 'stun:stun3.l.google.com:19302' },
      { url: 'stun:stun4.l.google.com:19302' }
    ]
  }
});

peer.on('open', function(id) {
  console.log("My peer id is: " + id);
});

peer.on('connection', function(conn) {
  conn.on('open', function() {
    console.log("connection opened");
    var id = conn.peer;
    //conn.send('Hello!');
    console.log("Trying to call now");
    var call = peer.call(id, mediaStream);
    call.on('error', function(err) {
      console.log('calling error');
      console.log(err);
    })
  });
});
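For completeness, the viewer side has to answer the incoming call and attach the stream; a minimal sketch of that receiving end using the PeerJS call event (the viewerPeer variable and element id are assumptions):

// Hedged sketch: viewer side answers the server's call and plays the stream.
viewerPeer.on('call', function(call) {
  call.answer();                       // no stream sent back; viewers only receive
  call.on('stream', function(remoteStream) {
    var video = document.getElementById('remote-video'); // hypothetical element
    video.srcObject = remoteStream;
    video.play();
  });
  call.on('error', console.error);
});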
