WebRTC Video Track to ffmpeg in Node - node.js

I have successfully managed to establish a WebRTC connection between Node (server) and a browser. The server gets the video track in the onTrack callback inside the RTCPeerConnection. Is there any way I can convert the video track and make it work with ffmpeg, so I can output it to RTMP?
Thanks in advance.

The way I have done this is to use a socket to the node server, and then use ffmpeg to convert to RTMP:
I spawn FFMPEG
// Sanity check at startup: try to launch `ffmpeg -h` once. If the binary
// cannot be spawned at all, report the problem and abort the server.
var spawn = require('child_process').spawn;

var probe = spawn('ffmpeg', ['-h']);
probe.on('error', function (err) {
  console.error("FFMpeg not found in system cli; please install ffmpeg properly or make a softlink to ./!");
  process.exit(-1);
});
I make sure I'm getting video from the socket, and then I pipe it into FFMPEG and out to my RTMP server:
// Build the ffmpeg argument list: read the muxed media arriving on stdin
// ('-i -'), re-encode to H.264/AAC, and publish to the RTMP URL the client
// stored on the socket.
// NOTE(review): audioBitrate and audioEncoding are defined elsewhere; given
// the flags below (-ar expects a sample rate, -b:a expects a bitrate) the
// two variable names look swapped — verify against their definitions.
var ops=[
'-i','-',
'-c:v', 'libx264', '-preset', 'ultrafast', '-tune', 'zerolatency', // video codec config: low latency, adaptive bitrate
'-c:a', 'aac', '-ar', audioBitrate, '-b:a', audioEncoding, // audio codec config: sampling frequency (11025, 22050, 44100), bitrate 64 kbits
//'-max_muxing_queue_size', '4000',
//'-y', //force to overwrite
//'-use_wallclock_as_timestamps', '1', // used for audio sync
//'-async', '1', // used for audio sync
//'-filter_complex', 'aresample=44100', // resample audio to 44100Hz, needed if input is not 44100
//'-strict', 'experimental',
'-bufsize', '5000',
'-f', 'flv', socket._rtmpDestination
];
// NOTE(review): stray closing brace from the original paste — it presumably
// closes an enclosing function that is not shown in this excerpt.
}
console.log("ops", ops);
console.log(socket._rtmpDestination);
// Spawn ffmpeg with the arguments above; the socket's binary chunks are
// written to ffmpeg_process.stdin elsewhere in the server.
ffmpeg_process=spawn('ffmpeg', ops);
console.log("ffmpeg spawned");
You can see my code at: https://github.com/dougsillars/browserLiveStream/blob/master/server.js
and a working example at livestream.a.video

Related

Reducing latency of Discord.js audio streaming

I'm implementing a Discord.js bot which streams my microphone to a voice channel using Prism Media. The problem is there's a delay of about 3 seconds from when the audio is recorded to when it is played.
The code below is how I'm currently initializing the audio player.
// Stream the local microphone into a Discord voice channel with minimal
// buffering, using prism-media's FFmpeg wrapper as the capture source.
// BUGFIX: the original paste required '#discordjs/voice'; the actual
// package name is '@discordjs/voice'.
const { createAudioPlayer, createAudioResource, NoSubscriberBehavior, StreamType } = require('@discordjs/voice')
const prism = require('prism-media')

// Keep the player running even when nobody is listening, and tolerate up to
// 250 missed frames before the player stops.
const player = createAudioPlayer({
  behaviors: {
    noSubscriber: NoSubscriberBehavior.Play,
    maxMissedFrames: 250
  }
})

// Capture the Realtek microphone via DirectShow and encode straight to
// Ogg/Opus, which is the format Discord.js expects.
player.play(
  createAudioResource(
    new prism.FFmpeg({
      args: [
        '-analyzeduration', '0', // skip input probing to cut startup latency
        '-loglevel', '0',
        '-f', 'dshow',
        '-i', 'audio=Microphone (Realtek(R) Audio)',
        '-acodec', 'libopus',
        '-f', 'opus',
        '-ar', '48000',          // Discord expects 48 kHz
        '-ac', '2'               // stereo
      ]
    }),
    {
      inputType: StreamType.OggOpus
    }
  )
)
Since Prism Media uses FFmpeg to record audio, I started by verifying if FFmpeg by itself already shows this issue. With the command below I am able to reproduce the problem.
ffmpeg -f dshow -i "audio=Microphone (Realtek(R) Audio)" -f opus - | ffplay -
I've also tried various other flags to no effect. Such as:
-audio_buffer_size 50
-fflags nobuffer
-flags low_delay
By encoding in other file formats, I'm able to reduce the latency of the raw command, but Discord.js expects the format to be Opus.
Also, I've chosen Prism Media because it is used in the examples I found, but I'm open to changing to another audio library, as long as it is compatible with Electron.
How can I reduce latency to less than a second?

Mp4 files generated from FFMPEG can not be trimmed on quick time player

I am using FFMPEG to generate mp4 file while recording the screen on a Linux machine.
I am using Spawn with nodeJs:
// Record the X11 display plus Pulse audio and emit a fragmented MP4 on
// stdout. NOTE(review): the template variables (BROWSER_SCREEN_WIDTH,
// VIDEO_FRAMERATE, DISPLAY, VIDEO_BITRATE, VIDEO_GOP, AUDIO_*) are defined
// elsewhere in the file.
const transcodeStreamToOutput = spawn('ffmpeg',[
'-hide_banner',
'-loglevel', 'error',
// disable interaction via stdin
'-nostdin',
// screen image size
'-s', `${BROWSER_SCREEN_WIDTH}x${BROWSER_SCREEN_HEIGHT}`,
// video frame rate
'-r', `${VIDEO_FRAMERATE}`,
// hides the mouse cursor from the resulting video
'-draw_mouse', '0',
// grab the x11 display as video input
'-f', 'x11grab',
'-i', `${DISPLAY}`,
// grab pulse as audio input
'-f', 'pulse',
'-ac', '2',
'-i', 'default',
// codec video with libx264
'-c:v', 'libx264',
'-pix_fmt', 'yuv420p',
'-profile:v', 'main',
'-preset', 'veryfast',
'-x264opts', 'nal-hrd=cbr:no-scenecut',
'-minrate', `${VIDEO_BITRATE}`,
'-maxrate', `${VIDEO_BITRATE}`,
'-g', `${VIDEO_GOP}`,
// apply a fixed delay to the audio stream in order to synchronize it with the video stream
'-filter_complex', 'adelay=delays=1000|1000',
// codec audio with aac
'-c:a', 'aac',
'-b:a', `${AUDIO_BITRATE}`,
'-ac', `${AUDIO_CHANNELS}`,
'-ar', `${AUDIO_SAMPLERATE}`,
// adjust fragmentation to prevent seeking(resolve issue: muxer does not support non seekable output)
// NOTE(review): frag_keyframe+empty_moov produces a *fragmented* MP4, which
// QuickTime Player cannot trim — this is the likely cause of the disabled
// Trim option described below. Remuxing to a regular MP4 would restore it.
'-movflags', 'frag_keyframe+empty_moov',
// set output format to mp4 and output file to stdout
'-f', 'mp4', '-'
]
);
The file opens normally on Mac using quick time player but my problem is that it is not editable.
The trim option is disabled on QuickTime player.

How can I increase my FPS output? (ffmpeg, Node.js)

I'm having quite the trouble understanding how fps output works.
I have a video workflow through node and ffmpeg that transform picture into scrolling videos, here is the command :
// First pass: overlay the scaled picture on a 1280x720 background and scroll
// it upward over time (overlay y decreases with t); fps=fps=60 is applied to
// the picture branch inside filter_complex.
// NOTE(review): path, video, clipDuration and resolve come from an enclosing
// scope (apparently a Promise executor) that is not shown in this excerpt.
const ffmpeg = spawn('ffmpeg', ['-f', 'lavfi', '-i', 'color=s=1280x720', '-loop', '1', '-i', `${path}/${video.name}`, '-filter_complex', `[1:v]scale=1280:-2,format=yuv420p,fps=fps=60[fg]; [0:v][fg]overlay=y=-\'t*h*0.02\'[v]`, '-map', '[v]', '-t', `${clipDuration}`, `./${path}/${video.name}-wip.mp4`])
ffmpeg.stderr.on('data', (data) => {
console.log(`${data}`);
});
ffmpeg.on('close', (code) => {
// Second pass: freeze the last frame for 3 s (tpad), then fade in at the
// start and fade out near the end.
const ffmpeg2 = spawn('ffmpeg', ['-i', `./${path}/${video.name}-wip.mp4`, '-vf', `tpad=stop_mode=clone:stop_duration=3,fade=type=in:duration=1,fade=type=out:duration=1:start_time=${clipDuration + 2}`, `./${path}/${video.name}.mp4`])
ffmpeg2.stderr.on('data', (data) => {
console.log(`${data}`);
});
ffmpeg2.on('close', (code) => {
resolve();
});
})
First ffmpeg command create a scrolling video from picture,
second ffmpeg command add a fade out transition and a pause to this video.
The FPS output for this is 25. How can I increase it to 60 so that the scrolling isn't stuttering anymore?
Thanks for your time.
try this
const ffmpeg2 = spawn('ffmpeg', ['-i', `./${path}/${video.name}-wip.mp4`, '-vf', `framerate=fps=60,tpad=stop_mode=clone:stop_duration=3,fade=type=in:duration=1,fade=type=out:duration=1:start_time=${clipDuration + 2}`, `./${path}/${video.name}.mp4`])
Note this from https://superuser.com/questions/1265642/ffmpeg-slideshow-with-crossfade:
ffmpeg -i temp.mp4 -vf "framerate=fps=60" -codec:v mpeg4 out.mp4
In the command line, use it like this:
ffmpeg -i main.mp4 -vf "framerate=fps=60" -codec:v mpeg4 out.mp4

Ffmpeg : Bitstream not supported by this decoder

I'm trying to create a livestream web app using NodeJS. The code I currently have emits a raw (webm format) binary stream from the webcam on the client using socket IO and the node server receives this raw data. Using fluent-ffmpeg, I want to encode this binary stream into mpegts and send it to an RTMP server in real time, without creating any intermediary files. However when trying to convert the blobs in ffmpeg I get the following error :
Error while decoding stream #0:1: Invalid data found when processing input
[NULL # 000001b15e67bd80] Invalid sync code 61f192.
[libvpx # 000001b15e6c5000] Failed to decode frame: Bitstream not supported by this decoder
My relevant frontend client code :
// Client side: capture the webcam, tell the server where to relay the
// stream, then push a recorded chunk over the socket every two seconds.
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
  video_show(stream);//only show locally, not remotely
  socket.emit('config_rtmpDestination',url);
  socket.emit('start','start');
  // NOTE(review): browser support for h264-in-webm varies; check
  // MediaRecorder.isTypeSupported(options.mimeType) before relying on it.
  var options = {mimeType: 'video/webm;codecs=h264'};
  mediaRecorder = new MediaRecorder(stream, options);
  mediaRecorder.start(2000); // emit a dataavailable event every 2000 ms
  mediaRecorder.onstop = function(e) {
    stream.stop();
  }
  mediaRecorder.ondataavailable = function(e) {
    // BUGFIX: the original emitted an undefined `blob` variable; the
    // recorded chunk is delivered on e.data.
    socket.emit("binarystream", e.data);
  }
}).catch(function(err) {
  console.log('The following error occured: ' + err);
  show_output('Local getUserMedia ERROR:'+err);
});
Relevant NodeJS server code :
// Server side: every binary chunk received from the browser is forwarded
// into ffmpeg's stdin via feedStream.
socket.on('binarystream',function(m){
feedStream(m);
});
// On 'start', spawn an ffmpeg that decodes the client's codec from stdin,
// encodes to low-latency H.264 (audio disabled with -an), and pushes an
// MPEG-TS stream to the configured RTMP destination.
// NOTE(review): the '...' lines below are elisions in the original post,
// not valid JavaScript.
socket.on('start',function(m){
...
var ops=[
'-vcodec', socket._vcodec,'-i','-',
'-c:v', 'libx264', '-preset', 'veryfast', '-tune', 'zerolatency',
'-an', '-bufsize', '1000',
'-f', 'mpegts', socket._rtmpDestination
];
ffmpeg_process=spawn('ffmpeg', ops);
// feedStream is (re)bound once ffmpeg is running so incoming chunks are
// piped straight to the encoder.
feedStream=function(data){
ffmpeg_process.stdin.write(data);
}
...
}
For anyone who is bumping into this issue: try replacing libvpx with libvpx-vp9, or a more recent version of libvpx.

How do I close a Node.js FFMPEG child process that is actively streaming from a live capture source?

I'm new to Node.js and have figured out how to utilize child.spawn to launch an instance of FFMPEG that is being used to capture live video and send it over to Adobe Media Server via rtmp.
Every example I've seen of FFMPEG being used in conjunction with Node.js has been with a time limited sample, so the child process closes once FFMPEG reaches the end of the file it is converting.
In this case, there is no "end of file".
If I instantiate:
var ffmpeg = child.spawn('ffmpeg.exe', [args]);
it creates the live feed.
I have tried immediately shutting the child process down with a:
// Attempted graceful shutdown by writing to ffmpeg's stdin.
// NOTE(review): the string written below is placeholder text, not a real
// command, and setTimeout is called without a delay argument, so the
// callback fires on the next tick rather than after a timeout.
setTimeout(function() {
ffmpeg.stdin.resume();
ffmpeg.stdin.write('insert command to echo q to close FFMPEG');
ffmpeg.stdin.end();
});
However, that does not seem to work. I continue to see my rtmp feed on my test box.
Is there any way to pass FFMPEG a shut down command via stdin in Node.js?
Thanks in advance!
Rick
The following code is loaded by my main app.js as a module using the child_process.fork() method:
// Launch a live DirectShow capture (webcam + mic) with ffmpeg and stream it
// to an RTMP server, then shut the encoder down gracefully after ten seconds.
var spawn = require('child_process').spawn;

var captureArgs = [
  '-y', '-threads', '-0', '-re',
  '-rtbufsize', '204800000',
  '-probesize', '4096',
  '-vsync', '2', '-async', '30',
  '-f', 'dshow', '-s', '320x240',
  '-i', 'video=Integrated Webcam:audio=Microphone Array (IDT High Defi',
  '-c:a', 'libvo_aacenc', '-ab', '48000', '-ar', '22050', '-ac', '2',
  '-c:v', 'libx264', '-s', '400x300', '-g', '96',
  '-x264opts', 'bitrate=1200',
  '-preset', 'ultrafast', '-profile:v', 'baseline',
  '-pix_fmt', 'yuv420p', '-aspect', '4:3',
  '-f', 'flv', 'rtmp://server'
];
var ffmpeg = spawn('C:\\Program Files (x86)\\ffmpeg\\bin\\ffmpeg.exe', captureArgs);

// After 10 s, wait for the next stderr chunk (ffmpeg logs progress there,
// so this confirms it is alive), then ask it to quit by writing 'q' to its
// stdin and exit this node process.
setTimeout(function () {
  ffmpeg.stderr.on('data', function () {
    ffmpeg.stdin.setEncoding('utf8');
    ffmpeg.stdin.write('q');
    process.exit();
  });
}, 10000);
It was far less complicated than I was making it. The main app.js is a basic HTML page that is served up and uses socket.io to receive an event and its corresponding data. In this case, a 'true' event loads the module.js file which launches a live capture session of FFMPEG, feeds it into a RTMP server, and gracefully shuts down FFMPEG on a timeout of 10 seconds.
My next task is to shut it down via an event triggered from a web interface as opposed to the current testing method of a timeout.
Looking at the task manager in Windows, the FFMPEG process closes as does the secondary node process.
The reason for this is that none of the node-ffmpeg modules that I found supported live streaming via capture input. They appear to be primarily for transcoding existing content. The final outcome of this will ideally be a web based interface that can start and stop FFMPEG. Our use case will be replacing Adobe Flash Media Live Encoder as the source for our Adobe Media Server due to its inability to save standard mp4 files.
You can simply kill it.
ffmpeg.kill('SIGHUP')
or any other kill signal you wish, see http://en.wikipedia.org/wiki/Unix_signal
If I understand your example correctly, you pass all args of the node process to ffmpeg, including the stream. In order to get your ffmpeg.end() to work, you would have to stream directly from your node process. I think that ffmpeg does not stop while it continuously receives data from your camera.
What follows is more or less a final solution to the problem of launching and closing a live FFMPEG session using Node.js:
var spawn = require('child_process').spawn
, fs = require('fs');
/**
 * Spawn a long-running ffmpeg process and poll ./server-state.json on every
 * stderr chunk (ffmpeg writes progress to stderr, so this fires regularly
 * while the capture runs). When the stored State flips to 'false', ask
 * ffmpeg to quit gracefully by writing 'q' to its stdin, then exit this
 * node process.
 *
 * @param {string} cmd - Path to the ffmpeg executable.
 * @param {string[]} opts - Argument vector passed to spawn().
 * @param {Function} callback - Unused; kept for interface compatibility.
 * @returns {ChildProcess} The spawned ffmpeg process.
 */
function ffmpeg(cmd, opts, callback) {
  // BUGFIX: the original declared `var p;`, then guarded on
  // `if (p == undefined)` — always true — and redeclared `var p` inside the
  // guard. The dead guard is removed; behavior is unchanged.
  var p = spawn(cmd, opts);

  p.stderr.on('data', function(data) {
    // BUGFIX: the file-contents parameter shadowed the stderr chunk `data`;
    // renamed to `json` for clarity (the stderr chunk itself is unused).
    fs.readFile(__dirname + '/server-state.json', function(error, json) {
      if (error) {
        console.log(error);
        return;
      }
      // BUGFIX: `content` was an implicit global in the original.
      var content = JSON.parse(json);
      console.log(content['State']);
      if (content['State'] === 'false') {
        p.stdin.setEncoding('utf8');
        p.stdin.write('q');
        process.exit();
      }
    });
  });

  return p;
}
// Kick off the capture: record the webcam + mic via DirectShow and write
// IntegrityTest.mp4 next to this module.
// NOTE(review): ffmpeg_var is assigned without var/let/const, creating an
// implicit global, and the trailing callback is never invoked by ffmpeg()
// above.
ffmpeg_var = ffmpeg('C:\\Program Files (x86)\\ffmpeg\\bin\\ffmpeg.exe', ['-y', '-threads', '-0', '-re', '-rtbufsize', '204800000', '-probesize', '4096', '-vsync', '2', '-async', '30', '-f', 'dshow', '-s', '320x240', '-i', 'video=Integrated Webcam:audio=Microphone Array (IDT High Defi', '-c:a', 'libvo_aacenc', '-ab', '48000', '-ar', '22050', '-ac', '2', '-c:v', 'libx264', '-s', '400x300', '-g', '96', '-x264opts', 'bitrate=1200', '-preset', 'ultrafast', '-profile:v', 'baseline', '-pix_fmt', 'yuv420p', '-aspect', '4:3', '-f', 'mp4', __dirname + '/IntegrityTest.mp4'], function() {
});
This code is encapsulated in a "module.js" file that is instantiated via a child_process.fork() in the root application.js file. It reads a text file where the 'state' is stored. This state is toggled via a write/read method in the root application. In the on('data') event, it reads the file and if it detects that the state has changed to false, it then shuts down FFMPEG by writing the 'q' command to the stdin.
Outside of refining it by using a database if I were to implement it on a larger scale, I am more than open to feedback regarding a more elegant way to code this.
you need to send EOF after the file is done streaming to ffmpeg. then ffmpeg will finish and shutdown correctly

Resources