How do I queue files for ffmpeg on Node.js? - node.js

I need to encode/convert many videos on my (Linux) server, and FFmpeg is resource-intensive.
I am running Node.js and using fluent-ffmpeg:
const Ffmpeg = require("fluent-ffmpeg");

videos.forEach(video => {
  covertVideo(video, (compressionResult) => {
    // do something here
  });
});
function covertVideo(filename, callback) {
  var ffmpeg = new Ffmpeg(filename); // the constructor required above
  ffmpeg.setFfmpegPath(pathToFfmpeg); // pathToFfmpeg is defined elsewhere (e.g. from ffmpeg-static)
  ffmpeg.addOptions("-threads 1")
    .fps(23)
    .complexFilter([
      "scale=w='if(gt(a,0.75),240,trunc(320*a/2)*2)':h='if(lt(a,0.75),320,trunc(240/a/2)*2)',pad=w=240:h=320:x='if(gt(a,0.75),0,(240-iw)/2)':y='if(lt(a,0.75),0,(320-ih)/2)':color=black[scaled]"
    ])
    .on('error', function(err) {
      console.log('An error occurred: ' + err.message);
      return callback(false);
    })
    .on('end', function() {
      console.log('Compression finished!');
      return callback(true);
    })
    .save("./tmp/" + filename);
}
So I am using a forEach loop to iterate over the files to be encoded, and the function above is called on each iteration.
The problem is that forEach moves on to the second file and starts encoding it while the first is still being encoded by FFmpeg, which produces a "Resource temporarily unavailable" error.
Is there a way to queue files sent to FFmpeg so that the next file is encoded only when the previous one is done, rather than concurrently?
OR
Is there a way for the Node.js forEach to wait for FFmpeg to finish the first video before proceeding to the next iteration?
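One way to do this (a minimal sketch, assuming covertVideo keeps the callback signature shown above) is to wrap it in a Promise and await each file in a for...of loop, so only one FFmpeg process runs at a time:

// Wrap the callback-style covertVideo from the question in a Promise so it can be awaited.
function covertVideoAsync(filename) {
  return new Promise((resolve) => {
    covertVideo(filename, (compressionResult) => resolve(compressionResult));
  });
}

// Encode strictly one file at a time; the next encode starts only after the previous one finishes.
async function encodeAll(videos) {
  for (const video of videos) {
    const ok = await covertVideoAsync(video);
    console.log(`${video}: ${ok ? 'compressed' : 'failed'}`);
  }
}

encodeAll(videos).catch(console.error);

A queue library such as async.queue or p-limit could cap concurrency at some number greater than one if limited parallelism is acceptable.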

Related

electron fluent-ffmpeg mergeToFile() command promise not triggering

I am trying to use fluent-ffmpeg with my Electron app to concatenate multiple audio files together with an image into a video. So if I have three audio files and an image:
song1.mp3 1:00
song2.mp3 0:30
song3.mp3 2:00
front.jpg
I could create output.mp4, which would be 3:30 long and play each audio file one after the other in order, with front.jpg set as the background image.
I am trying to create the concatenated audio file first, so I can then render a video with two inputs: the image and the 3:30-long concatenated audio file. But I'm having difficulty getting my Electron app to wait for the ffmpeg job to run and complete.
I know how to do all of these ffmpeg jobs on the command line, but I've been following this guide for how to package ffmpeg into an Electron app that can run on mac/win10/linux environments. I'm developing it on win10 right now.
I have a button:
<button onClick='fullAlbum("upload-${uploadNumber}")'>FULLALBUM</button>
that, when clicked, runs the fullAlbum() function, which calls combineMp3FilesOrig to run the actual ffmpeg job:
async function fullAlbum(uploadName) {
  //document.getElementById("buttonId").disabled = true;
  //get table
  var table = $(`#upload_${uploadNumber}_table`).DataTable()
  //get all selected rows
  var selectedRows = table.rows( '.selected' ).data()
  //get outputFile location
  var path = require('path');
  var outputDir = path.dirname(selectedRows[0].audioFilepath)
  //create outputfile
  var timestamp = new Date().getUTCMilliseconds();
  let outputFilepath = `${outputDir}/output-${timestamp}.mp3`
  console.log('fullAlbum() button pressed: ', timestamp)
  await combineMp3FilesOrig(selectedRows, outputFilepath, '320k', timestamp);
  //document.getElementById("buttonId").disabled = false;
  console.log(`fullAlbum() /output-${timestamp}.mp3 should be created now`)
}
function combineMp3FilesOrig(selectedRows, outputFilepath, bitrate, timestamp) {
  console.log(`combineMp3FilesOrig(): ${outputFilepath}`)
  //begin get ffmpeg info
  const ffmpeg = require('fluent-ffmpeg');
  //Get the paths to the packaged versions of the binaries we want to use
  const ffmpegPath = require('ffmpeg-static').replace('app.asar', 'app.asar.unpacked');
  const ffprobePath = require('ffprobe-static').path.replace('app.asar', 'app.asar.unpacked');
  //tell the ffmpeg package where it can find the needed binaries.
  ffmpeg.setFfmpegPath(ffmpegPath);
  ffmpeg.setFfprobePath(ffprobePath);
  //end set ffmpeg info
  //create ffmpeg command
  console.log(`combineMp3FilesOrig(): create command`)
  const command = ffmpeg();
  //set command inputs
  command.input('C:\\Users\\marti\\Documents\\martinradio\\uploads\\CharlyBoyUTurn\\5. Akula (Club Mix).flac')
  command.input('C:\\Users\\marti\\Documents\\martinradio\\uploads\\CharlyBoyUTurn\\4. Civilian Barracks.flac')
  return new Promise((resolve, reject) => {
    console.log(`combineMp3FilesOrig(): command status logging`)
    command.on('progress', function(progress) {
      console.info(`Processing : ${progress.percent} % done`);
    })
    .on('codecData', function(data) {
      console.log('codecData=', data);
    })
    .on('end', function() {
      console.log('file has been converted succesfully; resolve() promise');
      resolve();
    })
    .on('error', function(err) {
      console.log('an error happened: ' + err.message, ', reject()');
      reject(err);
    })
    console.log(`combineMp3FilesOrig(): add audio bitrate to command`)
    command.audioBitrate(bitrate)
    console.log(`combineMp3FilesOrig(): tell command to merge inputs to single file`)
    command.mergeToFile(outputFilepath);
    console.log(`combineMp3FilesOrig(): end of promise`)
  });
  console.log(`combineMp3FilesOrig(): end of function`)
}
When I click my button once, my console.logs show that the promise is entered and the command is created, but the function just ends without waiting for resolve().
Waiting a couple of minutes doesn't change anything.
If I press the button again, a new command gets created and reaches the end of the promise, but this time it actually starts, and it triggers the previous command to start as well. Both jobs then run and their files are rendered at the correct length (12:08) and the correct quality (320k).
Is there something with my promise I need to fix involving async functions and promises in an Electron app? I tried editing my ffmpeg command to include
command.run()
at the end of my promise to ensure it gets triggered, but that leads to an error in the console saying Uncaught (in promise) Error: No output specified, because apparently in fluent-ffmpeg command.mergeToFile(outputFilepath); isn't enough and I need to include .output(outputFilepath) as well. If I change command.run() to command.output(outputFilepath).run(), then when I click my button the ffmpeg job gets triggered and rendered perfectly fine, EXCEPT THAT THE FILE IS ALWAYS 128kbps.
So I'm trying to figure out why, in my included code block, my ffmpeg command doesn't run the first time it is created.
I've played about with this and I'm seeing the same issue with your original code: the file is being output with a 128k bitrate.
The code below seems to work, though the maximum bitrate I'm getting is 320k (I presume this is a limitation of the codec).
After testing again, I think I'm getting the same behaviour as you, in that the file takes some time to generate. I return a Promise from the combineMp3FilesOrig function, then await it in the click handler and disable the button until the call is complete (obviously your button id will be different).
function combineMp3FilesOrig(selectedRows, outputFile, bitrate) {
  const command = ffmpeg();
  var count = selectedRows.length;
  for (var i = 0; i < count; i++) {
    command.input(selectedRows[i].audioFilepath)
  }
  return new Promise((resolve, reject) => {
    command.on('progress', function(progress) {
      console.info(`Processing : ${progress.percent} % done`);
    })
    .on('codecData', function(data) {
      console.log('codecData=', data);
    })
    .on('end', function() {
      console.log('file has been converted succesfully');
      resolve();
    })
    .on('error', function(err) {
      console.log('an error happened: ' + err.message);
      reject(err);
    }).audioBitrate(bitrate).mergeToFile(outputFile);
  });
}

async function convertFiles() {
  document.getElementById("buttonId").disabled = true;
  await combineMp3FilesOrig(selectedRows, 'output.mp3', '320k');
  document.getElementById("buttonId").disabled = false;
}

fluent-ffmpeg get codec data without specifying output

I am using the fluent-ffmpeg node module to get codec data from a file.
It works if I give it an output, but I was wondering whether there is any way to run fluent-ffmpeg without giving it an output.
This is what I am doing:
readStream.end(new Buffer(file.buffer));

var process = new ffmpeg(readStream);
process.on('start', function() {
  console.log('Spawned ffmpeg');
}).on('codecData', function(data) {
  //get recording duration
  const duration = data.duration;
  console.log(duration)
}).save('temp.flac');
As you can see, I am saving the file to temp.flac just so I can get the duration in seconds of that file.
If you don't want to save the ffmpeg process result to a file, one thing that comes to mind is to redirect the command output to /dev/null.
In fact, as the owner of the fluent-ffmpeg repository said in one comment, there is no need to specify a real file name for the destination when using the null format.
So, for example, something like this will work:
let process = new ffmpeg(readStream);
process
  .addOption('-f', 'null') // set format to null
  .on('start', function() {
    console.log('Spawned ffmpeg');
  })
  .on('codecData', function(data) {
    //get recording duration
    let duration = data.duration;
    console.log(duration)
  })
  .output('nowhere') // or '/dev/null' or something else
  .run()
It remains a bit hacky, but we must set an output to avoid the "No output specified" error.
When no stream argument is present, the pipe() method returns a PassThrough stream, which you can pipe to somewhere else (or just listen to events on).
var command = ffmpeg('/path/to/file.avi')
  .videoCodec('libx264')
  .audioCodec('libmp3lame')
  .size('320x240')
  .on('error', function(err) {
    console.log('An error occurred: ' + err.message);
  })
  .on('end', function() {
    console.log('Processing finished !');
  });

var ffstream = command.pipe();
ffstream.on('data', function(chunk) {
  console.log('ffmpeg just wrote ' + chunk.length + ' bytes');
});
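As a side note (not covered in the answers above), fluent-ffmpeg also wraps ffprobe, which reads duration and codec information without transcoding anything, so no output, real or null, is needed at all. A minimal sketch, assuming the media is available at a local path rather than as a stream:

const ffmpeg = require('fluent-ffmpeg');

// ffprobe only inspects the file; nothing is converted and no output file is written.
ffmpeg.ffprobe('/path/to/recording.flac', function(err, metadata) {
  if (err) return console.error(err.message);
  console.log('duration (seconds):', metadata.format.duration);
  console.log('first stream codec:', metadata.streams[0].codec_name);
});

Probing a readable stream is less reliable because ffprobe may need to seek, which is why the path-based form is shown here.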

Cut multiple parts from a video and merge them together

I am trying to cut specific parts out of a video and then merge all those parts into a single video file using Node.js and ffmpeg.
Here's my code. Currently I can only cut one part out of the video, from .setStartTime for .setDuration, and that part is being saved.
var ffmpeg = require('fluent-ffmpeg');

var command = ffmpeg()
  .input('./videos/placeholder-video.mp4')
  .setStartTime('00:00:03')
  .setDuration('02')
  .output('./videos/test.mp4')
  .on('start', function(commandLine) {
    console.log('Started: ' + commandLine);
  })
  .on('end', function(err) {
    if (!err) {
      console.log('conversion Done');
    }
  })
  .on('error', function(err) {
    console.log('error: ' + err);
  }).run();
How do I cut multiple parts out of the video and merge them into a single video file? I know about the .mergeToFile method, but how do I use it after cutting different parts from my video?
I tried using .setStartTime and .setDuration twice, as below, but the first pair is ignored.
.input('./videos/placeholder-video.mp4')
.setStartTime('00:00:03')
.setDuration('02')
.setStartTime('00:00:15')
.setDuration('05')
.output('./videos/test.mp4')
PLEASE READ UPDATE
Could not test it yet, but try the following:
Set the input video two times, like this:
.input('./videos/placeholder-video.mp4')
.setStartTime('00:00:03')
.setDuration('02')
.input('./videos/placeholder-video.mp4')
.setStartTime('00:00:15')
.setDuration('05')
Then you can merge the multiple inputs with .mergeToFile.
UPDATE
My answer cannot work due to a restriction of .setDuration: it is an output option, so it defines how long transcoding to the output file runs: https://github.com/fluent-ffmpeg/node-fluent-ffmpeg
It is not used to define the length/duration of the input.
Another option would be .loop, but apparently it is not supported for this purpose: https://video.stackexchange.com/questions/12905/repeat-loop-input-video-with-ffmpeg/12906#12906
If you really want to use nodejs, you need multiple commands:
cut the input video into temporary files, one for each cut
merge the temporary files into one output file
Something like this:
var command1 = ffmpeg()
  .input('./small.mp4')
  .seekInput('00:00:02')
  .withDuration(1)
  .output('./temp1.mp4')
  .run();

var command2 = ffmpeg()
  .input('./small.mp4')
  .seekInput('00:00:01')
  .withDuration(3)
  .output('./temp2.mp4')
  .run();

var command3 = ffmpeg()
  .input('./temp1.mp4')
  .input('./temp2.mp4')
  .mergeToFile('./merge.mp4', './temp');
Big problem: the merge command will fail if the temporary files are not present yet, and because the cut commands run asynchronously there is no guarantee they are.
So it would be much easier to use a bash or batch script.
This is how I would solve it:
(async function() {
  const ffmpeg = require('fluent-ffmpeg')
  const fs = require('fs') // needed for fs.unlink below

  function ffmpegPromise(fn) {
    return new Promise((res, rej) => {
      fn.on('error', (e) => rej(e))
        .on('end', () => res('finished!'))
        .run()
    })
  }

  const file1 = ffmpeg().input('./videos/placeholder-video.mp4')
    .setStartTime('00:00:03')
    .setDuration(2)
    .output('./videos/test1.mp4')

  const file2 = ffmpeg().input('./videos/placeholder-video.mp4')
    .setStartTime('00:00:10')
    .setDuration(2)
    .output('./videos/test2.mp4')

  const files = await Promise.all([ffmpegPromise(file1), ffmpegPromise(file2)])
  console.log(files) // ['finished!', 'finished!']

  ffmpeg()
    .mergeAdd('./videos/test1.mp4')
    .mergeAdd('./videos/test2.mp4')
    .mergeToFile('./videos/mergedFile.mp4')
    .on('end', () => {
      fs.unlink('./videos/test1.mp4', err => {
        if (err) console.error(err)
      })
      fs.unlink('./videos/test2.mp4', err => {
        if (err) console.error(err)
      })
    })
})()
file1 instantiates an ffmpeg command. The code is not run until ffmpegPromise is called, which issues the .run() command. It starts at 3 seconds, takes 2 seconds of video, and saves the 2-second clip to './videos/test1.mp4'.
file2 does the same thing, except it starts at 10 seconds and takes 2 seconds of video, saving the clip to './videos/test2.mp4'.
ffmpegPromise wraps the instantiated ffmpeg command in a Promise so it can be awaited.
Promise.all runs both file1 and file2 at the same time and resolves with 'finished!' for each once the files are saved.
ffmpeg().mergeAdd()...mergeToFile(...) takes both 2-second clips and merges them into one file, './videos/mergedFile.mp4'. When it completes, the 'end' event fires and fs.unlink deletes the test1 and test2 videos.
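As an aside (not part of the answers above), the cuts and the merge can also be expressed in a single ffmpeg run with the trim/atrim and concat filters, which avoids the temporary files entirely. A rough sketch using fluent-ffmpeg's complexFilter, reusing the same two cut points assumed above:

const ffmpeg = require('fluent-ffmpeg');

// Cut 2 seconds starting at 00:00:03 and 2 seconds starting at 00:00:10, then join them.
ffmpeg('./videos/placeholder-video.mp4')
  .complexFilter([
    // First segment: trim video and audio, then reset timestamps.
    "[0:v]trim=start=3:duration=2,setpts=PTS-STARTPTS[v0]",
    "[0:a]atrim=start=3:duration=2,asetpts=PTS-STARTPTS[a0]",
    // Second segment.
    "[0:v]trim=start=10:duration=2,setpts=PTS-STARTPTS[v1]",
    "[0:a]atrim=start=10:duration=2,asetpts=PTS-STARTPTS[a1]",
    // Concatenate the two segments into one video/audio pair.
    "[v0][a0][v1][a1]concat=n=2:v=1:a=1[outv][outa]"
  ], ['outv', 'outa'])
  .output('./videos/mergedFile.mp4')
  .on('end', () => console.log('merge done'))
  .on('error', (err) => console.error('error: ' + err.message))
  .run();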

How do you use Node.js to stream an MP4 file with ffmpeg?

I've been trying to solve this problem for several days now and would really appreciate any help on the subject.
I'm able to successfully stream an mp4 audio file stored on a Node.js server using fluent-ffmpeg by passing the location of the file as a string and transcoding it to mp3. If I create a file stream from the same file and pass that to fluent-ffmpeg instead, it works for an mp3 input file but not an mp4 file. In the case of the mp4 file no error is thrown and it claims the stream completed successfully, but nothing plays in the browser. I'm guessing this has to do with the metadata being stored at the end of an mp4 file, but I don't know how to code around this. This is the exact same file that works correctly when its location is passed to ffmpeg rather than the stream. When I try to pass a stream to the mp4 file on s3, again no error is thrown, but nothing streams to the browser. This isn't surprising, as ffmpeg won't work with the file locally as a stream, so expecting it to handle the stream from s3 is wishful thinking.
How can I stream the mp4 file from s3 without storing it locally as a file first? And how do I get ffmpeg to do this without transcoding the file? The following is the code I have at the moment, which isn't working. Note that it attempts to pass the s3 file as a stream to ffmpeg, and it also transcodes it into an mp3, which I'd prefer not to do.
.get(function(req, res) {
  aws.s3(s3Bucket).getFile(s3Path, function (err, result) {
    if (err) {
      return next(err);
    }
    var proc = new ffmpeg(result)
      .withAudioCodec('libmp3lame')
      .format('mp3')
      .on('error', function (err, stdout, stderr) {
        console.log('an error happened: ' + err.message);
        console.log('ffmpeg stdout: ' + stdout);
        console.log('ffmpeg stderr: ' + stderr);
      })
      .on('end', function () {
        console.log('Processing finished !');
      })
      .on('progress', function (progress) {
        console.log('Processing: ' + progress.percent + '% done');
      })
      .pipe(res, {end: true});
  });
});
This is using the knox library when it calls aws.s3... I've also tried writing it using the standard aws sdk for Node.js, as shown below, but I get the same outcome as above.
var AWS = require('aws-sdk');

var s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_KEY,
  region: process.env.AWS_REGION_ID
});

var fileStream = s3.getObject({
  Bucket: s3Bucket,
  Key: s3Key
}).createReadStream();

var proc = new ffmpeg(fileStream)
  .withAudioCodec('libmp3lame')
  .format('mp3')
  .on('error', function (err, stdout, stderr) {
    console.log('an error happened: ' + err.message);
    console.log('ffmpeg stdout: ' + stdout);
    console.log('ffmpeg stderr: ' + stderr);
  })
  .on('end', function () {
    console.log('Processing finished !');
  })
  .on('progress', function (progress) {
    console.log('Processing: ' + progress.percent + '% done');
  })
  .pipe(res, {end: true});
=====================================
Updated
I placed an mp3 file in the same s3 bucket, and the code I have here worked: it streamed the file through to the browser without storing a local copy. So the streaming issues I face have something to do with the mp4/aac container/encoder format.
I'm still interested in a way to bring the m4a file down from s3 to the Node.js server in its entirety and then pass it to ffmpeg for streaming, without actually storing the file in the local file system.
=====================================
Updated Again
I've managed to get the server streaming the file, as mp4, straight to the browser. This half answers my original question. My only issue now is that I have to download the file to a local store first, before I can stream it. I'd still like to find a way to stream from s3 without needing the temporary file.
aws.s3(s3Bucket).getFile(s3Path, function(err, result) {
  result.pipe(fs.createWriteStream(file_location));
  result.on('end', function() {
    console.log('File Downloaded!');
    var proc = new ffmpeg(file_location)
      .outputOptions(['-movflags isml+frag_keyframe'])
      .toFormat('mp4')
      .withAudioCodec('copy')
      .seekInput(offset)
      .on('error', function(err, stdout, stderr) {
        console.log('an error happened: ' + err.message);
        console.log('ffmpeg stdout: ' + stdout);
        console.log('ffmpeg stderr: ' + stderr);
      })
      .on('end', function() {
        console.log('Processing finished !');
      })
      .on('progress', function(progress) {
        console.log('Processing: ' + progress.percent + '% done');
      })
      .pipe(res, {end: true});
  });
});
On the receiving side I just have the following javascript in an empty html page:
window.AudioContext = window.AudioContext || window.webkitAudioContext;
context = new AudioContext();

function process(Data) {
  source = context.createBufferSource(); // Create Sound Source
  context.decodeAudioData(Data, function(buffer) {
    source.buffer = buffer;
    source.connect(context.destination);
    source.start(context.currentTime);
  });
};

function loadSound() {
  var request = new XMLHttpRequest();
  request.open("GET", "/stream/<audio_identifier>", true);
  request.responseType = "arraybuffer";
  request.onload = function() {
    var Data = request.response;
    process(Data);
  };
  request.send();
};

loadSound()
=====================================
The Answer
The code above under the title 'Updated Again' will stream an mp4 file from s3, via a Node.js server, to a browser without using Flash. It does require that the file be stored temporarily on the Node.js server so that the metadata in the file can be moved from the end of the file to the front. In order to stream without storing the temporary file, you need to actually modify the file on S3 first and make this metadata change. If you have changed the file in this way on S3, then you can modify the code under 'Updated Again' so that the result from S3 is piped straight into the ffmpeg constructor, rather than into a file stream on the Node.js server which then provides that file location to ffmpeg, as the code does now. You can change the final 'pipe' command to 'save(location)' to get a version of the mp4 file locally with the metadata moved to the front, then upload that new version to S3 and try out the end-to-end streaming. Personally, I'm now going to create a task that modifies the files in this way as they are uploaded to s3 in the first place. This allows me to record and stream in mp4 without transcoding or storing a temp file on the Node.js server.
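The metadata change described here amounts to moving the moov atom to the front of the container, which ffmpeg can do with the -movflags faststart output option. A minimal sketch (not from the original answer) using fluent-ffmpeg to produce a faststart copy of a local file; the file names are placeholders:

const ffmpeg = require('fluent-ffmpeg');

// Remux the file with the moov atom at the front so it can be streamed progressively.
// No transcoding: the audio (and video, if present) streams are copied as-is.
ffmpeg('./tmp/original.mp4')
  .outputOptions(['-movflags faststart'])
  .withAudioCodec('copy')
  .withVideoCodec('copy')
  .on('end', () => console.log('faststart copy written'))
  .on('error', (err) => console.error('an error happened: ' + err.message))
  .save('./tmp/original-faststart.mp4');

The resulting copy could then be uploaded back to S3 in place of the original.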
One of the main issues here is that you cannot seek on a piped stream, so you would have to store the file first. However, if you just want to stream from the beginning, you can use a slightly different construction and pipe. Here is an example of the most straightforward way to do it.
// can just create an in-memory read stream without saving
var stream = aws.s3(s3Bucket).getObject(s3Path).createReadStream();

// fluent-ffmpeg supports a readstream as an arg in constructor
var proc = new ffmpeg(stream)
  .outputOptions(['-movflags isml+frag_keyframe'])
  .toFormat('mp4')
  .withAudioCodec('copy')
  //.seekInput(offset) this is a problem with piping
  .on('error', function(err, stdout, stderr) {
    console.log('an error happened: ' + err.message);
    console.log('ffmpeg stdout: ' + stdout);
    console.log('ffmpeg stderr: ' + stderr);
  })
  .on('end', function() {
    console.log('Processing finished !');
  })
  .on('progress', function(progress) {
    console.log('Processing: ' + progress.percent + '% done');
  })
  .pipe(res, {end: true});
To quote the question:
How can I stream the mp4 file from s3, without storing it locally as a file first? How do I get ffmpeg to do this without transcoding the file too? The following is the code I have at the moment which isn't working. Note that it attempts to pass the s3 file as a stream to ffmpeg and it's also transcoding it into an mp3, which I'd prefer not to do.
AFAIK, if the moov atom is in the right place in the media file, nothing special is required to stream an S3-hosted mp4, because you can rely on HTTP for that. If the client requests "chunked" encoding, it will get just that: a chunked stream terminated by the "END-OF" marker shown below.
0\r\n
\r\n
By including the chunked header, the client is saying "I want a stream". Under the covers, S3 is just nginx or apache, isn't it? They both honor the headers.
Test it with the curl CLI as your client...
> User-Agent: curl/7.28.1-DEV
> Host: S3.domain
> Accept: */*
> Transfer-Encoding: chunked
> Content-Type: video/mp4
> Expect: 100-continue
You may want to try adding the codecs to the "Content-Type:" header. I don't know, but I don't think it would be required for this type of streaming (the moov atom resolves that stuff).
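For illustration only (this is an assumption, not part of the original answer): if the response goes through your own Node server rather than coming directly from S3, a codecs parameter can be added to the Content-Type header before piping the stream. The codec strings below are common H.264/AAC placeholder values and would need to match the actual file.

// Hypothetical handler; `res` is the outgoing response that the mp4 stream is piped into.
res.setHeader('Content-Type', 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"');
// Node switches to chunked transfer encoding automatically when no Content-Length is set.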
I had an issue buffering file streams from the S3 file object. The s3 file stream does not have the correct headers set, and it seems it does not implement piping correctly.
I think a better solution is to use the nodejs module called s3-streams. It sets the correct headers and buffers the output so that the stream can be correctly piped to the output response socket. It saves you from having to save the file stream locally before re-streaming it.

How do I stream large files over ssh in Node?

I'm trying to stream a cat command using the ssh2 module, but it just hangs at some point during execution. I'm executing cat there.txt, where there.txt is around 10 MB or so.
For example:
var local = fs.createWriteStream('here.txt');

conn.exec('cat there.txt', function(err, stream) {
  if (err) throw err;
  stream.pipe(local).on('finish', function() { console.log('Done'); });
});
This just completely stops at some point. I've even piped the stream to the local stdout, and it just hangs after a while. In my actual code I pipe it through a bunch of other transform streams, so I think this is better than transferring the files to the local system first (the files may get larger than 200MB).
I had just started working with streams recently, so when I was piping the ssh stream through various transform streams, I wasn't ending on a writable stream like I was in my example (I should've included my actual code, sorry!). This caused it to hang. The original goal was to execute multiple commands remotely and put their output, sorted, into a single file.
So my original code was stream.pipe(transformStream), then pushing the transformStream to an array once it finished, and then sorting everything using the mergesort-stream npm module. Instead of that, I now just write the results from the multiple ssh commands (transformed) to temporary files and then sort them all at once.
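The fix described here boils down to making sure the pipeline ends in a writable destination. A minimal sketch, assuming conn is an established ssh2 connection and someTransform stands in for the (hypothetical) transform streams from the actual code:

const fs = require('fs');

conn.exec('cat there.txt', function(err, stream) {
  if (err) throw err;
  stream
    .pipe(someTransform)                       // hypothetical Transform stream(s)
    .pipe(fs.createWriteStream('sorted.txt'))  // always finish on a writable sink
    .on('finish', function() {
      console.log('Done');
    });
});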
Try createReadStream for serving huge files:
fs.exists(correctfilepath, function(exists) {
  if (exists) {
    var readstream = fs.createReadStream(correctfilepath);
    console.log("About to serve " + correctfilepath);
    res.writeHead(200);
    readstream.setEncoding("binary");
    readstream.on("data", function (chunk) {
      res.write(chunk, "binary");
    });
    readstream.on("end", function () {
      console.log("Served file " + correctfilepath);
      res.end();
    });
    readstream.on('error', function(err) {
      res.write(err + "\n");
      res.end();
      return;
    });
  } else {
    res.writeHead(404);
    res.write("No data\n");
    res.end();
  }
});
