Differentiate between error and standard terminal log with ffmpeg - node.js

I'm using ffmpeg in Node.js. Both the standard terminal output and the errors seem to be sent to the same place, so I don't know how to differentiate between error and success... Here's my code:
var convertToMp3 = function(filePath) {
    var ffmpeg = child_process.spawn('ffmpeg', ['-i', filePath, '-y', 'output.mp3']);
    var err = '';
    ffmpeg.stderr
        .on('data', function(c) { err += c; })
        .on('end', function() { console.log('stderr:', err); });
    var d = '';
    ffmpeg.stdout
        .on('data', function(c) { d += c; })
        .on('end', function() { console.log('stdout:', d); });
}
Whether the conversion succeeds or fails, stdout is empty and stderr contains what I'd get if I ran the corresponding command in the terminal.

FFmpeg writes all of its logging data to stderr, to leave stdout free for piping the output data to some other program or another ffmpeg instance.
When running ffmpeg as an automated process it's often useful to give the option
-loglevel error
which turns it completely mute in a normal scenario and only outputs the error data (to stderr), which is normally what you would expect from a command-line program.
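For example, a minimal sketch of how this can pair with the spawn call from the question (the exit-code check and the treat-any-stderr-output-as-failure rule are my assumptions, not part of the original answer):

var child_process = require('child_process');

var convertToMp3 = function(filePath, callback) {
    // With -loglevel error, stderr stays quiet on success and only carries real errors.
    var ffmpeg = child_process.spawn('ffmpeg', ['-loglevel', 'error', '-i', filePath, '-y', 'output.mp3']);
    var err = '';
    ffmpeg.stderr.on('data', function(c) { err += c; });
    ffmpeg.on('exit', function(code) {
        if (code !== 0 || err) {
            callback(new Error('ffmpeg failed (exit code ' + code + '): ' + err));
        } else {
            callback(null);
        }
    });
};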

Inspired by @aergistal's comment, here is a solution that works for me, where callback is executed at the end of the task, with the usual signature function(error, success).
var convertToMp3 = function(filePath, callback) {
    var ffmpeg = child_process.spawn('ffmpeg', ['-i', filePath, '-y', 'output.mp3']);
    var err = '';
    ffmpeg.stderr.on('data', function(c) { err += c; }).on('end', function() { console.log('stderr:', err); });
    ffmpeg.on('exit', function(code) {
        if (code) {
            callback({code: code, message: err});
        } else {
            callback(null, {success: true, message: err});
        }
    });
}
or in the new js style:
const convertToMp3 = (filePath) => new Promise((resolve, reject) => {
    const ffmpeg = child_process.spawn('ffmpeg', ['-i', filePath, '-y', 'output.mp3']);
    let output = '';
    ffmpeg.stderr.on('data', c => { output += c; });
    ffmpeg.on('exit', code => {
        if (code) {
            reject({ code: code, message: output });
        } else {
            resolve(output);
        }
    });
});
const ffmpegOutput = await convertToMp3('./video.mp4');
...

Related

How to correctly handle child_process `close` event on try catch block? [duplicate]

I'm using the Bluebird promise library under Node.js; it's great! But I have a question:
If you take a look at the documentation of Node's child_process.exec and child_process.execFile you can see that both of these functions are returning a ChildProcess object.
So what's the recommended way to promisify such functions?
Note that the following works (I get a Promise object):
var Promise = require('bluebird');
var execAsync = Promise.promisify(require('child_process').exec);
var execFileAsync = Promise.promisify(require('child_process').execFile);
But how can one get access to the original return value of the original Node.js functions? (In these cases I would need to be able to access the originally returned ChildProcess objects.)
Any suggestion would be appreciated!
EDIT:
Here is an example code which is using the return value of the child_process.exec function:
var exec = require('child_process').exec;
var child = exec('node ./commands/server.js');

child.stdout.on('data', function(data) {
    console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
    console.log('stderr: ' + data);
});
child.on('close', function(code) {
    console.log('closing code: ' + code);
});
But if I use the promisified version of the exec function (execAsync from above), then the return value will be a promise, not a ChildProcess object. This is the real problem I am talking about.
I would recommend using standard JS promises built into the language over an additional library dependency like Bluebird.
If you're using Node 10+, the Node.js docs recommend using util.promisify; the promisified exec resolves with a { stdout, stderr } object. See an example below:
const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function lsExample() {
    try {
        const { stdout, stderr } = await exec('ls');
        console.log('stdout:', stdout);
        console.log('stderr:', stderr);
    } catch (e) {
        console.error(e); // should contain code (exit code) and signal (that caused the termination).
    }
}
lsExample();
Handle errors first from stderr.
It sounds like you'd like to return two things from the call:
the ChildProcess
a promise that resolves when the ChildProcess completes
So "the recommended way to promisify such functions"? Don't.
You're outside the convention. Promise returning functions are expected to return a promise, and that's it. You could return an object with two members (the ChildProcess & the promise), but that'll just confuse people.
I'd suggest calling the unpromisified function, and creating a promise based off the returned childProcess. (Maybe wrap that into a helper function)
This way, it's quite explicit for the next person who reads the code.
Something like:
var Promise = require('bluebird');
var exec = require('child_process').execFile;

function promiseFromChildProcess(child) {
    return new Promise(function (resolve, reject) {
        child.addListener("error", reject);
        child.addListener("exit", resolve);
    });
}

var child = exec('ls');

promiseFromChildProcess(child).then(function (result) {
    console.log('promise complete: ' + result);
}, function (err) {
    console.log('promise rejected: ' + err);
});

child.stdout.on('data', function (data) {
    console.log('stdout: ' + data);
});
child.stderr.on('data', function (data) {
    console.log('stderr: ' + data);
});
child.on('close', function (code) {
    console.log('closing code: ' + code);
});
If you're just wanting to promisify specifically child_process.exec() and child_process.execFile(), in recent node versions there is a better answer here.
Since Node v12 the built-in util.promisify allows access to the ChildProcess object in the returned Promise for built-in functions where it would have been returned by the un-promisified call. From the docs:
The returned ChildProcess instance is attached to the Promise as a child property.
This correctly and simply satisfies the need to access ChildProcess in the original question and makes other answers out of date providing that Node v12+ can be used.
Adapting the example (and concise style) provided by the questioner, access to the ChildProcess can be achieved like:
const util = require('util');
const exec = util.promisify(require('child_process').exec);

const promise = exec('node ./commands/server.js');
const child = promise.child;

child.stdout.on('data', function(data) {
    console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
    console.log('stderr: ' + data);
});
child.on('close', function(code) {
    console.log('closing code: ' + code);
});

// i.e. you can then await the promisified exec call to complete
const { stdout, stderr } = await promise;
Here's another way:
const { exec } = require('child_process');

function execPromise(command) {
    return new Promise(function(resolve, reject) {
        exec(command, (error, stdout, stderr) => {
            if (error) {
                reject(error);
                return;
            }
            resolve(stdout.trim());
        });
    });
}
Use the function:
execPromise(command).then(function(result) {
    console.log(result);
}).catch(function(e) {
    console.error(e.message);
});
Or with async/await:
try {
    var result = await execPromise(command);
} catch (e) {
    console.error(e.message);
}
There's probably not a way to do this nicely that covers all use cases. But for limited cases, you can do something like this:
/**
 * Promisified child_process.exec
 *
 * @param cmd
 * @param opts See child_process.exec node docs
 * @param {stream.Writable} opts.stdout If defined, child process stdout will be piped to it.
 * @param {stream.Writable} opts.stderr If defined, child process stderr will be piped to it.
 *
 * @returns {Promise<{ stdout: string, stderr: string }>}
 */
const { exec } = require('child_process');

function execp(cmd, opts) {
    opts || (opts = {});
    return new Promise((resolve, reject) => {
        const child = exec(cmd, opts,
            (err, stdout, stderr) => err ? reject(err) : resolve({
                stdout: stdout,
                stderr: stderr
            }));
        if (opts.stdout) {
            child.stdout.pipe(opts.stdout);
        }
        if (opts.stderr) {
            child.stderr.pipe(opts.stderr);
        }
    });
}
This accepts opts.stdout and opts.stderr arguments, so that stdio can be captured from the child process.
For example:
const stream = require('stream');

execp('ls ./', {
    stdout: new stream.Writable({
        write: (chunk, enc, next) => {
            console.log(chunk.toString(enc));
            next();
        }
    }),
    stderr: new stream.Writable({
        write: (chunk, enc, next) => {
            console.error(chunk.toString(enc));
            next();
        }
    })
}).then(() => console.log('done!'));
Or simply:
execp('ls ./', {
    stdout: process.stdout,
    stderr: process.stderr
}).then(() => console.log('done!'));
Just want to mention that there's a nice tool that will solve your problem completely:
https://www.npmjs.com/package/core-worker
This package makes it a lot easier to handle processes.
import { process } from "core-worker";
import fs from "fs";
const result = await process("node Server.js", "Server is ready.").ready(1000);
const result = await process("cp path/to/file /newLocation/newFile").death();
or combine these functions:
import { process } from "core-worker";
const simpleChat = process("node chat.js", "Chat ready");
setTimeout(() => simpleChat.kill(), 3600000); // wait an hour and close the chat
simpleChat.ready(500)
    .then(console.log.bind(console, "You are now able to send messages."))
    .then(::simpleChat.death)
    .then(console.log.bind(console, "Chat closed"))
    .catch(() => { /* handle err */ });
Here are my two cents. It uses spawn, which streams the output and writes it to stdout and stderr. The error and standard output are captured in buffers and used to resolve or reject the promise.
This is written in TypeScript; feel free to remove the typings if you're using JavaScript:
import { spawn, SpawnOptionsWithoutStdio } from 'child_process'
const spawnAsync = async (
    command: string,
    options?: SpawnOptionsWithoutStdio
) =>
    new Promise<Buffer>((resolve, reject) => {
        const [spawnCommand, ...args] = command.split(/\s+/);
        const spawnProcess = spawn(spawnCommand, args, options);
        const chunks: Buffer[] = [];
        const errorChunks: Buffer[] = [];
        spawnProcess.stdout.on("data", (data) => {
            process.stdout.write(data.toString());
            chunks.push(data);
        });
        spawnProcess.stderr.on("data", (data) => {
            process.stderr.write(data.toString());
            errorChunks.push(data);
        });
        spawnProcess.on("error", (error) => {
            reject(error);
        });
        spawnProcess.on("close", (code) => {
            if (code !== 0) {
                reject(Buffer.concat(errorChunks).toString());
                return;
            }
            resolve(Buffer.concat(chunks));
        });
    });
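A small usage sketch, assuming spawnAsync from above is in scope (the ls -la command is only illustrative):

// Runs the command, streams its output, and yields the collected stdout buffer.
const run = async () => {
    try {
        const output = await spawnAsync('ls -la');
        console.log('finished with:', output.toString());
    } catch (err) {
        console.error('command failed:', err);
    }
};
run();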
Just another example: you might run into issues when running multiple commands if you destructure into the same const names; you can rename them like this.
const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function runCommands() {
    try {
        const { stdout, stderr } = await exec('ls');
        console.log('stdout:', stdout);
        console.log('stderr:', stderr);
        const { stdout: stdoutTwo, stderr: stderrTwo } = await exec('ls');
        console.log('stdoutTwo:', stdoutTwo);
        console.log('stderrTwo:', stderrTwo);
        const { stdout: stdoutThree, stderr: stderrThree } = await exec('ls');
        console.log('stdoutThree:', stdoutThree);
        console.log('stderrThree:', stderrThree);
    } catch (e) {
        console.error(e); // should contain code (exit code) and signal (that caused the termination).
    }
}
runCommands();
Here's mine. It doesn't deal with stdin or stdout, so if you need those then use one of the other answers on this page. :)
// promisify `child_process`
// This is a very nice trick :-)
this.promiseFromChildProcess = function (child) {
    return new Promise((resolve, reject) => {
        child.addListener('error', (code, signal) => {
            console.log('ChildProcess error', code, signal);
            reject(code);
        });
        child.addListener('exit', (code, signal) => {
            if (code === 0) {
                resolve(code);
            } else {
                console.log('ChildProcess error', code, signal);
                reject(code);
            }
        });
    });
};
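A minimal usage sketch, assuming the helper above is reachable as promiseFromChildProcess (the ls command is only illustrative):

const { spawn } = require('child_process');

const child = spawn('ls', ['-la']);
promiseFromChildProcess(child)
    .then((code) => console.log('exited cleanly with code', code))
    .catch((err) => console.error('process failed:', err));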

How to get fd of child.stdin?

How to get fd of child.stdin to do fs.writeSync/readSync? 'open' never gets fired.
const fs = require('fs');
const { spawn } = require('child_process');
const child = spawn('cat');
const cmd = 'foo\n';
let buff = Buffer.from(cmd);

// child.stdin.fd is undefined
child.stdin.write(buff);

// never gets called
child.stdin.on('open', function (fd) {
    fs.writeSync(fd, buff, 0, buff.length, null, err => {
        if (err) throw 'Could not send command: ' + err;
        console.log('Command successfully sent');
    });
});

child.stdout.on('data', (chunk) => {
    // echo
    process.stdout.write(chunk.toString());
});
child.on('close', (code) => {
    console.log(`exited with code ${code}`);
});
The fd of child.stdin can be accessed via child.stdin._handle.fd. Not sure if this is recommended but I tested it in both v0.12.10 and v10.16.0.
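A minimal sketch of that approach (note that _handle is an internal, undocumented property, so this may break between Node versions, and fs.writeSync takes no callback):

const fs = require('fs');
const { spawn } = require('child_process');

const child = spawn('cat');
const buff = Buffer.from('foo\n');

// Undocumented internal handle; works in practice but is not a stable API.
const fd = child.stdin._handle.fd;
fs.writeSync(fd, buff, 0, buff.length, null);

child.stdout.on('data', (chunk) => process.stdout.write(chunk.toString()));
child.stdin.end(); // close stdin so `cat` can exit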

child_process.spawn doesn't emit any events

I'm trying to run ripgrep from my Node app and am seeing a strange behavior with child_process.spawn: none of the events fire and the app never finishes (is stuck somewhere inside the spawn call):
import { spawn } from 'child_process';

async function run() {
    await spawnWrapper('rg', ['-F', '"demo"'], { cwd: __dirname });
}

export function spawnWrapper(command, args, options) {
    return new Promise((resolve, reject) => {
        let stdout = '';
        let stderr = '';
        const child = spawn(command, args, options);
        console.log('spawn wrapper');
        child.on('close', (code, signal) => {
            console.log('close');
            resolve({ code, signal, stdout, stderr });
        });
        child.on('error', (error) => {
            console.log('error');
            (error as any).stderr = stderr;
            reject(error);
        });
        child.on('exit', (code, signal) => {
            console.log('exit');
            resolve({ code, signal, stdout, stderr });
        });
        child.stdout.setEncoding('utf8');
        child.stderr.setEncoding('utf8');
        child.stdout.on('data', (data) => {
            console.log('stdout data');
            stdout += data;
        });
        child.stderr.on('data', (data) => {
            console.log('stderr data');
            stderr += data;
        });
    });
}
I only get "spawn wrapper" in the console, no other events. I've never seen this behavior with other binaries; maybe it's something with ripgrep, but still, shouldn't I be getting at least some hints from Node? Any suggestions on how to debug this?
It was caused by ripgrep waiting for input, which was not obvious to me (on the command line, it just executes straight away). Details here: https://github.com/BurntSushi/ripgrep/issues/410
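A hedged sketch of a workaround, assuming the hang really is the child waiting on stdin (the explicit . path argument and the stdin handling are illustrative, not taken from the linked issue):

const { spawn } = require('child_process');

// Give rg an explicit path to search so it does not fall back to reading stdin.
const child = spawn('rg', ['-F', 'demo', '.'], { cwd: __dirname });

// Alternatively, spawn with stdio: ['ignore', 'pipe', 'pipe'] so there is no stdin pipe at all.
child.stdin.end(); // make sure the child is not left waiting for input

child.stdout.on('data', (d) => process.stdout.write(d));
child.on('close', (code) => console.log('closed with code', code));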

Wait for callback of async function in last stream.on('data') event

I am using fast-csv to iterate over a CSV file using a stream. For each row of the CSV file, I want to create a job in Redis, for which I use kue. Parsing a row is a synchronous function. The whole thing looks like this:
var csvStream = fastCsv(_config.csvOptions)
    .on('data', function(data) {
        var stream = this;
        stream.pause();
        var payload = parseRow(data, _config);
        console.log(payload); // the payload is always printed to the console
        var job = kue.create('csv-row', {
            payload: payload
        })
        .save(function(err) {
            if (!err) console.log('Enqueued at ' + job.id);
            else console.log('Redis error ' + JSON.stringify(err));
            stream.resume();
        });
    })
    .on('end', function() {
        callback(); // this ends my parsing
    });
The simple console.log(payload); shows up for every row of my CSV file; however, the job is not created. I.e., none of the output in the save callback is printed, and the job is not in my Redis.
I assume, because it's the last row of the CSV file, the stream already emits end and so the last kue.create() cannot be executed before the process terminates?
Is there a way to halt the stream's end until kue is done?
You can solve this problem using the async library. You can use the pattern below for any stream.
var AsyncLib = require('async');

var worker = function (payload, cb) {
    //do something with payload and call callback
    return cb();
};

var concurrency = 5;
var streamQueue = AsyncLib.queue(worker, concurrency);

var stream = //some readable stream;

stream.on('data', function(data) {
    //no need to pause and resume
    var payload = '//some payload';
    streamQueue.push(payload);
})
.on('end', function() {
    //register drain event on end and callback
    streamQueue.drain = function () {
        callback();
    };
});
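One caveat, as an assumption about newer releases of the async library (v3+): drain is a method there rather than an assignable property, so the end handler would look more like this:

// async v3+ style: register the drain handler via the method
// (calling drain() with no arguments returns a promise instead).
stream.on('end', function () {
    streamQueue.drain(function () {
        callback();
    });
});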
I have just found myself in the same situation, and I resolved it with a pattern similar to sync.WaitGroup from the Go language.
In its simplest form it looks like this (the function returns a Promise):
function process() {
    const locker = {
        items: 0,
        resolve: null,
        lock: function() { this.items++; },
        unlock: function() {
            if (this.items > 0) this.items--;
            if (this.items === 0) {
                this.resolve();
                this.resolve = null;
            }
        },
    };

    return new Promise(function(resolve, reject) {
        locker.resolve = resolve;
        locker.lock();

        fastCsv(_config.csvOptions)
            .on('end', function() { locker.unlock(); })
            .on('data', function(data) {
                locker.lock();
                const payload = parseRow(data, _config);
                const job = kue.create('csv-row', { payload: payload })
                    .save(function(err) {
                        if (!err) console.log('Enqueued at ' + job.id);
                        else console.log('Redis error ' + JSON.stringify(err));
                        locker.unlock();
                    });
            });
    });
}

process().then(function () {
    console.log('Now ALL data processed!');
});
In a real application you can extract the locker into a distinct class/module, add error handling, etc. But the principle is the same: wait until not only the stream is empty but also all the non-blocking operations created in it are finished, as sketched below.
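For instance, a minimal sketch of such an extraction (the class name and API are illustrative, not from the original code):

// WaitGroup-style helper: lock() before starting an async task,
// unlock() when it finishes, and wait() for everything to drain.
class WaitGroup {
    constructor() {
        this.items = 0;
        this._promise = new Promise((resolve) => { this._resolve = resolve; });
    }
    lock() { this.items++; }
    unlock() {
        if (this.items > 0) this.items--;
        if (this.items === 0) this._resolve();
    }
    wait() { return this._promise; }
}

The process() function above would then lock() once up front, lock() per row, unlock() in each save callback and on 'end', and finally return the wait() promise.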

Stream a file from NodeJS

I am using weed-fs to store files and am trying to stream a file to the client using the code below:
var fileName = [__dirname, '/uploads/', req.params.id, ".png"].join('');
try {
    var writeStream = fs.createWriteStream(fileName);
    weedfs.read(req.params.id, writeStream);
    writeStream.on("close", function () {
        var readStream = fs.createReadStream(fileName);
        response.pipe(readStream);
        response.on('error', function (err) {
            readStream.end();
        });
        response.on('end', function (err) {
            removeFile(fileName);
        });
    });
} catch (e) {
    res.send(404);
}
But I am getting the error Error: connect ECONNREFUSED.
If I look at the weedfs read method, I find the code below:
read: function(fid, stream, cb) {
    var ins = this;
    if ((typeof stream == "function") && !cb) {
        cb = stream;
        stream = false;
    }
    this.find(fid, function(pub) {
        if (pub[0]) {
            if (stream) {
                ins.http(pub[0]).pipe(stream);
                ......
Am I doing something wrong? Please help me fix this issue.
