child_process.spawn doesn't emit any events

I'm trying to run ripgrep from my Node app and am seeing strange behavior with child_process.spawn: none of the events fire and the app never finishes (it's stuck somewhere inside the spawn call):
import { spawn } from 'child_process';

async function run() {
  await spawnWrapper('rg', ['-F', '"demo"'], { cwd: __dirname });
}

export function spawnWrapper(command, args, options) {
  return new Promise((resolve, reject) => {
    let stdout = '';
    let stderr = '';
    const child = spawn(command, args, options);
    console.log('spawn wrapper');
    child.on('close', (code, signal) => {
      console.log('close');
      resolve({ code, signal, stdout, stderr });
    });
    child.on('error', (error) => {
      console.log('error');
      (error as any).stderr = stderr;
      reject(error);
    });
    child.on('exit', (code, signal) => {
      console.log('exit');
      resolve({ code, signal, stdout, stderr });
    });
    child.stdout.setEncoding('utf8');
    child.stderr.setEncoding('utf8');
    child.stdout.on('data', (data) => {
      console.log('stdout data');
      stdout += data;
    });
    child.stderr.on('data', (data) => {
      console.log('stderr data');
      stderr += data;
    });
  });
}
I only get "spawn wrapper" in the console, no other events. I've never seen this behavior with other binaries, so maybe it's something with ripgrep, but still, shouldn't I be getting at least some hints from Node? Any suggestions on how to debug this?

It was caused by ripgrep waiting for input, which was not obvious to me (on the command line, it just executes straight away). Details here: https://github.com/BurntSushi/ripgrep/issues/410
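For anyone hitting the same symptom: one way to rule out a child blocked on stdin is to close (or detach) stdin right after spawning. A minimal sketch against the wrapper above, where the stdin.end() line is the only addition:

const child = spawn(command, args, options);
// ripgrep reads from stdin when no path argument is given, so close it;
// alternatively, pass stdio: ['ignore', 'pipe', 'pipe'] in the options
child.stdin.end();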

How to correctly handle child_process `close` event on try catch block?

I'm using the Bluebird promise library under Node.js, and it's great! But I have a question:
If you take a look at the documentation of Node's child_process.exec and child_process.execFile, you can see that both of these functions return a ChildProcess object.
So what's the recommended way to promisify such functions?
Note that the following works (I get a Promise object):
var Promise = require('bluebird');
var execAsync = Promise.promisify(require('child_process').exec);
var execFileAsync = Promise.promisify(require('child_process').execFile);
But how can one get access to the original return value of the original Node.js functions? (In these cases I would need to be able to access the originally returned ChildProcess objects.)
Any suggestion would be appreciated!
EDIT:
Here is some example code which uses the return value of the child_process.exec function:
var exec = require('child_process').exec;
var child = exec('node ./commands/server.js');

child.stdout.on('data', function(data) {
  console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
  console.log('stderr: ' + data);
});
child.on('close', function(code) {
  console.log('closing code: ' + code);
});
But if I use the promisified version of the exec function (execAsync from above), the return value is a promise, not a ChildProcess object. This is the real problem I am talking about.
I would recommend using standard JS promises built into the language over an additional library dependency like Bluebird.
If you're using Node 10+, the Node.js docs recommend using util.promisify, which returns a Promise<{ stdout, stderr }>. See the example below:
const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function lsExample() {
  try {
    const { stdout, stderr } = await exec('ls');
    console.log('stdout:', stdout);
    console.log('stderr:', stderr);
  } catch (e) {
    console.error(e); // should contain code (exit code) and signal (that caused the termination)
  }
}
lsExample();
Note that a command can write to stderr and still exit successfully (the promise resolves), so check stderr for errors first.
It sounds like you'd like to return two things from the call:
the ChildProcess
a promise that resolves when the ChildProcess completes
So "the recommended way to promisify such functions"? Don't.
You're outside the convention. Promise-returning functions are expected to return a promise, and that's it. You could return an object with two members (the ChildProcess and the promise), but that'll just confuse people.
I'd suggest calling the unpromisified function, and creating a promise based off the returned childProcess. (Maybe wrap that into a helper function)
This way, it's quite explicit for the next person who reads the code.
Something like:
var Promise = require('bluebird');
var execFile = require('child_process').execFile;

function promiseFromChildProcess(child) {
  return new Promise(function (resolve, reject) {
    child.addListener("error", reject);
    child.addListener("exit", resolve);
  });
}

var child = execFile('ls');

promiseFromChildProcess(child).then(function (result) {
  console.log('promise complete: ' + result);
}, function (err) {
  console.log('promise rejected: ' + err);
});

child.stdout.on('data', function (data) {
  console.log('stdout: ' + data);
});
child.stderr.on('data', function (data) {
  console.log('stderr: ' + data);
});
child.on('close', function (code) {
  console.log('closing code: ' + code);
});
If you're just wanting to promisify specifically child_process.exec() and child_process.execFile(), in recent node versions there is a better answer below.
Since Node v12 the built-in util.promisify allows access to the ChildProcess object in the returned Promise for built-in functions where it would have been returned by the un-promisified call. From the docs:
The returned ChildProcess instance is attached to the Promise as a child property.
This correctly and simply satisfies the need to access ChildProcess in the original question and makes other answers out of date, provided that Node v12+ can be used.
Adapting the example (and concise style) provided by the questioner, access to the ChildProcess can be achieved like this:
const util = require('util');
const exec = util.promisify(require('child_process').exec);

const promise = exec('node ./commands/server.js');
const child = promise.child;

child.stdout.on('data', function(data) {
  console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
  console.log('stderr: ' + data);
});
child.on('close', function(code) {
  console.log('closing code: ' + code);
});

// can then await the promisified exec call to complete (inside an async function)
const { stdout, stderr } = await promise;
Here's another way:
const exec = require('child_process').exec;

function execPromise(command) {
  return new Promise(function(resolve, reject) {
    exec(command, (error, stdout, stderr) => {
      if (error) {
        reject(error);
        return;
      }
      resolve(stdout.trim());
    });
  });
}
Use the function:
execPromise(command).then(function(result) {
  console.log(result);
}).catch(function(e) {
  console.error(e.message);
});
Or with async/await:
try {
  var result = await execPromise(command);
} catch (e) {
  console.error(e.message);
}
There's probably no way to do this nicely that covers all use cases. But for limited cases, you can do something like this:
const exec = require('child_process').exec;

/**
 * Promisified child_process.exec
 *
 * @param cmd
 * @param opts See child_process.exec node docs
 * @param {stream.Writable} opts.stdout If defined, child process stdout will be piped to it.
 * @param {stream.Writable} opts.stderr If defined, child process stderr will be piped to it.
 *
 * @returns {Promise<{ stdout: string, stderr: string }>}
 */
function execp(cmd, opts) {
  opts || (opts = {});
  return new Promise((resolve, reject) => {
    const child = exec(cmd, opts,
      (err, stdout, stderr) => err ? reject(err) : resolve({
        stdout: stdout,
        stderr: stderr
      }));
    if (opts.stdout) {
      child.stdout.pipe(opts.stdout);
    }
    if (opts.stderr) {
      child.stderr.pipe(opts.stderr);
    }
  });
}
This accepts opts.stdout and opts.stderr arguments, so that stdio can be captured from the child process.
For example:
const stream = require('stream');

execp('ls ./', {
  stdout: new stream.Writable({
    write: (chunk, enc, next) => {
      console.log(chunk.toString(enc));
      next();
    }
  }),
  stderr: new stream.Writable({
    write: (chunk, enc, next) => {
      console.error(chunk.toString(enc));
      next();
    }
  })
}).then(() => console.log('done!'));
Or simply:
execp('ls ./', {
  stdout: process.stdout,
  stderr: process.stderr
}).then(() => console.log('done!'));
Just want to mention that there's a nice tool that will solve your problem completely:
https://www.npmjs.com/package/core-worker
This package makes it a lot easier to handle processes.
import { process } from "CoreWorker";
import fs from "fs";
const result = await process("node Server.js", "Server is ready.").ready(1000);
const result = await process("cp path/to/file /newLocation/newFile").death();
or combine these functions:
import { process } from "core-worker";
const simpleChat = process("node chat.js", "Chat ready");
setTimeout(() => simpleChat.kill(), 360000); // wait an hour and close the chat
simpleChat.ready(500)
.then(console.log.bind(console, "You are now able to send messages."))
.then(::simpleChat.death)
.then(console.log.bind(console, "Chat closed"))
.catch(() => /* handle err */);
Here are my two cents. This uses spawn, which streams the output, writing it to stdout and stderr while also capturing it in buffers that the promise resolves or rejects with.
This is written in TypeScript; feel free to remove the typings if you're using JavaScript:
import { spawn, SpawnOptionsWithoutStdio } from 'child_process';

const spawnAsync = async (
  command: string,
  options?: SpawnOptionsWithoutStdio
) =>
  new Promise<Buffer>((resolve, reject) => {
    const [spawnCommand, ...args] = command.split(/\s+/);
    const spawnProcess = spawn(spawnCommand, args, options);
    const chunks: Buffer[] = [];
    const errorChunks: Buffer[] = [];
    spawnProcess.stdout.on("data", (data) => {
      process.stdout.write(data.toString());
      chunks.push(data);
    });
    spawnProcess.stderr.on("data", (data) => {
      process.stderr.write(data.toString());
      errorChunks.push(data);
    });
    spawnProcess.on("error", (error) => {
      reject(error);
    });
    spawnProcess.on("close", (code) => {
      if (code !== 0) { // reject on any non-zero exit code, not just 1
        reject(new Error(Buffer.concat(errorChunks).toString()));
        return;
      }
      resolve(Buffer.concat(chunks));
    });
  });
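Usage could then look like this (the command and option values are just examples). Note that splitting the command string on whitespace breaks arguments that themselves contain spaces; pass those through spawn with an explicit args array instead:

// inside an async function:
const output = await spawnAsync("ls -lh", { cwd: "/tmp" });
console.log("captured:", output.toString());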
Just another example: if you run multiple commands and destructure each result into the same const names, you'll get redeclaration errors. You can rename the destructured variables like this.
const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function runCommands() {
  try {
    const { stdout, stderr } = await exec('ls');
    console.log('stdout:', stdout);
    console.log('stderr:', stderr);

    const { stdout: stdoutTwo, stderr: stderrTwo } = await exec('ls');
    console.log('stdoutTwo:', stdoutTwo);
    console.log('stderrTwo:', stderrTwo);

    const { stdout: stdoutThree, stderr: stderrThree } = await exec('ls');
    console.log('stdoutThree:', stdoutThree);
    console.log('stderrThree:', stderrThree);
  } catch (e) {
    console.error(e); // should contain code (exit code) and signal (that caused the termination)
  }
}
runCommands();
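If the commands don't depend on each other, you could also run them concurrently and destructure the results of Promise.all (the commands here are just placeholders):

const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function runConcurrently() {
  // each exec() starts immediately; Promise.all waits for all of them
  const [first, second] = await Promise.all([exec('ls'), exec('ls -a')]);
  console.log('first:', first.stdout);
  console.log('second:', second.stdout);
}
runConcurrently();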
Here's mine. It doesn't deal with stdin or stdout, so if you need those then use one of the other answers on this page. :)
// promisify `child_process`
// This is a very nice trick :-)
this.promiseFromChildProcess = function (child) {
  return new Promise((resolve, reject) => {
    // the 'error' event passes an Error object, not a code/signal pair
    child.addListener('error', (err) => {
      console.log('ChildProcess error', err);
      reject(err);
    });
    child.addListener('exit', (code, signal) => {
      if (code === 0) {
        resolve(code);
      } else {
        console.log('ChildProcess error', code, signal);
        reject(code);
      }
    });
  });
};
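Paired with spawn, usage might look like this (the command is just an example):

const { spawn } = require('child_process');

const child = spawn('ls', ['-lh']);
this.promiseFromChildProcess(child)
  .then((code) => console.log('exited with code', code))
  .catch((err) => console.error('failed:', err));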

How to catch error while using fork child process in node?

I am using the fork method to spawn a child process in my Electron app; my code looks like this:
'use strict'
const childProcess = require('child_process');
const fixPath = require('fix-path');

let func = () => {
  fixPath();
  const child = childProcess.fork('node /src/script.js --someFlags', {
    detached: true,
    stdio: 'ignore',
  });
  child.on('error', (err) => {
    console.log("\n\t\tERROR: spawn failed! (" + err + ")");
  });
  child.stderr.on('data', function(data) {
    console.log('stdout: ' + data);
  });
  child.on('exit', (code, signal) => {
    console.log(code);
    console.log(signal);
  });
  child.unref();
};
But my child process exits immediately with exit code 1. Is there a way I can catch this error? When I use the childProcess.exec method I can catch errors using stdout.on('error', ...). Is there a similar thing for the fork method? If not, any suggestions on how I can work around this?
Setting the option silent: true and then using the stderr.on() event handler, we can catch the error, if any. Please check the sample code below:
let func = () => {
  const child = childProcess.fork(path, args, {
    silent: true, // pipe the child's stdio so stderr can be read
    detached: true,
    // note: do not also pass stdio: 'ignore' -- an explicit stdio option
    // overrides silent and leaves child.stderr as null
  });
  child.on('error', (err) => {
    console.log("\n\t\tERROR: spawn failed! (" + err + ")");
  });
  child.stderr.on('data', function(data) {
    console.log('stderr: ' + data);
  });
  child.on('exit', (code, signal) => {
    console.log(code);
    console.log(signal);
  });
  child.unref();
};
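Separately, note that fork(modulePath[, args][, options]) expects a module path plus an args array, not a shell-style command string; passing 'node /src/script.js --someFlags' as one string makes fork treat the whole thing as a (nonexistent) file path, which by itself explains an immediate exit. A minimal sketch of the intended call, reusing the path and flag from the question:

const childProcess = require('child_process');

// no 'node' prefix needed: fork always runs the module with the node executable
const child = childProcess.fork('/src/script.js', ['--someFlags'], {
  silent: true, // pipe the child's stdio so stderr can be observed
});
child.stderr.on('data', (data) => console.error('stderr: ' + data));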

run a few node.js web servers from within a node.js application

I would like to control a few web sites using a UI to start and stop them, changing the ports that the different web servers listen to.
PM2 is a command-line utility to manage node sites, but it does not have a user interface that I can give my customer.
How can I run a node.js web site from within another node.js web application?
The following node.js code does not seem to do the job:
const { exec } = require('child_process');
....
exec('node app.js', (err, stdout, stderr) => {
  if (err) {
    console.log(`err: ${err}`);
    return;
  }
  // the *entire* stdout and stderr (buffered)
  console.log(`stdout: ${stdout}`);
  console.log(`stderr: ${stderr}`);
});
Note: regular Linux commands (instead of the 'node app.js' command) work as expected.
I got the following code to work, in case you want to do the same.
This is the code on the server that spawns a new web server:
const { spawn } = require('child_process');

app.get('/start', (req, res) => {
  var node = spawn('node', ['app.js &'], { shell: true });
  node.stdout.on('data', function (data) {
    console.log('stdout: ' + data.toString());
  });
  node.stderr.on('data', function (data) {
    console.log('stderr: ' + data.toString());
  });
  node.on('exit', function (code) {
    console.log('child process exited with code ' + code.toString());
  });
  // Notify client
  res.status(200).send({});
});
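A note on why the exec attempt in the question appears to do nothing: exec buffers the child's output and only invokes its callback once the process exits, and a web server normally never exits, so the callback never fires. If a fire-and-forget server process is all you need, a detached spawn is one option (a sketch, not production code):

const { spawn } = require('child_process');

// start the server detached so it isn't tied to the parent's lifetime
const server = spawn('node', ['app.js'], {
  detached: true,
  stdio: 'ignore', // don't keep the parent alive through the child's stdio
});
server.unref(); // let the parent's event loop exit independently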
The simplest alternative would be to keep a collection of ExpressJS instances and create/destroy them as needed within NodeJS:
const express = require("express");

var app1 = express();
app1.use("/", function(req, res) { res.send("APP 1"); });

var app2 = express();
app2.use("/", function(req, res) { res.send("APP 2"); });

// start them
var server1 = app1.listen(9001);
var server2 = app2.listen(9002);

// close the first one
server1.close();
// port 9001 is closed
// port 9002 is still listening
However, if you need to spawn independent processes, you could have:
const { spawn } = require("child_process");

let runningProcesses = 0;

// Prevent Ctrl+C from killing the parent process before the children exit
process.on("SIGINT", () => {
  console.log("Terminating the process...");
  if (runningProcesses > 0) {
    setTimeout(() => process.exit(), 3000);
  } else {
    process.exit();
  }
});

function asyncSpawn(command, parameters = []) {
  return new Promise((resolve, reject) => {
    runningProcesses++;
    console.log(command, ...parameters, "\n");
    spawn(command, parameters, {
      stdio: "inherit"
    })
      // 'error' fires if the process could not be spawned at all
      .on("error", reject)
      // 'close' fires once the process has exited and its stdio has closed
      .on("close", code => {
        runningProcesses--;
        if (code)
          reject(new Error(command + " process exited with code " + code));
        else resolve();
      });
  });
}
And launch new processes like this:
asyncSpawn("node", ["server.js", "--param1"])
.then(() => console.log("DONE"))
.catch(err => console.error(err));

nodejs exec ffmpeg, callback is fired before ffmpeg finishes

I run an ffmpeg command via a nodejs script:
var exec = require('child_process').exec;

exec('ffmpeg -with -some -options somefile.mp4', function(error, stdout, stderr) {
  // some processing
});
I've noticed that sometimes the callback fires even before ffmpeg finishes. How can I make sure the callback processing runs only after ffmpeg finishes? And what could cause the callback to fire before the process is done?
Instead of exec, use child_process.spawn. You can then have callbacks for different events.
const { spawn } = require("child_process");

this.ffmpeg = spawn("ffmpeg",
  "-f dshow -some -other -options somefile.mp4".split(" "),
  { shell: true });

this.ffmpeg.on('error', (error) => {
  console.log("ffmpeg error: " + error);
});
this.ffmpeg.stderr.on('data', (data) => {
  console.error(`stderr: ${data}`);
});
this.ffmpeg.on('exit', (code, signal) => {
  console.log("ffmpeg shutdown: code " + code + ", signal " + signal);
});
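As for what can make exec's callback fire "early": exec buffers the child's stdout/stderr up to maxBuffer (1 MB by default in current Node), and ffmpeg is very chatty on stderr; when the limit is exceeded, Node terminates the child and invokes the callback with an error before the encoding is done. If you want to stay with exec, here is a sketch raising the limit (the ffmpeg flags are placeholders from the question):

var exec = require('child_process').exec;

exec('ffmpeg -with -some -options somefile.mp4',
  { maxBuffer: 10 * 1024 * 1024 }, // 10 MB instead of the default
  function(error, stdout, stderr) {
    if (error) return console.error('ffmpeg failed:', error);
    // reached only after ffmpeg has actually exited
  });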
This is my approach:

const cp = require('child_process');

downloadHLS = function (uri, opath) {
  var self = this;
  // defaults
  var loglevel = self.logger.isDebug() ? 'debug' : 'warning';
  return new Promise((resolve, reject) => {
    const args = [
      '-y',
      '-loglevel', loglevel,
      '-v', 'quiet',
      '-i', uri,
      opath
    ];
    const opts = {
      cwd: self._options.tempDir
    };
    const child = cp.spawn('ffmpeg', args, opts);
    child.stderr.on('data', (data) => {
      self.logger.warn(`stderr: ${data}`);
    });
    child.on('message', msg => self.logger.info(msg)); // only fires over an IPC channel
    child.on('error', reject);
    child.on('exit', (code, signal) => {
      self.logger.info("downloadHLS [exit] code:%s signal:%s", code, signal);
    });
    child.on('close', () => {
      self.logger.info("downloadHLS [close]");
      return resolve();
    });
  });
}; // downloadHLS
The event that signals ffmpeg has finished processing is close. The exit event can fire before the child's stdio streams have finished, so resolve the promise on close.

nodejs child process executing multiple times

I'm using a Node child process to execute a Python script when a URL is hit. The thing is, the process runs multiple times even though the URL is only requested once.
Here is the code:
const exec = require('child_process').exec;

server.get('/', function(req, res, next) {
  console.log('spawning process');
  var child = exec('python reporter.py', function(error, stdout, stderr) {
    if (error || stderr) return console.log(error, stderr);
    var data = JSON.parse(stdout);
    console.log('Process ready');
  });
});
It's possible that whatever you're using to send the GET request is retrying it because it never gets a response. So send a response:
e.g.
server.get('/', function(req, res, next) {
  console.log('spawning process');
  var child = exec('python reporter.py', function(error, stdout, stderr) {
    if (error || stderr) return console.log(error, stderr);
    var data = JSON.parse(stdout);
    console.log('Process ready');
    res.status(200).send();
  });
});
For anyone who is facing the same issue in the latest version of node:
const { spawn } = require('child_process');
const ls = spawn('ls', ['-lh', '/usr']);

ls.stdout.on('data', (data) => {
  console.log(`stdout: ${data}`);
});
ls.stderr.on('data', (data) => {
  console.error(`stderr: ${data}`);
});
ls.on('close', (code) => {
  console.log(`child process exited with code ${code}`);
});
as per nodejs.org.
Capture your data in the 'data' handlers and try putting your response in the 'close' handler; close gets executed when all the data processing is completed.
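Putting the two answers together: run the child inside the route handler and send the HTTP response from the close handler, so the client gets a reply (and doesn't retry) only once all output has arrived. A sketch reusing the question's script name:

const { spawn } = require('child_process');

server.get('/', function (req, res) {
  const child = spawn('python', ['reporter.py']);
  let stdout = '';
  child.stdout.on('data', (data) => { stdout += data; });
  child.on('close', (code) => {
    // reply only after the process has exited and all output is in
    if (code !== 0) return res.status(500).send({ code });
    res.status(200).send(JSON.parse(stdout));
  });
});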
