I am making an application that handles Java commands and I want to send a Java command to my child spawn process. How can I achieve this?
const {
spawn
} = require('child_process');
// Start child process
var child = spawn('java', ['-server', '-Xms1G', `-Xmx${document.getElementById('ram').value}G`, `-jar`, `${jarname}`], {
cwd: `${jarfolder}`
});
child.stdout.on('data', (data) => {
$("ol").append(`<li>${data}</li><br>`);
});
child.stderr.on('data', (data) => {
$("ol").append(`<li>${data}</li><br>`);
});
child.on('error', (error) => console.log(`error: ${error.message}`));
child.on('exit', (code, signal) => {
if (code) $("ol").append(`<li>Process exit with code: ${code}</li><br>`);
if (signal) $("ol").append(`<li>Process killed with signal: ${signal}</li><br>`);
});
That is my demo code.
For example, I have a button that calls a function:
function test() {
var command = document.getElementById("command").innerHTML;
// send the command to the terminal
}
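A minimal sketch of the missing piece (the write below would go where the comment is), assuming the jar reads commands from its standard input and that the child variable from the spawn snippet above is still in scope:
// spawn's stdio defaults to 'pipe', so child.stdin is writable
// the trailing newline lets the java process read the command as a complete line
child.stdin.write(command + '\n');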
I'm using the Bluebird promise library with Node.js, and it's great! But I have a question:
If you take a look at the documentation of Node's child_process.exec and child_process.execFile, you can see that both of these functions return a ChildProcess object.
So what's the recommended way to promisify such functions?
Note that the following works (I get a Promise object):
var Promise = require('bluebird');
var execAsync = Promise.promisify(require('child_process').exec);
var execFileAsync = Promise.promisify(require('child_process').execFile);
But how can one get access to the original return value of the original Node.js functions? (In these cases I would need to be able to access the originally returned ChildProcess objects.)
Any suggestion would be appreciated!
EDIT:
Here is an example code which is using the return value of the child_process.exec function:
var exec = require('child_process').exec;
var child = exec('node ./commands/server.js');
child.stdout.on('data', function(data) {
console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
console.log('stderr: ' + data);
});
child.on('close', function(code) {
console.log('closing code: ' + code);
});
But if I use the promisified version of the exec function (execAsync from above), the return value will be a promise, not a ChildProcess object. This is the real problem I am talking about.
I would recommend using standard JS promises built into the language over an additional library dependency like Bluebird.
If you're using Node 10+, the Node.js docs recommend using util.promisify which returns a Promise<{ stdout, stderr }> object. See an example below:
const util = require('util');
const exec = util.promisify(require('child_process').exec);
async function lsExample() {
try {
const { stdout, stderr } = await exec('ls');
console.log('stdout:', stdout);
console.log('stderr:', stderr);
} catch (e) {
console.error(e); // should contain code (exit code) and signal (that caused the termination).
}
}
lsExample()
Note that the promise only rejects when the command fails (a non-zero exit code or termination by a signal); output on stderr alone does not cause a rejection, so inspect stderr as well.
It sounds like you'd like to return two things from the call:
the ChildProcess
a promise that resolves when the ChildProcess completes
So "the recommended way to promisify such functions"? Don't.
You're outside the convention. Promise returning functions are expected to return a promise, and that's it. You could return an object with two members (the ChildProcess & the promise), but that'll just confuse people.
I'd suggest calling the unpromisified function, and creating a promise based off the returned childProcess. (Maybe wrap that into a helper function)
This way, it's quite explicit for the next person who reads the code.
Something like:
var Promise = require('bluebird');
var exec = require('child_process').exec;
function promiseFromChildProcess(child) {
return new Promise(function (resolve, reject) {
child.addListener("error", reject);
child.addListener("exit", resolve);
});
}
var child = exec('ls');
promiseFromChildProcess(child).then(function (result) {
console.log('promise complete: ' + result);
}, function (err) {
console.log('promise rejected: ' + err);
});
child.stdout.on('data', function (data) {
console.log('stdout: ' + data);
});
child.stderr.on('data', function (data) {
console.log('stderr: ' + data);
});
child.on('close', function (code) {
console.log('closing code: ' + code);
});
If you're just wanting to promisify specifically child_process.exec() and child_process.execFile(), in recent node versions there is a better answer here.
Since Node v12 the built-in util.promisify allows access to the ChildProcess object in the returned Promise for built-in functions where it would have been returned by the un-promisified call. From the docs:
The returned ChildProcess instance is attached to the Promise as a child property.
This correctly and simply satisfies the need to access the ChildProcess in the original question, and makes the other answers out of date, provided that Node v12+ can be used.
Adapting the example (and concise style) provided by the questioner, access to the ChildProcess can be achieved like this:
const util = require('util');
const exec = util.promisify(require('child_process').exec);
const promise = exec('node ./commands/server.js');
const child = promise.child;
child.stdout.on('data', function(data) {
console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
console.log('stderr: ' + data);
});
child.on('close', function(code) {
console.log('closing code: ' + code);
});
// i.e. you can then await the promisified exec call to complete (inside an async function)
const { stdout, stderr } = await promise;
Here's another way:
const { exec } = require('child_process');

function execPromise(command) {
return new Promise(function(resolve, reject) {
exec(command, (error, stdout, stderr) => {
if (error) {
reject(error);
return;
}
resolve(stdout.trim());
});
});
}
Use the function:
execPromise(command).then(function(result) {
console.log(result);
}).catch(function(e) {
console.error(e.message);
});
Or with async/await:
try {
var result = await execPromise(command);
} catch (e) {
console.error(e.message);
}
There's probably not a way to do this nicely that covers all use cases. But for limited cases, you can do something like this:
/**
 * Promisified child_process.exec
 *
 * @param cmd
 * @param opts See child_process.exec node docs
 * @param {stream.Writable} opts.stdout If defined, child process stdout will be piped to it.
 * @param {stream.Writable} opts.stderr If defined, child process stderr will be piped to it.
 *
 * @returns {Promise<{ stdout: string, stderr: string }>}
 */
const { exec } = require('child_process');

function execp(cmd, opts) {
opts || (opts = {});
return new Promise((resolve, reject) => {
const child = exec(cmd, opts,
(err, stdout, stderr) => err ? reject(err) : resolve({
stdout: stdout,
stderr: stderr
}));
if (opts.stdout) {
child.stdout.pipe(opts.stdout);
}
if (opts.stderr) {
child.stderr.pipe(opts.stderr);
}
});
}
This accepts opts.stdout and opts.stderr arguments, so that stdio can be captured from the child process.
For example:
const stream = require('stream');

execp('ls ./', {
stdout: new stream.Writable({
write: (chunk, enc, next) => {
console.log(chunk.toString(enc));
next();
}
}),
stderr: new stream.Writable({
write: (chunk, enc, next) => {
console.error(chunk.toString(enc));
next();
}
})
}).then(() => console.log('done!'));
Or simply:
execp('ls ./', {
stdout: process.stdout,
stderr: process.stderr
}).then(() => console.log('done!'));
Just want to mention that there's a nice tool that will solve your problem completely:
https://www.npmjs.com/package/core-worker
This package makes it a lot easier to handle processes.
import { process } from "CoreWorker";
import fs from "fs";
// resolves once the process reports "Server is ready."
const result = await process("node Server.js", "Server is ready.").ready(1000);
// resolves once the copy process has finished
const copyResult = await process("cp path/to/file /newLocation/newFile").death();
or combine these functions:
import { process } from "core-worker";
const simpleChat = process("node chat.js", "Chat ready");
setTimeout(() => simpleChat.kill(), 360000); // close the chat after 360000 ms (six minutes)
simpleChat.ready(500)
.then(console.log.bind(console, "You are now able to send messages."))
.then(::simpleChat.death)
.then(console.log.bind(console, "Chat closed"))
.catch(() => /* handle err */);
Here are my two cents. It uses spawn, which streams the output to stdout and stderr while also capturing it in buffers; the buffered output is then used to resolve or reject the promise.
This is written in TypeScript; feel free to remove the typings if you are using JavaScript:
import { spawn, SpawnOptionsWithoutStdio } from 'child_process'
const spawnAsync = async (
command: string,
options?: SpawnOptionsWithoutStdio
) =>
new Promise<Buffer>((resolve, reject) => {
// note: a naive whitespace split; quoted arguments are not handled
const [spawnCommand, ...args] = command.split(/\s+/);
const spawnProcess = spawn(spawnCommand, args, options);
const chunks: Buffer[] = [];
const errorChunks: Buffer[] = [];
spawnProcess.stdout.on("data", (data) => {
process.stdout.write(data.toString());
chunks.push(data);
});
spawnProcess.stderr.on("data", (data) => {
process.stderr.write(data.toString());
errorChunks.push(data);
});
spawnProcess.on("error", (error) => {
reject(error);
});
spawnProcess.on("close", (code) => {
if (code !== 0) { // treat any non-zero exit code as a failure
reject(Buffer.concat(errorChunks).toString());
return;
}
resolve(Buffer.concat(chunks));
});
});
Just another example: you might run into issues when running multiple commands and destructuring into the same const names; you can rename them like this.
const util = require('util');
const exec = util.promisify(require('child_process').exec);
async function runCommands() {
try {
const { stdout, stderr } = await exec('ls');
console.log('stdout:', stdout);
console.log('stderr:', stderr);
const { stdout: stdoutTwo, stderr: stderrTwo } = await exec('ls');
console.log('stdoutTwo:', stdoutTwo);
console.log('stderrTwo:', stderrTwo);
const { stdout: stdoutThree, stderr: stderrThree } = await exec('ls');
console.log('stdoutThree:', stdoutThree);
console.log('stderrThree:', stderrThree);
} catch (e) {
console.error(e); // should contain code (exit code) and signal (that caused the termination).
}
}
runCommands()
Here's mine. It doesn't deal with stdin or stdout, so if you need those then use one of the other answers on this page. :)
// promisify `child_process`
// This is a very nice trick :-)
this.promiseFromChildProcess = function (child) {
return new Promise((resolve, reject) => {
// the 'error' event passes an Error object, not a code/signal pair
child.addListener('error', (err) => {
console.log('ChildProcess error', err);
reject(err);
});
child.addListener('exit', (code, signal) => {
if (code === 0) {
resolve(code);
} else {
console.log('ChildProcess error', code, signal);
reject(code);
}
});
});
};
I'm trying to run ripgrep from my Node app and am seeing strange behavior with child_process.spawn: none of the events fire and the app never finishes (it gets stuck somewhere inside the spawn call):
import { spawn } from 'child_process';
async function run() {
await spawnWrapper('rg', ['-F', '"demo"'], { cwd: __dirname });
}
export function spawnWrapper(command, args, options) {
return new Promise((resolve, reject) => {
let stdout = '';
let stderr = '';
const child = spawn(command, args, options);
console.log('spawn wrapper');
child.on('close', (code, signal) => {
console.log('close');
resolve({ code, signal, stdout, stderr });
});
child.on('error', (error) => {
console.log('error');
(error as any).stderr = stderr;
reject(error);
});
child.on('exit', (code, signal) => {
console.log('exit');
resolve({ code, signal, stdout, stderr });
});
child.stdout.setEncoding('utf8');
child.stderr.setEncoding('utf8');
child.stdout.on('data', (data) => {
console.log('stdout data');
stdout += data;
});
child.stderr.on('data', (data) => {
console.log('stderr data');
stderr += data;
});
});
}
I only get "spawn wrapper" in the console, no other events. I've never seen this behavior with other binaries, maybe it's something with ripgrep but still, shouldn't I be getting at least some hints by Node? Any suggestions on how to debug this?
It was caused by ripgrep waiting for input which was not obvious to me (on command line, it just executes straight away). Details here: https://github.com/BurntSushi/ripgrep/issues/410
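Based on that issue, a minimal sketch of two ways to avoid the hang (the explicit '.' search path and the stdin.end() call are my assumptions, not part of the original question):
import { spawn } from 'child_process';

// pass an explicit search path so rg searches files instead of waiting on piped stdin;
// note that with spawn there is no shell, so the pattern needs no extra quotes
const child = spawn('rg', ['-F', 'demo', '.'], { cwd: __dirname });

// alternatively, keep the original arguments but close the child's stdin:
// child.stdin.end();

child.on('close', (code) => console.log('close', code));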
I'm using a Node child process to execute a Python script when a URL is hit. The thing is, I see the process run multiple times even though the URL is only hit once.
Here is the code:
var exec = require('child_process').exec;

server.get('/', function(req, res, next) {
console.log('spawning process');
var child = exec('python reporter.py', function(error, stdout, stderr) {
if (error || stderr) return console.log(error, stderr);
var data = JSON.parse(stdout);
console.log('Process ready');
});
});
It's possible that whatever you're using to send the GET request is retrying it when it doesn't get a response. So send a response:
e.g.
server.get('/', function(req, res, next) {
console.log('spawning process');
var child = exec('python reporter.py', function(error, stdout, stderr) {
if (error || stderr) {
console.log(error, stderr);
return res.status(500).send(); // respond on failure too, so the client does not retry
}
var data = JSON.parse(stdout);
console.log('Process ready');
res.status(200).send()
});
});
For anyone who is facing the same issue in the latest version of node:
const { spawn } = require('child_process');
const ls = spawn('ls', ['-lh', '/usr']);
ls.stdout.on('data', (data) => {
console.log(`stdout: ${data}`);
});
ls.stderr.on('data', (data) => {
console.error(`stderr: ${data}`);
});
ls.on('close', (code) => {
console.log(`child process exited with code ${code}`);
});
as per nodejs.org
Capture your data in the "data" event and put your response in the "close" handler; "close" fires once all of the data processing is complete.
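Putting that together with the route from the question, a rough sketch (the python reporter.py command and the server.get route come from the question; treating stderr output or a non-zero exit code as a failure is my assumption):
const { spawn } = require('child_process');

server.get('/', function (req, res, next) {
  const child = spawn('python', ['reporter.py']);
  let stdout = '';
  let stderr = '';

  child.stdout.on('data', (data) => { stdout += data; });
  child.stderr.on('data', (data) => { stderr += data; });

  child.on('close', (code) => {
    // respond exactly once, after all output has been processed
    if (code !== 0 || stderr) {
      console.log(code, stderr);
      return res.status(500).send();
    }
    res.status(200).send(JSON.parse(stdout));
  });
});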
I have the code below:
// Parent.js
var cp = require('child_process');
var child = cp.fork('./pChild.js');
child.on('message', function(m) {
// Receive results from child process
console.log('received: ' + m);
});
// Send child process some work
child.send('First Fun');
// pChild.js
process.on('message', function(m) {
console.log("Helloooooooooo from pChild.js")
// Pass results back to parent process
process.send("Fun1 complete");
});
How do I handle, in the parent, an error thrown from pChild.js, and then kill the process?
Unhandled errors in the child process will cause it to exit, which will emit the 'exit' event on the child object.
child.on('exit', function (code, signal) {
console.log('Child exited:', code, signal);
});
If the error is handled within the child, it can be sent as another message:
// in pChild.js
/* ... */.on('error', function (error) {
process.send({ error: error.message || error });
});
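On the parent side, a small sketch of reacting to such a message and stopping the child (the { error } message shape matches the snippet above; killing with the default SIGTERM is my choice, not something from the original answer):
// in Parent.js
child.on('message', function (m) {
  if (m && m.error) {
    console.error('Error reported by child:', m.error);
    child.kill(); // sends SIGTERM by default
  }
});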
Updated answer
On the child:
process.on('uncaughtException', (err) => {
process.send({isError: true});
});
On the parent (child here is the handle returned by cp.fork()):
child.on('message', ({ isError, data }) => {
if (isError) {
child.kill('SIGINT');
return;
}
console.log('message from child', data);
child.kill('SIGINT');
});