I want to capture the data written to stderr of child process in parent.
const cluster = require('cluster');

if (cluster.isMaster) {
  // By default a forked worker inherits the master's stdio, which is why
  // worker.process.stderr was null. `silent: true` pipes the worker's
  // stdout/stderr back to the master so they can be read here.
  cluster.setupMaster({ silent: true });

  const child = cluster.fork();

  // Capture everything the worker writes to stderr (e.g. console.error).
  // Fix: the original line was missing the ")" closing console.log(...).
  cluster.workers[child.id].process.stderr.on('data', (data) => {
    console.log(data.toString());
  });
} else {
  console.error("Child data");
}
I tried to add a callback to the child process stderr in master. But stderr is null in the parent.
How can I capture "Child data"(stderr of child process) in parent?
The requirement is to use cluster node module and capture console.error. I can not use process.send to send data to parent.
I prefer to do this with a Promise so I promisify the exec command like this:
const util = require("util");
const exec = util.promisify(require("child_process").exec);

/**
 * Run a shell command and log any stdout/stderr it produced.
 * Rejects (with an Error carrying `.code`) when the command exits non-zero.
 * @param {string} cmd - the full command line, executed in a shell
 * @returns {Promise<{stdout: string, stderr: string}>}
 */
let runCommand = async (cmd) => {
  const result = await exec(cmd);
  if (result.stdout) console.log("StdOut:", result.stdout);
  if (result.stderr) console.log("StdErr:", result.stderr);
  return result;
};

// Fix: both string literals below were unterminated in the original,
// making the whole snippet a syntax error.
runCommand("some command")
  .then(
    (result) => console.log("Finished successfully"),
    (reason) => console.log("Failed with", reason.code)
  );
If the command exits with a non-zero code, the promise rejects instead of resolving, so you can handle the failure in the rejection handler as well.
Related
I would like to start a node-script called myScript.js, that starts a child process npm start, store the stdout of npm start into a global variable let myVar, and make sure that if the main program myScript.js is exited for any reason, the child process is killed as well. Nothing from the child's stdout should appear in the terminal window after Ctrl-C or similar.
My current solution does not kill on close:
const childProcess = require('child_process');

// Latest chunk of the child's stdout (overwritten on every 'data' event).
let myVar = '';

const child = childProcess.spawn('npm', ['start'], {
  detached: false
});

// Pause the child's stdin and kill it when this process goes away.
const reapChild = function () {
  child.stdin.pause();
  child.kill();
};

// 'exit' only fires on a normal exit or an explicit process.exit().
// It is NOT emitted when the process dies from an unhandled signal, so
// trap Ctrl-C (SIGINT) and SIGTERM and exit explicitly — this is why the
// original version did not kill the child on close.
process.on('exit', reapChild);
process.on('SIGINT', function () { process.exit(130); });
process.on('SIGTERM', function () { process.exit(143); });

child.stdout.on('data', (data) => {
  myVar = `${data}`;
});
Can this be accomplished?
Small change, but I think that might look something like this:
const childProcess = require('child_process')

const child = childProcess.spawn('npm', ['start'], {shell:true});

var myVar = '';
child.stdout.setEncoding('utf8');

// Keep the most recent stdout chunk from the child.
child.stdout.on('data', function(data) {
  myVar = data.toString();
});

child.on('close', function(exitcode) {
  // on the close of the child process, use standard output or maybe call a function
});

process.on('exit', function() {
  // I don't think pausing std.in is strictly necessary
  child.kill()
})

// Fix: 'exit' is not emitted when the parent dies from an unhandled
// signal, so Ctrl-C would have left the child running. Trap the common
// termination signals and exit explicitly so the handler above runs.
process.on('SIGINT', function() { process.exit(130); });
process.on('SIGTERM', function() { process.exit(143); });
Further reading
node's child_process documentation, including event names
geeksforgeeks article on child_process.spawn
This StackOverflow thread on getting and using the output of a child_process' pipe
This StackOverflow thread on the difference between the close and exit events
I currently pipe the stdout of a child process to an (Express) response object like this:
try{
// Spawn the external program and stream its stdout directly into the
// (Express) response object as chunks arrive.
let proc = spawn(exe, args);
proc.stdout.pipe(response);
// Wait until the child terminates; `ending.completion` resolves with the
// exit code, or rejects on a kill-by-signal or spawn error.
let exitCode = await ending.completion(proc);
}
finally {
// my own clean up goes here (e.g. releasing locks)
}
where ending.completion is defined as
// Resolves with the child's numeric exit code, or rejects with the signal
// that killed it, or with the 'error' event's error (e.g. failure to
// spawn). NOTE(review): the parameter is named `process`, which shadows
// Node's global `process` inside this function — consider renaming it to
// `child` for clarity.
async function(process){
return new Promise(function(resolve,reject){
// Settle-once guard: whichever of 'exit' / 'error' fires first wins.
let done = false;
process.on('exit', (code, signal)=>{
if(!done){
done = true;
// `code` is null when the child was terminated by a signal; in that
// case reject with the signal instead.
if(code !== null){
resolve(code);
} else {
reject(signal);
}
}
});
// 'error' covers spawn/kill failures, where 'exit' may never fire.
process.on('error', (err)=>{
if(!done){
done = true;
reject(err);
}
});
});
};
This generally works fine except for the fact that proc internally does a lot of waiting (on the download/transfer) before it can complete/return. I would prefer allowing it to dump its output into a buffer, which then trickles into the response object as the download proceeds. How can I introduce such a buffer?
(A second (and I think related) problem is that if the download is aborted client-side, proc never completes but keeps waiting on its stdout.)
Looks like exec method does exactly what you need.
Spawns a shell then executes the command within that shell, buffering
any generated output.
Use it instead of spawn. Note that unlike spawn, exec expects the whole command (with all the args) as a single string. Here is an example from the official documentation:
const util = require('util');
// Fix: the original was missing the ")" closing util.promisify(...).
const exec = util.promisify(require('child_process').exec);

/**
 * Run `ls` in a shell and print its buffered stdout/stderr.
 * @returns {Promise<void>}
 */
async function lsExample() {
  const { stdout, stderr } = await exec('ls');
  console.log('stdout:', stdout);
  console.error('stderr:', stderr);
}

// Don't leave the promise floating — surface a failed `ls` too.
lsExample().catch(console.error);
Began to study worker_threads in nodejs
After completion of work, the last worker must finish the script
process.exit();
my index.js
const { Worker } = require("worker_threads");
const logUpdate = require("log-update");

// Number of workers to launch and the payload handed to each of them.
const threads = 100;
const someData = 'some data';

// Per-worker message counters, all starting at zero.
let names = Array.from({ length: threads }, () => 0);

// Bump the counter for the worker that posted `payload` and redraw the
// one-line-per-thread status display in place.
function handleMessage(payload, index) {
  names[index]++;
  const lines = names.map((status, i) => `Thread ${i}: ${status} ${payload}`);
  logUpdate(lines.join("\n"));
}

for (let i = 0; i < threads; i++) {
  const worker = new Worker(require.resolve("./worker.js"), {
    workerData: { someData, i }
  });
  worker.on("message", (payload) => handleMessage(payload, i));
  worker.on("error", (err) => console.log(err));
  worker.on("exit", (code) => console.log(`Exit code: ${code}`));
}
worker.js
const { parentPort, workerData } = require("worker_threads" )

// Unpack the payload the main thread passed via `workerData`.
const { someData, i } = workerData;

// Announce ourselves to the parent, then end this worker's execution.
// (The async IIFE in the original added nothing — there was no await.)
parentPort.postMessage(`worker start ${i} some data ${someData}`);
process.exit();
The workers are created and run to completion, but after they have all finished, the main script itself never exits.
From whatever knowledge I have, I understand that if there is no error caused in the worker until its exit, then the return code is 0, otherwise 1 (automatically). Look at process exit codes.
So when you do process.exit() at that time you can pass the code as an argument. (or not have process.exit() at all).
Generally what I have seen, and do is:
Wrap the worker creation in a Promise returning function.
NOTE:
worker.terminate() : terminates the worker thread.
(Personal choice) I choose to do parentPort.postMessage(JSON.stringify(true)) if I complete my task
in my worker thread. Hence worker.on('message', message => {if (message === true) {worker.terminate();resolve(true);} else {console.log(message)}}); in the main thread.
Whenever I have to create threads for repetitive task-delegations. I use .map function on the array. the function inside returns the same return value as that from my worker creator (Promised returning) function. Hence eventually I map the array into array of promises.
Then I put this array of promises into Promise.all().
Then I check for returned values of Promise.all()
Checkout:
My Gist (as an example for Promise.all()).
Medium (Just for worker_thread basics. An Awesome article)
I would like to know if it is possible to wait for all child process created using the spawn function to finish before continuing execution.
I have a code looking like this:
const spawn = window.require('child_process').spawn;
let processes = [];
let thing = [];
// paths.length = 2
paths.forEach((path) => {
const pythonProcess = spawn("public/savefile.py", ['-d', '-j', '-p', path, tempfile]);
pythonProcess.on('exit', () => {
fs.readFile(tempfile, 'utf8', (err, data) => {
thing.push(...)
});
});
processes.push(pythonProcess);
});
console.log(processes) // Here we have 2 child processes
console.log(thing) // empty array.. the python processes didnt finish yet
return thing // of course it doesn't work. I want to wait for all the processes to have finished their callbacks to continue
As you can guess, I would like to know how I could get all the python scripts running at the same time, and wait for all of them to finish to continue my js code.
I'm running node 10.15.3
Thank you
ForEach to push Promise into an array of Promise and Promise.all()
Have you tried spawnSync ?
Is generally identical to spawn with the exception that the function
will not return until the child process has fully closed.
import { spawnSync } from "child_process";

// spawnSync blocks until the child has fully closed and hands back its
// exit status and captured output in one result object. Fix: the original
// discarded that result, which is the whole point of buffering the output.
const result = spawnSync('ls', ['-la'], { encoding: 'utf8' });
console.log(result.stdout);
I'm building a discord bot that wraps a terraria server in node.js so server users can restart the server and similar actions. I've managed to finish half the job, but I can't seem to create a command to execute commands on the terraria server. I've set it to write the command to the stdin of the child process and some basic debugging verifies that it does, but nothing apparently happens.
In the Node.js docs for child process stdin, it says "Note that if a child process waits to read all of its input, the child will not continue until this stream has been closed via end()." This seems likely to be the problem, as calling the end() function on it does actually send the command as expected. That said, it seems hard to believe that I'm unable to continuously send commands to stdin without having to close it.
Is this actually the problem, and if so what are my options for solving it? My code may be found below.
const discordjs = require("discord.js");
const child_process = require("child_process");
const tokens = require("./tokens");
// Discord client that wraps a Terraria dedicated server child process.
const client = new discordjs.Client();

// Path and launch arguments for the Terraria server executable.
const terrariaServerPath = "C:\\Program Files (x86)\\Steam\\steamapps\\common\\Terraria\\TerrariaServer.exe"
const terrariaArgs = ['-port', '7777', "-maxplayers", "8", "-world", "test.wld"]

// `var` because !restart re-assigns this with a fresh child process.
var child = child_process.spawn(terrariaServerPath, terrariaArgs);

client.on('ready', () => {
console.log(`Logged in as ${client.user.tag}!`);
});

client.on('disconnect', () => {
client.destroy();
});

// Command dispatch: only messages in the #terraria channel are handled.
client.on('message', msg => {
if (msg.channel.name === 'terraria') {
var msgSplit = msg.content.split(" ");
if (msgSplit[0] === "!restart") {
// Kill the current server and spawn a replacement, re-attaching the
// stdout/stderr loggers to the new child.
child.kill();
child = child_process.spawn(terrariaServerPath, terrariaArgs);
registerStdio();
msg.reply("restarting server")
}
if (msgSplit[0] === "!exec") {
// Forward the first argument to the server's stdin as one command line.
msg.reply(msgSplit[1]);
child.stdin.write(msgSplit[1] + "\n");
// NOTE(review): end() closes stdin permanently, so a second !exec will
// fail — the server apparently only reads its input once the stream is
// closed; confirm whether it needs a pseudo-terminal (e.g. node-pty)
// to accept commands continuously.
child.stdin.end();
}
}
});

client.login(tokens.discord_token);
// Mirror the current child's stdout/stderr onto the bot's own console.
// Must be called again after every respawn, since `child` is replaced.
var registerStdio = function () {
  [
    [child.stdout, console.log],
    [child.stderr, console.error]
  ].forEach(function ([stream, log]) {
    stream.on('data', (chunk) => log(`${chunk}`));
  });
};
registerStdio();
I was able to solve the problem by using the library node-pty. As near as I can tell, the problem was that the child process was not reading the stdin itself and I was unable to flush it. Node-pty creates a virtual terminal object which can be written to instead of stdin. This object does not buffer writes and so any input is immediately sent to the program.