I'm using Node's child process to execute a Python script when a URL is reached. The thing is, I'm getting the process running multiple times even though the URL is only being reached one time.
Here is the code:
// Question code: the handler runs `python reporter.py` via exec() but never
// sends an HTTP response, so the client (or an intermediate proxy) times out
// and retries the request — each retry re-runs the child process.
server.get('/', function(req, res, next) {
console.log('spawning process');
var child = exec('python reporter.py', function(error, stdout, stderr) {
// NOTE(review): treating *any* stderr output as fatal discards stdout even
// when the script succeeded with warnings — confirm that is intended.
if (error || stderr) return console.log(error, stderr);
var data = JSON.parse(stdout);
console.log('Process ready');
});
});
It's possible that whatever you're using to send the GET request is retrying that request when it's not getting a response. So put in a response:
e.g.
server.get('/', function(req, res, next) {
  console.log('spawning process');
  // Run the Python script once per request, and ALWAYS answer the client so
  // it does not time out and retry (retries were re-spawning the process).
  var child = exec('python reporter.py', function(error, stdout, stderr) {
    if (error || stderr) {
      console.log(error, stderr);
      // Previously this path returned without responding, so a failing
      // script still left the client hanging — and retrying.
      return res.status(500).send();
    }
    var data;
    try {
      data = JSON.parse(stdout);
    } catch (parseErr) {
      // Guard: a script that prints non-JSON must not crash the handler.
      console.log('Invalid JSON from reporter.py:', parseErr);
      return res.status(500).send();
    }
    console.log('Process ready');
    res.status(200).send();
  });
});
For anyone who is facing the same issue in the latest version of node:
const { spawn } = require('child_process');

// Launch `ls -lh /usr` and report its diagnostics plus the final exit code.
const listing = spawn('ls', ['-lh', '/usr']);

listing.stderr.on('data', function (chunk) {
  console.error(`stderr: ${chunk}`);
});

listing.on('close', function (exitCode) {
  console.log(`child process exited with code ${exitCode}`);
});
as per nodejs.org
// Capture your data in "data" and try putting your response in the "close".
Close gets executed when all the data processing is completed.
Related
I was using Express with GET to run the following command
app.get('/get_temp_hum', function(req, res) {
// NOTE(review): `process` shadows Node's global `process` object here.
var process = spawn('python',["ECIES_decotp.py"]);
process.stdout.on('data', function(data) {
console.log(data.toString());
} )
process.stderr.on('data', function(data) {
console.log(data.toString());
} )
// BUG (the asker's problem): 'exit' is listened for on the stderr *stream*,
// which never emits that event — exit/close fire on the child process itself.
process.stderr.on('exit', function(code) {
console.log('Exited with code' + code);
} )
// BUG: readFile is asynchronous, so res.send() below runs before the file
// (and before the Python script) has finished — hence the blank first page.
fs.readFile('c.json', 'utf8', function (err, data) {
if (err) throw err;
//console.log(data);
// Implicit global: a stale value from the previous request is what the
// second call ends up serving.
temp_hum_json = data;
})
res.send(temp_hum_json);
});
And I call the URL of that GET so I can have it show the value of res.send(temp_hum_json)
But for some reason, the first time I call it, it shows me a blank page instead, and it will only show the result after I call it again after that. The same thing happens when I call it with Postman, and it happens every time I boot up my server. This is problematic because I need the value to be shown consistently so I can put the URL on the server. It may have something to do with some part being an asynchronous command — how can I fix this?
Spawning a process and readFile in NodeJS are executed asynchronously — that's why.
If you want to get the result consistently you need to use callbacks. When you spawn a process you can get data from that process in real time with
// Fires once per chunk the child writes to stdout, while it is still running.
process.stdout.on("data", data => {
//do whatever you want with data here
})
If the Python process gives you a JSON file and you need to read that file, you should read it when the exit event fires with code 0, which the Python process sends after finishing execution
// BUG FIX: the original attached 'exit' to the stderr *stream*, which never
// emits it. Listen on the child process itself; 'close' fires after the
// process has exited AND its stdio streams have drained, so c.json is
// guaranteed to be completely written by then.
process.on('close', function(code) {
if (code === 0) {
fs.readFile('c.json', 'utf8', function (err, data) {
if (err) throw err;
res.send(data);
})
}
})
The final code must be like this:
app.get('/get_temp_hum', function(req, res) {
  // `py` rather than `process`, so we don't shadow Node's global `process`.
  var py = spawn('python', ["ECIES_decotp.py"]);
  py.stdout.on('data', function(data) {
    // do whatever you want here with stream data from python process
    console.log(data.toString());
  });
  py.stderr.on('data', function(data) {
    console.log(data.toString());
  });
  // BUG FIX: the original listened for 'exit' on the stderr stream, which
  // never fires it. 'close' on the child itself fires once the process has
  // exited and stdio has drained — only then is c.json safe to read.
  py.on('close', function(code) {
    if (code === 0) {
      fs.readFile('c.json', 'utf8', function (err, data) {
        if (err) throw err;
        res.send(data);
      });
    } else {
      // Surface failures instead of leaving the request hanging forever.
      res.status(500).send('python process exited with code ' + code);
    }
  });
});
I would like to control a few web sites using a UI to start and stop them, changing the ports that the different web server's listen to.
PM2 is a command line utility to manage node sites, but does not have a user interface that I can supply my customer.
How can I run a node.js web site from within another node.js web application.
The following node.js code does not seem to do the work.
const { exec } = require('child_process');
....
// Question code: exec() buffers all output and its callback only fires when
// the child *exits*. A long-running `node app.js` server never exits, so the
// callback never runs (and exec's maxBuffer can eventually be exceeded) —
// which is why "nothing seems to happen" for node servers but short-lived
// Linux commands work fine.
exec('node app.js', (err, stdout, stderr) => {
if (err) {
console.log(`err: ${err}`);
return;
}
// the *entire* stdout and stderr (buffered)
console.log(`stdout: ${stdout}`);
console.log(`stderr: ${stderr}`);
});
note: regular linux commands instead of the 'node app.js' command work as expected.
Got the following code to work in case you want to run the same:
This is the code on the server that will spawn a new web server.
// Spawn `node app.js` through the shell and immediately acknowledge the
// HTTP request; the child's output is mirrored into this server's log.
app.get('/start', (req, res) => {
  const child = spawn('node', ['app.js &'], { shell: true });

  // One relay factory instead of two near-identical handlers.
  const relay = (label) => (data) => {
    console.log(label + ': ' + data.toString());
  };
  child.stdout.on('data', relay('stdout'));
  child.stderr.on('data', relay('stderr'));

  child.on('exit', (code) => {
    console.log('child process exited with code ' + code.toString());
  });

  // Notify client
  res.status(200).send( {} );
});
The simplest alternative would be to keep a collection of ExpressJS instances and create/destroy them as needed within NodeJS:
const express = require("express");

// Each Express app is an independent handler; keep the http.Server that
// listen() returns so the site can be stopped individually later.
var app1 = express();
app1.use("/", function(req, res){ res.send("APP 1"); });

var app2 = express();
app2.use("/", function(req, res){ res.send("APP 2"); });

// start them
// BUG FIX: the original called app.listen(...) — `app` is undefined here;
// each instance must be started through its own variable.
var server1 = app1.listen(9001);
var server2 = app2.listen(9002);

// close the first one
server1.close();
// port 9001 is closed
// port 9002 is still listening
However, if you need to spawn independent processess, you could have:
const { spawn } = require("child_process");

// BUG FIX: `runningProcesses` was never declared — an implicit global in
// sloppy mode and a ReferenceError in strict/ESM code.
let runningProcesses = 0;

// Prevent Ctrl+C from killing the parent process before the children exit
process.on("SIGINT", () => {
  console.log("Terminating the process...");
  if (runningProcesses > 0) {
    setTimeout(() => process.exit(), 3000);
  } else {
    process.exit();
  }
});

/**
 * Spawn `command` with `parameters`, inheriting stdio, and resolve when the
 * child finishes. Rejects with an Error on a non-zero exit code or when the
 * process fails to start at all.
 */
function asyncSpawn(command, parameters = []) {
  return new Promise((resolve, reject) => {
    runningProcesses++;
    console.log(command, ...parameters, "\n");
    spawn(command, parameters, {
      stdio: "inherit"
    })
      // BUG FIX: the original also listened on "exit" and treated its first
      // argument (the numeric exit code) as an error, double-settling the
      // promise. "error" is the spawn-failure event; "close" fires exactly
      // once with the final code.
      .on("error", err => {
        runningProcesses--;
        reject(err);
      })
      .on("close", code => {
        runningProcesses--;
        if (code)
          reject(new Error(command + " process exited with code " + code));
        else resolve();
      });
  });
}
And launch new processes like this:
asyncSpawn("node", ["server.js", "--param1"])
.then(() => console.log("DONE"))
.catch(err => console.error(err));
I'm trying to run ripgrep from my Node app and am seeing a strange behavior with child_process.spawn: none of the events fire and the app never finishes (is stuck somewhere inside the spawn call):
import { spawn } from 'child_process';

// Question code: the wrapper appears to hang on ripgrep because rg sits
// waiting for piped stdin input and never exits (see the answer below), so
// no data/close/exit event ever fires — Node itself is working correctly.
async function run() {
await spawnWrapper('rg', ['-F', '"demo"'], { cwd: __dirname });
}

// Promise adapter around child_process.spawn that buffers stdout/stderr.
// NOTE(review): both 'close' and 'exit' call resolve(); only the first
// settlement takes effect, the later one is silently ignored.
export function spawnWrapper(command, args, options) {
return new Promise((resolve, reject) => {
let stdout = '';
let stderr = '';
const child = spawn(command, args, options);
console.log('spawn wrapper');
child.on('close', (code, signal) => {
console.log('close');
resolve({ code, signal, stdout, stderr });
});
// 'error' fires when the process could not be spawned at all.
child.on('error', (error) => {
console.log('error');
(error as any).stderr = stderr;
reject(error);
});
child.on('exit', (code, signal) => {
console.log('exit');
resolve({ code, signal, stdout, stderr });
});
child.stdout.setEncoding('utf8');
child.stderr.setEncoding('utf8');
child.stdout.on('data', (data) => {
console.log('stdout data');
stdout += data;
});
child.stderr.on('data', (data) => {
console.log('stderr data');
stderr += data;
});
});
}
I only get "spawn wrapper" in the console, no other events. I've never seen this behavior with other binaries, maybe it's something with ripgrep but still, shouldn't I be getting at least some hints by Node? Any suggestions on how to debug this?
It was caused by ripgrep waiting for input which was not obvious to me (on command line, it just executes straight away). Details here: https://github.com/BurntSushi/ripgrep/issues/410
I am trying to set a timeout in between readstream.write but I keep getting Error: write after end. How can I set a timeout in between res.write. Thank you. Here is my code.
app.get('/video', function (req, res) {
var readStream = fs.createReadStream('video.mjpeg');
res.writeHead(200, {'Content-Type': 'multipart/x-mixed- replace;boundary=ThisString'});
// BUG (the asker's "write after end"): the stream's 'end' event fires and
// res.end() runs below while 200ms boundary-write timers are still pending,
// so those delayed res.write() calls land on an already-ended response.
readStream.on("data", (chunk) => {
res.write(chunk, 'binary');
setTimeout(function () {
res.write('ThisString');
}, 200);
console.log("Writing to client");
});
readStream.on("end", () => {
readStream.destroy();
res.end();
console.log("End of data");
});
});
I am trying to display the video on the client side, the problem is that the mjpeg gets read quickly thus cannot create the video. Any help will be appreciated.
I quickly wrote some lines of code based on what I understand you need.
StackOverflow is not my favorite IDE so don't be too hard on it ^^
I don't know readStream very well, but the data event is called many times, right?
The queue waits until the preceding chunk and its timeout have correctly finished.
Is this what you need?
var kue = require('kue'),
    jobs = kue.createQueue();

app.get('/video', function (req, res) {
  var readStream = fs.createReadStream('video.mjpeg');
  res.writeHead(200, {'Content-Type': 'multipart/x-mixed- replace;boundary=ThisString'});

  // Each chunk becomes a queued job; kue processes them one at a time, so
  // the next chunk is written only after the previous 200ms delay finished.
  jobs.process('write chunck', function (job, done) {
    setTimeout(function() {
      // BUG FIX: kue puts the payload on job.data, and the key was created
      // as "chunk" — the original read the misspelled job.chunck (undefined).
      res.write(job.data.chunk, 'binary');
      done && done();
    }, 200);
  });

  jobs.process('write end', function(job, done) {
    readStream.destroy();
    res.end();
    console.log("End of data");
    // BUG FIX: the original used a single `&` (bitwise AND), which evaluates
    // done() eagerly-never — the callback was never invoked and the job
    // stayed "active" forever.
    done && done();
  });

  readStream.on("data", (chunk) => {
    // BUG FIX: kue jobs are not enqueued until .save() is called.
    jobs.create('write chunck', {
      chunk: chunk
    }).save();
  });

  readStream.on("end", () => {
    jobs.create('write end', {}).save();
  });
});
I am trying to run a test.bat file inside node.js
here is the code
var exec = require('child_process').execFile;
// Fragment of an http server's switch over the request URL.
case '/start':
req.on('data', function (chunk) {});
req.on('end', function () {
console.log("INSIDE--------------------------------:");
// BUG (the asker's error): on Windows cmd.exe does not resolve the './'
// prefix, so execFile('./uli.bat') fails with "'.' is not recognized" —
// batch files must be launched via `cmd.exe /c` or with a plain/absolute path.
exec('./uli.bat', function (err, data) {
console.log(err);
console.log(data);
res.end(data);
});
});
break;
While running this node.js file I am getting
INSIDE--------------------------------:
{ [Error: Command failed: '.' is not recognized as an internal or ext
nd,
operable program or batch file.
] killed: false, code: 1, signal: null }
I have found the solution for it, and it works fine for me. This opens up a new command window and runs my main node JS in a child process. You don't need to give the full path of cmd.exe.
I was making that mistake.
// Run my.bat through the Windows command interpreter and mirror its output.
var spawn = require('child_process').spawn;
var batch = spawn('cmd.exe', ['/c', 'my.bat']);

batch.stdout.on('data', (data) => {
  console.log('stdout: ' + data);
});

batch.stderr.on('data', (data) => {
  console.log('stderr: ' + data);
});

batch.on('exit', (code) => {
  console.log('child process exited with code ' + code);
});
The easiest way I know to execute that is the following code:
// Execute the batch file and print its buffered output once it finishes.
var runBatch = require('child_process').exec;
runBatch("path/to/your/file.bat", function (err, stdout, stderr) {
  if (err) {
    // Ooops.
    // console.log(stderr);
    return console.log(err);
  }
  // Done.
  console.log(stdout);
});
You could replace "path/to/your/file.bat" by __dirname + "/file.bat" if your file is in the directory of your current script for example.
In Windows, I don't prefer spawn as it creates a new cmd.exe and we have to pass the .bat or .cmd file as an argument. exec is a better option. Example below:
Please note that in Windows you need to pass the path with double backslashes. E.g. C:\\path\\batfilename.bat
const { exec } = require('child_process');

// Run the .bat at "path" (remember the double-backslash escaping on
// Windows) and log either the captured stdout or the failure.
exec("path", function (err, stdout, stderr) {
  if (err) return console.error(err);
  console.log(stdout);
});
An easier way I know for executing that is the following code:
// Run script.bat and log its text output; prints 'Stop' on a clean exit
// (code 0) and 'Start' otherwise, matching the original author's intent.
function Process() {
  // Renamed from `process` so Node's global `process` is not shadowed.
  const childProcess = require('child_process');
  // BUG FIX: shell:true is required to launch .bat/.cmd files on current
  // Node releases (plain spawn of batch files was disabled as part of the
  // CVE-2024-27980 hardening and now throws EINVAL).
  var ls = childProcess.spawn('script.bat', [], { shell: true });
  ls.stdout.on('data', function (data) {
    // toString() so we log text rather than a raw Buffer dump.
    console.log(data.toString());
  });
  ls.stderr.on('data', function (data) {
    console.log(data.toString());
  });
  ls.on('close', function (code) {
    if (code == 0)
      console.log('Stop');
    else
      console.log('Start');
  });
};
Process();