I am writing some code that exposes functionality which is run through cmd. How can I invoke cmd executables from my code?
Here is a small example that uses child_process to execute a command in cmd.
const { exec } = require("child_process");

function os_func() {
    this.execCommand = function(cmd, callback) {
        exec(cmd, (error, stdout, stderr) => {
            if (error) {
                console.error(`exec error: ${error}`);
                return;
            }
            callback(stdout);
        });
    };
}

// Assumes an existing Express `app`
app.get("/", (req, res) => {
    console.log("inside get");
    var os = new os_func();
    os.execCommand('arp -a', function (returnvalue) {
        res.end(returnvalue);
    });
});
There is also the global process.argv array, which holds the command-line arguments. Or, if you want more convenient command-line argument handling, try the npm package yargs.
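For example, a minimal sketch of reading raw arguments with process.argv (the arguments shown are just illustrative):

// Run as: node script.js arp -a
// process.argv[0] is the node binary, process.argv[1] is the script path,
// and everything after that is the user-supplied arguments.
const args = process.argv.slice(2);
console.log(args); // e.g. [ 'arp', '-a' ]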
Related
I'm trying to write a simple Node.js script that checks whether the node_modules folder exists in the root directory of my web application project.
This is the code:
const fs = require('fs');
const { series } = require('async');
const { exec } = require('child_process');
try {
    if (!fs.existsSync('node_modules')) {
        console.log('installing');
        series([
            () => exec('yarn install', (error, stdout, stderr) => {
                console.log(error);
                console.log(stdout);
                console.log(stderr);
            }),
        ], (err, results) => {
            console.log(err);
            console.log(results);
        });
    } else {
        console.log('running');
        series([
            () => exec('webpack serve --config webpack.dev.js', (error, stdout, stderr) => {
                console.log(error);
                console.log(stdout);
                console.log(stderr);
            }),
        ], (err, results) => {
            console.log(err);
            console.log(results);
        });
    }
} catch (error) {
    console.log(error);
}
This script works, but when the command is run via exec (e.g. webpack serve --config webpack.dev.js) the command itself executes correctly, yet I lose all of the console messages that the webpack serve build generates.
Is there a way to handle this issue using exec, or should it be done some other way?
Thanks a lot.
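One way to keep that build output visible is to stream it instead of buffering it; a minimal sketch using spawn with inherited stdio (assuming the same webpack command as above):

const { spawn } = require('child_process');

// 'inherit' wires the child's stdout/stderr straight to this process,
// so webpack's build messages appear in the console as they are produced.
const child = spawn('webpack', ['serve', '--config', 'webpack.dev.js'], {
    stdio: 'inherit',
    shell: true, // lets Windows resolve webpack.cmd from node_modules/.bin
});

child.on('close', (code) => {
    console.log(`webpack exited with code ${code}`);
});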
var execFile = require('child_process').execFile;

execFile('C:\\something.exe', function (err, data) {
    console.log(err);
    console.log(data.toString());
});
You can accomplish this by using the exec function from the built-in child_process module, as shown in the Node.js documentation.
const { exec } = require('child_process');
exec('start program.exe', (error, stdout, stderr) => {
    if (error) {
        throw error;
    }
    console.log(stdout);
});
I like to wrap Node.js's exec in a custom function so that all the errors are handled in one place.
const exec = require('child_process').exec;

function os_func() {
    this.execCommand = function(cmd) {
        var ret;
        exec(cmd, (error, stdout, stderr) => {
            if (error) {
                console.error(`exec error: ${error}`);
                return;
            }
            ret = stdout;
        });
        return ret;
    };
}

var os = new os_func();
This function returns undefined because exec hasn't finished when the value is returned.
How can I solve that? Can I force the function to wait for exec?
You can use a Promise:
const exec = require('child_process').exec;

function os_func() {
    this.execCommand = function (cmd) {
        return new Promise((resolve, reject) => {
            exec(cmd, (error, stdout, stderr) => {
                if (error) {
                    reject(error);
                    return;
                }
                resolve(stdout);
            });
        });
    };
}

var os = new os_func();

os.execCommand('pwd').then(res => {
    console.log("os >>>", res);
}).catch(err => {
    console.log("os >>>", err);
});
Since the command is executed asynchronously you will want to use a callback to handle the return value once the command has finished executing:
const exec = require('child_process').exec;

function os_func() {
    this.execCommand = function(cmd, callback) {
        exec(cmd, (error, stdout, stderr) => {
            if (error) {
                console.error(`exec error: ${error}`);
                return;
            }
            callback(stdout);
        });
    };
}

var os = new os_func();

os.execCommand('SomeCommand', function (returnvalue) {
    // Here you can use the return value
});
Yet another solution using ES6 modules:
// Requires an ES module context (e.g. "type": "module" in package.json) for top-level await
import { exec } from "node:child_process";
import util from "node:util";

// promisify exec
const execPromise = util.promisify(exec);

try {
    // wait for exec to complete
    const { stdout, stderr } = await execPromise("ls -l");
    console.log(stdout);
} catch (error) {
    console.log(error);
}
exec handles the command asynchronously, so you should pass a callback or return a promise.
One thing you could do to make it synchronous is to use execSync instead:
https://nodejs.org/api/child_process.html#child_process_child_process_execsync_command_options
The child_process.execSync() method is generally identical to
child_process.exec() with the exception that the method will not
return until the child process has fully closed. When a timeout has
been encountered and killSignal is sent, the method won't return until
the process has completely exited. Note that if the child process
intercepts and handles the SIGTERM signal and doesn't exit, the parent
process will wait until the child process has exited.
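A minimal sketch of the synchronous variant (the arp -a command is just an example from earlier in the thread):

const { execSync } = require('child_process');

try {
    // Blocks until the command has finished; returns the captured stdout.
    const stdout = execSync('arp -a', { encoding: 'utf8' });
    console.log(stdout);
} catch (error) {
    // Thrown if the command exits with a non-zero code.
    console.error(`execSync error: ${error.message}`);
}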
Adding what worked for me, as none of the above did the trick!
const { exec } = require("child_process");
const util = require("util");
const execPromise = util.promisify(exec);
async function parentFunction() {
    ...
    // Trigger 'exec', then await it to finish
    await execWrapper('<your-command-here>');
    ...
}

...

async function execWrapper(cmd) {
    const { stdout, stderr } = await execPromise(cmd);
    if (stdout) {
        console.log(`stdout: ${stdout}`);
    }
    if (stderr) {
        console.log(`stderr: ${stderr}`);
    }
}
NOTE: This isn't your exact example, just a generic one; for me the cmd was a Docker build command. You could have execWrapper return the stdout if needed.
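For instance, a sketch of that variant, returning the captured stdout to the caller (reusing the execPromise defined above):

async function execWrapper(cmd) {
    const { stdout, stderr } = await execPromise(cmd);
    if (stderr) {
        console.log(`stderr: ${stderr}`);
    }
    return stdout; // hand the command's output back to the caller
}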
You can do it with a callback. Maybe you can try something like this:
const exec = require('child_process').exec;

function os_func() {
    this.execCommand = function(cmd, myCallback) {
        exec(cmd, (error, stdout, stderr) => {
            if (error) {
                console.error(`exec error: ${error}`);
                return;
            }
            var ret = stdout;
            myCallback(ret);
        });
    };
}

function myCallback(ret) {
    // TODO: your stuff with the return value...
}
I have to run a file, test.js, which is at a different location than my running application. To do that I have tried the following code:
var execFile = require("child_process").execFile;

exports.sync = function(req, res) {
    console.log("sync called");
    var child = execFile("C:/Users/rhush/Desktop/test", function(error, stdout, stderr) {
        if (error) {
            throw error;
        }
        console.log(stdout);
        res.send({ status: stdout });
    });
};
and my test file is here:

function testing() {
    console.log('sync job running');
}
testing();
but I got an error.
Please correct me if I am making any mistake.
To run a JS file using execFile you need to pass the node command with the file name as an argument. Use this one:
var execFile = require("child_process").execFile;

exports.sync = function(req, res) {
    console.log("sync called");
    var child = execFile("node", ["C:/Users/rhush/Desktop/test.js"], function(error, stdout, stderr) {
        if (error) {
            throw error;
        }
        res.send({ status: stdout });
    });
};
I am trying to run a test.bat file inside Node.js.
Here is the code:
var exec = require('child_process').execFile;

case '/start':
    req.on('data', function (chunk) {});
    req.on('end', function () {
        console.log("INSIDE--------------------------------:");
        exec('./uli.bat', function (err, data) {
            console.log(err);
            console.log(data);
            res.end(data);
        });
    });
    break;
While running this Node.js file I am getting:

INSIDE--------------------------------:
{ [Error: Command failed: '.' is not recognized as an internal or external command,
operable program or batch file.
] killed: false, code: 1, signal: null }
I have found the solution for it, and it works fine for me. This opens up a new command window and runs my batch file in a child process. You need not give the full path of cmd.exe; I was making that mistake.
var spawn = require('child_process').spawn,
    ls = spawn('cmd.exe', ['/c', 'my.bat']);

ls.stdout.on('data', function (data) {
    console.log('stdout: ' + data);
});

ls.stderr.on('data', function (data) {
    console.log('stderr: ' + data);
});

ls.on('exit', function (code) {
    console.log('child process exited with code ' + code);
});
The easiest way I know to execute that is the following code:
require('child_process').exec("path/to/your/file.bat", function (err, stdout, stderr) {
    if (err) {
        // Ooops.
        // console.log(stderr);
        return console.log(err);
    }
    // Done.
    console.log(stdout);
});
You could replace "path/to/your/file.bat" with __dirname + "/file.bat" if, for example, your file is in the same directory as your current script.
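For example, a sketch of building that path with path.join (the file name is just illustrative):

const path = require('path');
const { exec } = require('child_process');

// Resolve file.bat relative to the directory of the current script.
const batPath = path.join(__dirname, 'file.bat');

// Quote the path in case it contains spaces.
exec(`"${batPath}"`, (err, stdout, stderr) => {
    if (err) {
        return console.log(err);
    }
    console.log(stdout);
});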
On Windows, I prefer not to use spawn, as it creates a new cmd.exe and we have to pass the .bat or .cmd file as an argument; exec is a better option. Example below.
Please note that on Windows you need to write the path with double backslashes in a string literal, e.g. C:\\path\\batfilename.bat.
const { exec } = require('child_process');
exec("path", (err, stdout, stderr) => {
if (err) {
console.error(err);
return;
}
console.log(stdout);
});
An easy way I know of executing that is the following code:
function Process() {
    const { spawn } = require('child_process');
    var ls = spawn('script.bat');

    ls.stdout.on('data', function (data) {
        console.log(data.toString());
    });

    ls.stderr.on('data', function (data) {
        console.log(data.toString());
    });

    ls.on('close', function (code) {
        if (code == 0)
            console.log('Stop');
        else
            console.log('Start');
    });
}

Process();