I'm executing mongodump command from nodejs child_process.
I have tried both exec and spawn, but the progress printed by mongodump is hitting the data event of stderr instead of stdout
// Launch mongodump and mirror everything it reports on each stream.
const exec = require('child_process').exec;
const dump = exec('mongodump --gzip --archive="/home/test-machine/test.archive" --db myDB');

// mongodump writes its progress log to stderr, so this handler stays quiet.
dump.stdout.on('data', (data) => {
    console.log('stdout: ' + data.toString());
});

// The progress/log lines arrive here.
dump.stderr.on('data', (data) => {
    console.log('stderr: ' + data.toString());
});

dump.on('exit', (code) => {
    console.log('child process exited with code ' + code.toString());
});
mongodump is not crashing or throwing any error; it's executing as it should. But somehow Node.js is treating the output as stderr.
mongodump writes log messages to stderr, which isn't very uncommon, although it makes parsing the output for errors a bit harder because you need to manually filter out the error messages.
Related
I am executing a command and listening to its stdout using spawn like below
const { spawn } = require('child_process');
// Spawn RV on the image and keep a handle so we can attach stream listeners.
const childProcess = spawn('RV', ["...jpg"]);

// BUG FIX: the original attached every listener to `child`, a name that is
// never defined (the spawn result is stored in `childProcess`), so the
// handlers threw a ReferenceError before any output could be observed.
childProcess.stdout.on('data', function(data) {
    console.log(data.toString());
})
childProcess.stderr.on('data', function(data) {
    console.log("error", data.toString());
})
childProcess.on('error', function(err) {
    console.log("Error", err);
})
childProcess.on('exit', function(code, signal) {
    console.log("Child process exited")
});
Child process stdout is not live. If I close the child process, it prints the remaining stdout. I understand this is because the stdout is buffered till it receives a new line.
Node.js can't reliably capture a child process's stdout data completely unless the child process calls fflush(stdout).
But if I execute the same command in a terminal, it shows all the stdout logs without needing to close the subprocess. If this were an issue with the application I am calling, then it shouldn't work in the terminal either, right?
Is there an option in nodejs's childprocess to execute the command just like executing in terminal?
I am running the following code:
var exec = require('child_process').exec;
var command = "security set-key-partition-list -S apple-tool:,apple: -s -k password /path/to/keychain/login.keychain-db";

// BUG FIX: the original passed `serverConfig.securityCall` (undefined in this
// snippet) instead of the `command` string built just above.
// A larger maxBuffer keeps a chatty `security` run from aborting the callback
// with "stdout maxBuffer exceeded" (the historical default was only 200 KiB).
exec(command, { maxBuffer: 10 * 1024 * 1024 }, function (error, stdout, stderr) {
    if (error !== null) {
        console.log('exec error: ' + error);
        console.log('STDERR: ' + stderr);
        console.log('STDOUT: ' + stdout);
    }
});
I get the error: exec error: Error: stdout maxBuffer exceeded.
Is there a way to suppress the stdout? I don't need it.
I saw this post: Stdout buffer issue using node child_process
So, I changed it to a spawn
var spawn = require('child_process').spawn;

// BUG FIX: spawn() does not run a shell, so every flag and value must be its
// own array element. The original packed "-S apple-tool:,apple: -s -k ..."
// into one string, which `security` received as a single bogus argument and
// rejected with a passphrase error.
var child = spawn('security',
    ['set-key-partition-list',
     '-S', 'apple-tool:,apple:',
     '-s',
     '-k', 'password',
     '/path/to/keychain/login.keychain-db'],
    {stdio:['ignore', 'ignore', 'pipe']});

// FIX: declare stderr instead of leaking an implicit global.
var stderr = '';
child.stderr.on('data', function (data) {
    console.log('stderr: ' + data);
    stderr = 'stderr: ' + data
});
child.on('close', function (code) {
    console.log('child process exited with code ' + code);
    if (!code) { //0 = success 1= error
        console.log("SUCCESS");
    } else {
        console.log('STDERR: ' + stderr);
    }
});
but I get this error:
stderr: password to unlock default: security: SecKeychainItemSetAccessWithPassword: The user name or passphrase you entered is not correct.
If I run this from the command line it works, so I know my password is correct. (The password and path to the keychain have been redacted for security purposes.)
How can I get this to work with spawn or exec?
The error you get is coming from your security application, not from Node. There is a tricky part of using spawn. Every single option should be a separate array element.
So this array element should be separated to multiple elements
'-S apple-tool:,apple: -s -k password /path/to/keychain/login.keychain-db'
Something like
['-S', 'apple-tool:,apple:', '-s', '-k', 'password', '/path/to/keychain/login.keychain-db']
Honestly I don't understand why it is not explained well in the documentation.
I can run a bash command in node.js like so:
// FIX: dropped the long-deprecated `sys` module (renamed `util` years ago)
// and the unused `puts` helper — the exec callback below already prints
// stdout itself.
var exec = require('child_process').exec;

exec("ls -la", function(err, stdout, stderr) {
    console.log(stdout);
});
How do I get the exit code of that command (ls -la in this example)? I've tried running
// NOTE: this always logs 0 — each exec() spawns its own separate shell, so
// the inner shell's $? reflects the inner `echo` command (which succeeded),
// not the `ls -la` run by the outer, already-exited shell.
exec("ls -la", function(err, stdout, stderr) {
    exec("echo $?", function(err, stdout, stderr) {
        console.log(stdout);
    });
});
This somehow always returns 0 regardless of the exit code of the previous command, though. What am I missing?
Those 2 commands are running in separate shells.
To get the code, you should be able to check err.code in your callback.
If that doesn't work, you need to add an exit event handler
e.g.
// FIX: `dir` was assigned without var/const/let, creating an implicit global
// (and a ReferenceError under strict mode / ES modules).
const dir = exec("ls -la", function(err, stdout, stderr) {
    if (err) {
        // err.code holds the child's non-zero exit code here.
    }
    console.log(stdout);
});
dir.on('exit', function (code) {
    // exit code is code
});
From the docs:
If a callback function is provided, it is called with the arguments (error, stdout, stderr). On success, error will be null. On error, error will be an instance of Error. The error.code property will be the exit code of the child process while error.signal will be set to the signal that terminated the process. Any exit code other than 0 is considered to be an error.
So:
// Print the child's exit code when the command fails; on success `error`
// is null and nothing is logged.
exec('...', function(error, stdout, stderr) {
    if (!error) return;
    console.log(error.code);
});
Should work.
child_process.spawnSync()
This function exposes the nicest sync interface: https://nodejs.org/api/child_process.html#child_process_child_process_spawnsync_command_args_options
Example:
#!/usr/bin/env node
const child_process = require('child_process');

// Print the exit status and both captured streams of a spawnSync result.
// Extracted to replace three identical copy-pasted reporting blocks.
function report(out) {
    console.log('status: ' + out.status);
    console.log('stdout: ' + out.stdout.toString('utf8'));
    console.log('stderr: ' + out.stderr.toString('utf8'));
    console.log();
}

report(child_process.spawnSync('true'));          // exits 0, no output
report(child_process.spawnSync('false'));         // exits 1, no output
report(child_process.spawnSync('echo', ['abc'])); // exits 0, "abc" on stdout
Output:
status: 0
stdout:
stderr:
status: 1
stdout:
stderr:
status: 0
stdout: abc
stderr:
Tested in Node.js v10.15.1, Ubuntu 19.10.
If anyone is looking for an await/Promise version:
// Promise-flavoured exec: await the command, and keep the rejection (if any)
// as the resolved value so both outcomes land in the same variable.
const exec = require('util').promisify(require('child_process').exec);

const result = await exec(`echo hello`).catch(e => e);
console.log(result.stdout); // "hello"
console.log(result.code);   // undefined when successful (not 0)
If the command is successful, then the it'll output an object like {stderr, stdout}. If it has a non-zero exit code, then it'll output an error object with {stderr, stdout, code, killed, signal, cmd} and the usual JavaScript Error object properties like message and stack.
In node documentation i found this information for the callback function:
On success, error will be null. On error, error will be an instance of Error. The error.code property will be the exit code of the child process while error.signal will be set to the signal that terminated the process. Any exit code other than 0 is considered to be an error.
I have a problem, I need to read from a console output in real time. I have a file that I need to execute, tried doing something like this test.exe > text.txt but when I try to read while exe file is running I can't see anything until exe finishes and write all lines in the same time. I need to do this using node.js
You should be able to use child_process.spawn() to start the process and read from its stdout/stderr streams:
// Start test.exe and relay each chunk of output to our own streams as it
// arrives, so nothing waits for the child to exit.
const spawn = require('child_process').spawn;
const proc = spawn('test.exe');

proc.stdout.on('data', (data) => process.stdout.write(data));
proc.stderr.on('data', (data) => process.stderr.write(data));

proc.on('close', (code, signal) => {
    console.log('test.exe closed');
});
test.exe probably buffers its output.
Disable buffering on redirected stdout Pipe (Win32 API, C++)
windows console program stdout is buffered when using pipe redirection
Python C program subprocess hangs at "for line in iter"
You can try to run it with spawn, or with a pseudo tty
// Run type.exe, echo whatever appears on each stream, then report the
// exit code once the child closes.
const { spawn } = require('child_process');
const type = spawn('type.exe');

type.stdout.on('data', function (data) {
    console.log(`stdout: ${data}`);
});

type.stderr.on('data', function (data) {
    console.log(`stderr: ${data}`);
});

type.on('close', function (code) {
    console.log(`child process exited with code ${code}`);
});
In node, I am running a perl script using child_process, and console.log in the callback seems to be behaving strangely.
// FIX: `child` was assigned without a declaration, leaking an implicit global.
const child = exec("perl " + validationScript,
    {cwd: myDir},
    function(err, stdout, stderr) {
        // Both streams arrive as complete buffers once the child exits.
        console.log('stdout: ' + stdout);
        console.log('stderr: ' + stderr);
        return cb(err);
    });
stdout is printing correctly, but the stderr print statement is not executing. (If I comment out the stdout print, then stderr is printed correctly). So some buffering problem? How can I make them print correctly?
I am new to asynchronous programming, so sorry if this question is too basic.
have you tried it without return? The following code works for me. aa provokes an error and I have something on the stderr, if I only use ls I'll get no error but a correct stdout.
// FIX: declare `child` (the original leaked an implicit global).
const child = exec('ls aa',
    function(err, stdout, stderr) {
        console.log('stdout: ' + stdout);
        console.log('stderr: ' + stderr);
        cb(err);
    });

// Minimal callback: dump whatever error (or null) exec reported.
function cb(data) {
    console.log(data);
}