Not able to take the MongoDB backup/dump using node js - node.js

When executing the written script, it exit with code 'one' while it should save the dump of the database
const cron = require('node-cron');
const { exit } = require('process');
const spawn = require('child_process').spawn;
let backupProcess = spawn('mongodump', [
'--host=localhost',
'--port=27017',
'--out=.',
'--db=testdb',
], {shell: true});
backupProcess.on('exit', (code, signal) => {
console.log('Function called');
if(code)
console.log('Backup process exited with code ', code);
else if (signal)
console.error('Backup process was killed with singal ', signal);
else
console.log('Successfully backedup the database')
});
while executing the same through VS code terminal, getting following error,
Function called
Backup process exited with code 1 while should save the dump at given location`your text`
Actual Result : Backup process exited with code 1
Expected result : Must dump the database

Related

node.js - detect exit of external process by PID

Provided I have the PID for an already running process, how can I detect the exit of that process in Node.js?
For example:
var pid = 1200; // any PID
getProcessByPID(pid).onExit = () => {
// Code on exit goes here
};
I know its an old post, just needed the information myself. You can do code on a process exiting by listening for the 'exit' keyword. 'code' is the exit code of the process.
const forked = fork(`${__dirname}/file.js`)
forked.on('message', (msg) => {
console.log('message from child = ', msg)
})
forked.on('exit', (code) => {
console.log('Exiting worker with code =', code)
})

Nodejs listing directories with child_process.spawn ends before receiving all data

const spawn = require('child_process').spawn;
function main() {
ls = spawn('dir', {shell: true});
ls.stdout.on('data', data => console.log('DATA RECEIVED'));
ls.on('exit', code => console.log('EXITED'));
}
main();
This runs on Windows, and the result is
DATA RECEIVED
EXITED
DATA RECEIVED
Why is EXITED printed out before the last line? Is there any way safe to check whether all the data is received or not?

Node.js: Spawning an echo process with the ">" flag

I am looking to spawn an echo process to write some text to a "file".
*The fs package is off limits because the "file" is a communication pathway for a linux driver.
Below is my code to just see if I can get an echo process working with writing to a normal file however the spawn doesn't appear to like the > flag. Any ideas?
var spawn = require('child_process').spawn;
echo = spawn('echo', ["test", ">", __dirname+"/test.txt"]);
echo.on('error', function (err) {
console.log('ls error', err);
});
echo.stdout.on('data', function (data) {
console.log('stdout: ' + data);
});
echo.stderr.on('data', function (data) {
console.log('stderr: ' + data);
});
echo.on('close', function (code) {
console.log('child process exited with code ' + code);
});
Right now I just get the following output with no written file:
stdout: test > <*PATH*>/test.txt
child process exited with code 0
I ended up just creating a bash file (echo-test.sh) with the following contents:
echo "test" > <*PATH*>/test.txt
and executed in node like such:
echo = spawn('bash', [__dirname+"/echo-test.sh"]);

Node.js Spawn: How to check if the process Exited Immediately

In my app, I need to spawn a debugger process given a dump file to debug, say "example.dmp". If the dump file is not found or not it is not a dump file. The process will be successfully spawned but it will exit immediately. I would like to throw an error message in an async function which can be try catched later.
const spawnDebuggerProcess = (debuggerConfigs) => {
let debuggerProcess = spawn(config.debuggerName, debuggerConfigs)
debuggerProcess.on('exit', (code, signal) => {
console.log('pid ' + `${debuggerProcess.pid}`)
console.log('child process exited with ' + `code ${code} and signal ${signal}`)
})
debuggerProcess.on('error', (error) => {})
if (debuggerProcess.pid !== undefined) {
return debuggerProcess
}
throw externalError.createError({
name: externalError.SPAWN_DEBUGGER_PROCESS_ERROR,
description: 'Failed to spawn debugging process'
})
}
One of my thought is to time give this function a time window before returning. If the process exited before the time window I should throw an error. But since node.js is async. I don't have an idea of how can this be realized. Thanks!
==== EDIT =====
const spawnDebuggerProcess = async (debuggerConfigs) => {
let debuggerProcess = spawn(config.debuggerProcess.debuggerName, debuggerConfigs)
/** added a flag */
let exited = false
debuggerProcess.on('exit', (code, signal) => {
console.log('pid ' + `${debuggerProcess.pid}`)
console.log('child process exited with ' + `code ${code} and signal ${signal}`)
/** set flag to true if exited */
exited = true
})
debuggerProcess.on('error', (error) => {})
if (debuggerProcess.pid !== undefined) {
/** delay this promise for a certain amount of time, act as the time window */
await delay(config.debuggerProcess.immediateExitDelay)
/** check the flag */
if (exited) {
throw externalError.createError({
name: externalError.SPAWN_DEBUGGER_PROCESS_ERROR,
description: 'Process exited immediately'
})
}
return debuggerProcess
}
throw externalError.createError({
name: externalError.SPAWN_DEBUGGER_PROCESS_ERROR,
description: 'Failed to spawn debugging process'
})
}
It seems to be working, but I am not sure if it is a good practice.

Node Kue and Child Process - get error from spawned process

I try to spawn a child process performing cpu intensive calculations through a job queue with Kue. My code at the moment looks like this:
consumer.js
var kue = require('kue');
var util = require('util');
var spawn = require('child_process').spawn;
var jobs = kue.createQueue();
jobs.process('calc', 2, function(job, done){
var work = spawn('Rscript', ['opti2.R', job.data.file]);
work.stderr.on('data', function (data) {
job.log('stderr: ' + data);
});
work.stdout.on('data', function (data) {
job.log('stdout: ' + data);
});
work.on('exit', function (code, signal) {
console.log('child process exited with code ' + code + ' with singal ' + signal);
if(code != 0){
done(****How to get the stderr of the child process as an error here***);
} else {
done(Error());
}
});
});
The code somewhat do what i would like it to do, but is there a better way to report the job as failed (to Kue) and get the stderr from the spawned process?
You can use job.log method to send data directly to Kue.
I would also recommend you to switch from .spawn to .exec, because it returns stdout and stderr as strings in its final callback along with a good error, which suits your needs well:
var exec = require('child_process').exec;
jobs.process('calc', 2, function(job, done){
exec('Rscript opti2.R ' + job.data.file, function (error, stdout, stderr) {
if (stdout.length > 0) job.log('stdout: ' + stdout);
if (stderr.length > 0) job.log('stderr: ' + stderr);
done(error);
});
});
Though solution should work with .spawn as well: simply replace each console.log call in your code with job.log.
Though, you may want to bufferize your stderr in order to send it to Kue in one chunk:
jobs.process('calc', 2, function(job, done){
var work = spawn('Rscript', ['opti2.R', job.data.file]);
var stderr = '';
work.stderr.on('data', function (data) {
stderr += data;
});
work.stdout.on('data', function (data) {
job.log(data); // sending arriving `stdout` chunks as normal log events
});
work.on('close', function (code, signal) {
console.log('child process exited with code ' + code + ' with singal ' + signal);
if(code != 0){
done(stderr); // sending all collected stderr as an explanation
} else {
done();
}
});
});
I would also recommend using close event instead of exit, because it waits for child's stdio streams.
For more information see Event: 'exit' docs:
This event is emitted after the child process ends.
Note that the child process stdio streams might still be open.
and Event: 'close' docs:
This event is emitted when the stdio streams of a child process have
all terminated.

Resources