Using exec throws process error after save in node webkit - node.js

I'm using exec() in node webkit to open a file in the operating system's default program using start filename after it saves the file. Here is my code:
// Write the RTF content to a temp file, then ask Windows to open it with the
// default program via "start <file>".
var fs = require('fs');
var myFile = "C:/TEMP/" + fileName;
// NOTE(review): writeFile is asynchronous — the exec() below starts before the
// write has finished, which is exactly what produces the intermittent
// "file is being used by another process" error described further down.
fs.writeFile(myFile, rtf, function(err) {
if(err) {
alert("error");
}
});
// 'sys' is a deprecated alias of 'util'; it is required here but never used.
var sys = require('sys');
var exec = require('child_process').exec;
var child;
// "start <file>" opens the file with the OS default handler on Windows.
child = exec("start " + myFile, function (error, stdout, stderr) {
if (error !== null) {
console.log('exec error: ' + error);
}
});
Sometimes it works, but sometimes I'm getting this error: exec error: Error: Command failed: The process cannot access the file because it is being used by another process.
Edit: I saw that Microsoft has this issue with rtf and txt documents opening in wordpad (which is what mine was being opened in), so I thought it might have been that process hanging up and not dying properly. Before I called it quits for the night, I changed the default program to open in Libre Office and had better results (it didn't happen as often), but if I tried to fire the function quickly in sequence, I would still get the error.

writeFile is async. You need a callback like this:
var fs = require('fs');
var myFile = "C:/TEMP/" + fileName;
// Only open the file once the asynchronous write has completed successfully;
// launching the viewer from inside the writeFile callback removes the race
// that caused "file is being used by another process".
fs.writeFile(myFile, rtf, function(err) {
    if (err) {
        alert("error");
    } else {
        lire();
    } // <- this closing brace of the else-block was missing (syntax error)
});
// 'sys' is a deprecated alias of 'util'; kept for fidelity but unused.
var sys = require('sys');
var exec = require('child_process').exec;
var child;
// Opens the freshly written file with the OS default program.
function lire(){
    child = exec("start " + myFile, function (error, stdout, stderr) {
        if (error !== null) {
            console.log('exec error: ' + error);
        }
    });
};

Related

node.js on windows : Is it possible to get the absolute path of process from its pid?

I would like to get the path of a running process on Windows OS.
I can find its PID in node.js:
var exec = require('child_process').exec;
// List running processes and pick out the line for the target executable.
exec('tasklist', function(err, stdout, stderr) {
    var lines = stdout.toString().split('\n');
    var results = [];
    lines.forEach(function (line) {
        if (line.indexOf('GestionDocument') > -1) {
            console.log(line);
            // tasklist columns are whitespace-separated: image name, PID, ...
            var parts = line.split(/[\s,]+/);
            console.log(parts[1]); // parts[1] is the PID
        }
    });
}); // <- this closing of the exec() callback was missing in the original
that displays me
16248
Do you know how to get the folder path of a process from its PID?

Node child process exec async only called once

I have an array with some IDs like a = [abc,cde,efg]. I pass this array and a second one containing the names with fit to the IDs to a child process in Node.
I want to wait until the child process finished and processes the next array member after that.
The code to achieve it is (with the suggested Edit):
// Downloads each product in promObj by running downloadProducts.sh, at most
// two at a time. Resolves with promObj when every download has finished,
// rejects with promObj on the first error.
function downloadSentinel(promObj) {
    return new Promise((resolve, reject) => {
        // One unit of work for async.eachOfLimit; `callback` frees the slot.
        function makeRequest(url, i, callback) {
            var exec = require('child_process').exec;
            // Strip everything from "\app_api" onward to get the project root.
            var directory = __dirname.substring(0, __dirname.indexOf("\\app_api"));
            console.log(directory);
            // BUG FIX 1: a space was missing between the script path and its
            // first argument ("downloadProducts.shABC ...").
            // BUG FIX 2: the listeners below used to be attached inside exec's
            // completion callback, which only runs AFTER the child has exited —
            // so 'exit' never fired, callback() was never invoked, and
            // eachOfLimit stalled after filling its concurrency limit.
            var child = exec(directory + '\\downloadProducts.sh ' + promObj.ID[i] + ' ' + promObj.Name[i]);
            child.on('error', function (error) {
                console.log(error);
            });
            child.stdout.on('data', function (data) {
                console.log(data.toString());
            });
            child.on('exit', function (code) {
                console.log(code);
                callback();
            });
        }
        // Process at most 2 downloads concurrently.
        async.eachOfLimit(promObj.requestURLS, 2, makeRequest, function (err) {
            if (err) reject(promObj);
            else {
                resolve(promObj);
            }
        });
    });
}
I am using npm-async to control the concurrency flow because I want to limit the curl requests I make inside the shell script. The shell script works without an error. Now the script is only called twice because of the async.eachOfLimit limit; after that, the other array IDs are not processed anymore.
EDIT
This is the code I tried last, but then all possible URLs are invoked instead of only 2. I found it in another Stack Overflow question. I also tried with async.timesLimit.
// Variant that drives async.eachOfLimit directly (no wrapping Promise).
// NOTE(review): the question reports that with this version every URL was
// launched at once instead of at most 2 — the cause is not visible from this
// snippet alone; the callback wiring below looks correct. Verify against the
// version of async in use.
function downloadSentinel(promObj,req,res) {
async.eachOfLimit(promObj.requestURLS, 2,function (value,i,callback) {
console.log('I am here ' + i + 'times');
// NOTE(review): these two assignments create implicit globals and are unused.
url = promObj.requestURLS;
name = promObj.Name;
console.log(promObj.requestURLS);
var sys = require('util'),
exec = require('child_process').exec,
child;
// Strip everything from "\app_api" onward to get the project root.
var directory = __dirname.substring(0, __dirname.indexOf("\\app_api"));
console.log(directory);
console.log("executing:", directory + '\\downloadProducts.sh ' + promObj.requestURLS[i] + ' ' + promObj.Name[i]);
child = exec(directory + '\\downloadProducts.sh' + ' ' + promObj.requestURLS[i] + ' ' + promObj.Name[i], function (error, stdout, stderr) {
if (error != null){
console.log(error);
}
// this task is resolved
return callback(error, stdout);
});
}, function (err) {
if (err) console.log('error')
else {
console.log('Done');
}
});
}
What have I missed? Thanks in advance.
exec is asynchronous, it does not wait for the launched process to finish.
Move the call to callback() inside the child.on('exit' ...) handler or use execSync

Passing arguments to child process in nodejs which has some dependency in execFile method

I am trying to pass some arguments to a child process in Node.js. I am actually running a PhantomJS script inside the child process, and I am using the execFile method to run my child script. This is how my index.js looks:
// Launch the bundled PhantomJS binary, telling it to run ThumbnailCreator.js
// and passing 'myargs' through as that script's first argument.
var childProcess = require('child_process');
var path = require('path');
var phantomjs = require('phantomjs');
var binPath = phantomjs.path;
console.log('inside index method ');
// Path setup reference: https://aws.amazon.com/blogs/compute/running-executables-in-aws-lambda/
// (explicit binary path kept for reference)
//var phantomPath = path.join(__dirname, 'phantomjs_linux-x86_64');
// First element is the phantom script to execute; the rest are its arguments.
var scriptToRun = path.join(__dirname, 'ThumbnailCreator.js');
var processArgs = [scriptToRun, 'myargs'];
// Run the phantom script as a child process and surface its output and errors.
childProcess.execFile(binPath, processArgs, function (error, stdout, stderr) {
    console.log('stdout: ', stdout);
    console.log('stderr: ', stderr);
    if (error !== null) {
        console.log('exec error: ', error);
    }
});
And I am trying to print the argument which I have passed, but it is not printing anything. This is how I am trying to print it in the child process:
// NOTE(review): process.argv is a Node API; a script executed BY PhantomJS
// must read its arguments via require('system').args instead, which is why
// this prints nothing.
console.log(process.argv[1]);
You can parse it like this in ThumbnailCreator.js
// PhantomJS exposes command-line arguments through its 'system' module:
// args[0] is the script file name, args[1] the first user argument.
var system = require('system');
var args = system.args;
args[0] is file name
args[1] will give you value passed in first argument.

execute a batch file from nodejs

Would it be possible to run a batch file from a nodejs application?
After googling for some time we can use child_process to execute the commands. Tried the same module but without success.
Could somebody guide me?
This creates a NodeJS module with a single function named exec() to execute batch scripts.
// Module dependencies and shell selection: honor $SHELL when present,
// default to sh, and fall back to %COMSPEC%/cmd.exe on native Win32.
var exec = require('child_process').exec,
    path = require('path'),
    os = require('os'),
    fs = require('fs'); // was "os = require('os');" + bare "fs = ..." — fs leaked as an implicit global
// HACK: to make our calls to exec() testable,
// support using a mock shell instead of a real shell
var shell = process.env.SHELL || 'sh';
// support for Win32 outside Cygwin
if (os.platform() === 'win32' && process.env.SHELL === undefined) {
    shell = process.env.COMSPEC || 'cmd.exe';
}
// Merges the current environment variables and custom params for the environment used by child_process.exec()
// Builds the environment object for child_process.exec(): a copy of the
// current process environment with `params` merged on top (params win on
// key conflicts). `params` may be omitted.
function createEnv(params) {
    var merged = {};
    var source;
    var key;
    // Copy process.env first, then params, so params override.
    for (source of [process.env, params]) {
        for (key in source) {
            merged[key] = source[key];
        }
    }
    return merged;
}
// scriptFile must be a full path to a shell script
exports.exec = function (scriptFile, workingDirectory, environment, callback) {
var cmd;
if (!workingDirectory) {
callback(new Error('workingDirectory cannot be null'), null, null);
}
if (!fs.existsSync(workingDirectory)) {
callback(new Error('workingDirectory path not found - "' + workingDirectory + '"'), null, null);
}
if (scriptFile === null) {
callback(new Error('scriptFile cannot be null'), null, null);
}
if (!fs.existsSync(scriptFile)) {
callback(new Error('scriptFile file not found - "' + scriptFile + '"'), null, null);
}
// transform windows backslashes to forward slashes for use in cygwin on windows
if (path.sep === '\\') {
scriptFile = scriptFile.replace(/\\/g, '/');
}
// TODO: consider building the command line using a shell with the -c argument to run a command and exit
cmd = '"' + shell + '" "' + scriptFile + '"';
// execute script within given project workspace
exec(cmd,
{
cwd: workingDirectory,
env: createEnv(environment)
},
function (error, stdout, stderr) {
// TODO any optional processing before invoking the callback
callback(error, stdout, stderr);
}
);
};
I have found the solution for it, and it works fine for me. This opens up a new command window and runs my main Node.js script in a child process. You need not give the full path of cmd.exe; I was making that mistake.
// Run the batch file via "cmd.exe /c" and forward everything the child
// prints on stdout/stderr, plus its exit code.
var spawn = require('child_process').spawn;
var ls = spawn('cmd.exe', ['/c', 'startemspbackend.bat']);
ls.stdout.on('data', function (data) {
    console.log('stdout: ' + data);
});
ls.stderr.on('data', function (data) {
    console.log('stderr: ' + data);
});
ls.on('exit', function (code) {
    console.log('child process exited with code ' + code);
});
An easier way I know for executing that is the following code :
// Runs script.bat and logs 'Stop' on a clean exit (code 0), 'Start' otherwise.
function Process() {
    // Renamed from `process` — the original local shadowed the global
    // `process` object, making process.env etc. unreachable in this scope.
    const childProcess = require('child_process');
    var ls = childProcess.spawn('script.bat');
    ls.stdout.on('data', function (data) {
        console.log(data);
    });
    ls.stderr.on('data', function (data) {
        console.log(data);
    });
    ls.on('close', function (code) {
        if (code === 0) // strict equality; exit code is a number
            console.log('Stop');
        else
            console.log('Start');
    });
};
Process();

Setting process.stdout to a file per node.js core cluster worker

I'm trying to use node core's cluster feature.
I would like the stdout and stderr streams to output to a file, one for each worker id.
Something much like the following:
// Attempt to redirect this worker's output to a per-worker log file.
var fs = require('fs'),
env = process.env,
// older cluster versions exposed NODE_WORKER_ID; later ones NODE_UNIQUE_ID
workerId = env.NODE_WORKER_ID || env.NODE_UNIQUE_ID;
// NOTE(review): reassigning process.stdout has no effect on console.log,
// which keeps a reference to the original stream — this is why the approach
// does not work, as the question states.
process.stdout = fs.createWriteStream(__dirname + '/app#' + workerId + '.log', {
encoding: 'utf8'
});
Unfortunately, it does not seem to rewrite process.stdout like this.
Is there a way to achieve this, or should this be done differently? Currently when I run my cluster I am getting all output from all processes in one console, which is extremely messy.
I ended up doing the following:
//create a new stdout file stream
// NOTE(review): this snippet is a fragment — the final "});" closes a callback
// whose opening statement is outside this excerpt, and stdout, stderr,
// stdoutFile, stderrFile and hookWriteStream come from the enclosing scope.
//create a new stdout file stream
var stdoutFS = fs.createWriteStream(stdoutFile, {
encoding: 'utf8',
flags : 'a+'
});
//create a new stderr file stream
var stderrFS = fs.createWriteStream(stderrFile, {
encoding: 'utf8',
flags : 'a+'
});
//pipe stdout to a worker file
var unhookStdout = hookWriteStream(stdout, function(string, encoding, fd) {
stdoutFS.write(string, encoding || 'utf8');
});
console.log('\n\nPrepared new stdout hook to worker file.');
//pipe stderr to a worker file
var unhookStderr = hookWriteStream(stderr, function(string, encoding, fd) {
stderrFS.write(string, encoding || 'utf8');
});
console.log('Prepared new stderr hook to worker file.');
//unhook when things go wrong
stdoutFS.once('close', function() {
unhookStdout();
console.log('Unhooked stdout.');
});
stdoutFS.once('error', function(err) {
unhookStdout();
console.error('Error: Unhooked stdout due to error %j.', err);
});
stderrFS.once('close', function() {
unhookStderr();
console.log('Unhooked stderr.');
});
stderrFS.once('error', function(err) {
unhookStderr();
console.error('Error: Unhooked stderr due to error %j.', err);
});
});
// Wraps stream.write so `callback` observes every chunk written while the
// original write still runs. Returns an unhook function that restores the
// original write method.
function hookWriteStream(stream, callback) {
    var originalWrite = stream.write;
    stream.write = function (string, encoding, fd) {
        // Forward to the real write first, preserving `this` and arguments.
        originalWrite.apply(stream, arguments);
        callback(string, encoding, fd);
    };
    return function () {
        stream.write = originalWrite;
    };
}
It may not be very elegant, but so far this is the best solution I've found.
Looks like my idea works to some degree. As long as most of the logging is done using console.log and not written directly to stdout, you'll be good.
Like I said in the comments below, use a script like this:
# CoffeeScript Cakefile task: run the server and inspect each worker's output;
# lines prefixed with "PROCESS<id>" are meant to be routed to per-worker files.
# NOTE(review): indentation was lost in this excerpt; CoffeeScript is
# indentation-sensitive, so restore nesting before running.
fs = require 'fs'
{exec} = require 'child_process'
execAndPipe = (execString) ->
piper = exec execString
piper.stdout.on 'data', (data) ->
if data[0...'PROCESS'.length] == 'PROCESS'
# extract the worker ID and output
# to a corresponding file
piper.stderr.on 'data', (data) ->
if data[0...'PROCESS'.length] == 'PROCESS'
# extract the worker ID and output
# to a corresponding file
task 'run', 'Run the server', ->
execAndPipe 'node blah.js'
to run your server. Then just redefine console.log like:
// Tag every log line with "PROCESS<worker id>" so the parent process reading
// this worker's stdout can route the line to the right file.
console.log = function (d) {
    var line = 'PROCESS' + WORKERID + d + '\n';
    process.stdout.write(line);
};
I kinda doubt you'll be able to rebind stdout directly, so this might be amongst your best options.
If you don't want anything to output to the console at ALL, you could rebind console.log like:
// Route console.log output to a per-worker log file instead of the console.
// The stream is created ONCE: the original opened a brand-new (truncating)
// stream on every call — a file-descriptor leak — and never wrote `d` at all,
// piping process.stdout into the file instead.
var logStream = fs.createWriteStream(__dirname + '/app#' + workerId + '.log', {
    encoding: 'utf8',
    flags: 'a+' // append so earlier log lines survive
});
console.log = function (d) {
    logStream.write(d + '\n');
};
And forget about the external script.

Resources