How to make a child powershell process on node

I'm trying to spawn a PowerShell process on Windows from Node.js so that I can send it commands and read the output. I've done that using the stdio configuration inherited from the parent Node process, but I want to keep it separate, and I can't get any of this to work correctly.
In the code below, I was hoping to get the output of the $PSTableVersion PowerShell variable in the psout.txt file, but it never gets written.
The simple code is:
var
    express = require('express'),
    fs = require('fs'),
    spawn = require("child_process").spawn;

var err = fs.openSync('pserr.txt', 'w');
var out = fs.openSync('psout.txt', 'w');

var writer = fs.createWriteStream(null, {
    fd: fs.openSync('psin.txt', 'w')
});

var child = spawn("powershell.exe", ["-Command", "-"], {
    detached: true,
    stdio: [writer, out, err]
});

writer.write('$PSTableVersion\n');
writer.end();

child.unref();

// just wait
var app = express();
app.listen(3000);
Update
I've tried the code from this Node GitHub issue: https://github.com/joyent/node/issues/8795
It seems that calling a .NET executable and calling a native executable are different things...
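For reference, the pattern I'd try next keeps stdio piped back to the Node process and only redirects it to files manually. This is just a rough sketch (using PowerShell's built-in $PSVersionTable variable), not something I've verified fixes the problem above:
var fs = require('fs');
var spawn = require('child_process').spawn;

// Keep stdin/stdout/stderr as pipes so Node can write commands and read output.
var child = spawn('powershell.exe', ['-Command', '-'], {
    stdio: ['pipe', 'pipe', 'pipe']
});

// Redirect PowerShell's output to files by piping the streams ourselves.
child.stdout.pipe(fs.createWriteStream('psout.txt'));
child.stderr.pipe(fs.createWriteStream('pserr.txt'));

// Send a command, then close stdin so "-Command -" knows the input is finished.
child.stdin.write('$PSVersionTable\n');
child.stdin.end();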

Related

Node child_process pass argv when forked

I have a Node application with an Express server. I also have Node scripts in a server folder. During some events I need to get data from separate Node scripts, so I create a child process.
Without arguments everything works fine, but I need to pass some data from the parent process.
var express = require('express');
var router = express.Router();
var child_process = require('child_process');

router.get('/:site/start', function(req, res, next) {
    const basedir = req.app.get('basedir');
    const child_script_path = basedir + '/scripts/script.js';
    const child_argv = [
        '--slowmo=0',
        '--headless=1'
    ];
    child = child_process.fork(child_script_path, {
        execArgv: child_argv
    });
    ...
    }
});
When I try to pass arguments and run the script through Express, these errors are shown:
/home/user/.nvm/versions/node/v8.9.4/bin/node: bad option: --slowmo=0
/home/user/.nvm/versions/node/v8.9.4/bin/node: bad option: --headless=1
But when I run the script from the command line like:
node /scripts/script.js --slowmo=0 --headless=1
I get no errors and the script can read the args from the command line.
How can I pass args to child script in this situation?
Ubuntu 16.04
Node 8.9.4
Express 4.15.5
The execArgv option is used to pass arguments to the Node executable itself, not to your script.
This can be useful for passing specific execution flags to your forked process.
If you want to pass arguments to your script, you should use the args parameter:
child_process.fork(modulePath[, args][, options])
Example:
const child_process = require('child_process');

const child_script_path = './script.js';
const child_argv = [
    '--foo',
    '--bar'
];
const child_execArgv = [
    '--use-strict'
];

let child = child_process.fork(child_script_path, child_argv, {
    execArgv: child_execArgv // script.js will be executed in strict mode
});

// script.js
console.log(process.argv[2], process.argv[3]); // --foo --bar
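Applied to the route from your question, that would look something like this (just a sketch; --slowmo and --headless are read by script.js itself, not by Node):
router.get('/:site/start', function(req, res, next) {
    const basedir = req.app.get('basedir');
    const child_script_path = basedir + '/scripts/script.js';

    // Script arguments go in the second parameter; execArgv is reserved for Node flags.
    const child_argv = ['--slowmo=0', '--headless=1'];

    const child = child_process.fork(child_script_path, child_argv);
    // ...
});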

NodeJS - piping multiple FFMPEG processes

I am trying to program a converter that can take any video source and convert it to MP3. The MP3 should be saved to my hard drive, or to a buffer so I can send it via Telegram.
It works well so far; the only problem I am facing is that it can only handle one video at a time, and I don't know why.
// IMPORTS
var fs = require('fs');
var https = require('https');
var child_process = require('child_process');

// EVENTEMITER (Not used so far)
var util = require('util');
var EventEmitter = require('events').EventEmitter;

// STREAMHANDLER
var StreamHandler = function(url, name){
    // VARIABLES
    self = this;
    this.url = url;
    this.name = name;

    // CREATE FFMPEG PROCESS
    var spawn = child_process.spawn;
    var args = ['-i', 'pipe:0', '-f', 'mp3', '-ac', '2', '-ab', '128k', '-acodec', 'libmp3lame', 'pipe:1'];
    this.ffmpeg = spawn('ffmpeg', args);

    // GRAB STREAM
    https.get(url, function(res) {
        res.pipe(self.ffmpeg.stdin);
    });

    // WRITE TO FILE
    this.ffmpeg.stdout.pipe(fs.createWriteStream(name));

    //DEBUG
    this.ffmpeg.stdout.on("data", function (data) {
        console.error(self.name);
    });
}

util.inherits(StreamHandler, EventEmitter);

// TESTING
var test1 = new StreamHandler(vidUrl, "test1.mp3");
test1.ffmpeg.on("exit", function (code, name, signal) {
    console.log("Finished: " + test1.name);
});

var test2 = new StreamHandler(vidUrl, "test2.mp3");
test2.ffmpeg.on("exit", function (code, name, signal) {
    console.log("Finished: " + test2.name);
});
It skips test1.mp3 and only converts test2.mp3, even though two ffmpeg processes were created.
After test2.mp3 is converted, the other ffmpeg process stays open but does nothing, and the Node program gets stuck waiting (I guess) for it to send something.
I hope someone can help me :)
Using your code, I had the same problem. It would hang at the end and only output data for the test2.mp3 file. I'm not exactly sure what caused the problem, but I changed it a bit and this works for me:
// IMPORTS
var fs = require('fs');
//var https = require('https');
var http = require('http');
var child_process = require('child_process');

// EVENTEMITER (Not used so far)
var util = require('util');
var EventEmitter = require('events').EventEmitter;

// These never change...
var spawn = child_process.spawn;
var args = ['-i', 'pipe:0', '-f', 'mp3', '-ac', '2', '-ab', '128k', '-acodec', 'libmp3lame', 'pipe:1'];

// STREAMHANDLER
var StreamHandler = function(url, name){
    // CREATE FFMPEG PROCESS
    var ffmpeg = spawn('ffmpeg', args);

    // GRAB STREAM
    http.get(url, function(res) {
        res.pipe(ffmpeg.stdin);
    });

    // WRITE TO FILE
    ffmpeg.stdout.pipe(fs.createWriteStream(name));

    ffmpeg.on("exit", function() {
        console.log("Finished:", name);
    });

    //DEBUG
    ffmpeg.stdout.on("data", function(data) {
        console.error(name, "received data");
    });
}
util.inherits(StreamHandler, EventEmitter);
// TESTING
var vidUrl = 'http://www.sample-videos.com/video/mp4/720/big_buck_bunny_720p_1mb.mp4';
var test1 = new StreamHandler(vidUrl, "test1.mp3");
var test2 = new StreamHandler(vidUrl, "test2.mp3");
I am using http instead of https, because I didn't have a sample video at an https url available. It shouldn't make a difference.
I moved the spawn and args variables out of the object, because they don't change. I also do not use this to store the local variables. I just use a normal closure instead. Finally, I moved the exit event handling code inside the object. I just think it's better to group all that stuff together -- plus, it's only declared once rather than for each new process you create.
Running this gives me the following output (I saved the script as ffmpeg.js):
$ node ffmpeg.js
test2.mp3 received data
Finished: test2.mp3
test1.mp3 received data
Finished: test1.mp3
Also, just a tip. If you want to use the this object inside StreamHandler, I would recommend using arrow functions if your version of Node supports them. This code also works:
var StreamHandler = function(url, name){
    // CREATE FFMPEG PROCESS
    this.ffmpeg = spawn('ffmpeg', args);

    // GRAB STREAM
    http.get(url, (res) => {
        res.pipe(this.ffmpeg.stdin);
    });

    // WRITE TO FILE
    this.ffmpeg.stdout.pipe(fs.createWriteStream(name));

    this.ffmpeg.on("exit", () => {
        console.log("Finished:", name);
    });

    //DEBUG
    this.ffmpeg.stdout.on("data", (data) => {
        console.error(name, "received data");
    });
}
Notice that with arrow functions I don't have to use var self = this. Avoiding that is pretty much the reason arrow functions were added to JavaScript.
Hope this helps!
-- EDIT --
Ok, I figured it out. The problem in your code is this line:
self = this;
It should be:
var self = this;
Without the var specifier, you are creating a global variable. So the second time you call new StreamHandler, you overwrite the self variable. That's why the test1.mp3 file hangs and the test2.mp3 file is the only one that finishes. By adding var, your original script now works for me.
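To see the effect in isolation, here is a tiny standalone sketch of the same mistake:
// Without "var", "self" becomes an implicit global shared by every instance.
function Handler(name) {
    self = this;            // global assignment
    this.name = name;
    setTimeout(function() {
        console.log(self.name);  // always the name of the *last* instance created
    }, 100);
}

new Handler("first");
new Handler("second");
// Prints "second" twice, because the second constructor call overwrote the global self.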

Logging stdout and stderr of Node

I am using the boilerplate code of mean.io and starting my server with the command:
node server.js
How do I log stdout and stderr of my Express application?
Here's my file server.js:
'use strict';

/**
 * Module dependencies.
 */
var mongoose = require('mongoose'),
    passport = require('passport'),
    logger = require('mean-logger');

/**
 * Main application entry file.
 * Please note that the order of loading is important.
 */

// Initializing system variables
var config = require('./server/config/config');
var db = mongoose.connect(config.db);

// Bootstrap Models, Dependencies, Routes and the app as an express app
var app = require('./server/config/system/bootstrap')(passport, db);

// Start the app by listening on <port>, optional hostname
app.listen(config.port, config.hostname);

console.log('Mean app started on port ' + config.port + ' (' + process.env.NODE_ENV + ')');

// Initializing logger
logger.init(app, passport, mongoose);

// Expose app
exports = module.exports = app;
What about this?
console.log("I will goto the STDOUT");
console.error("I will goto the STDERR");
Note: both of these functions automatically append a newline to your input.
If you don't want a newline appended, write to the streams directly:
process.stdout.write("I will goto the STDOUT")
process.stderr.write("I will goto the STDERR")
Both process.stdout and process.stderr are streams, so you can even pipe a stream into them. See Node.js docs on streams for more info.
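For example, you can pipe a file straight to stdout (a minimal sketch; app.log is just a placeholder file name):
var fs = require('fs');

// Any readable stream can be piped into process.stdout.
fs.createReadStream('app.log').pipe(process.stdout);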
You can do this by writing to the stdout and stderr streams:
process.stdout.write('Hello')
or
process.stderr.write('Error')
Better still, use a third-party logging module like winston or bunyan.
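For instance, a minimal winston setup might look like this (a sketch assuming winston 3.x; the file name is a placeholder):
var winston = require('winston');

// Log to the console and to a file at the same time.
var logger = winston.createLogger({
    transports: [
        new winston.transports.Console(),
        new winston.transports.File({ filename: 'combined.log' })
    ]
});

logger.info('I will go to the console and combined.log');
logger.error('So will I, marked as an error');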
The only way I can think of to do this is to spawn a child process (much like the fork system call), whose stdout and stderr you can then redirect to files:
var out = fs.openSync('./output.log', 'a')
  , err = fs.openSync('./error.log', 'a');

require('child_process').spawn('./server', [], {
    detached : true,
    stdio : ['ignore', out, err]
});

How to interact with multiple console windows?

How can I interact with multiple console windows from one Node.js script?
So far I have researched a bit and have not found anything that covers my case.
What I want to accomplish is to have one main console window that reads my input,
1. action#1
2. action#2
> do 1 // select action
and redirects its output to another console window, named Logger, which shows the stdout of the action that the user selected, while keeping the main "select action" console window clean.
Well, I managed to find a way around it, since I wanted to stay with Node.js all the way.
start.js
var cp = require("child_process");
cp.exec('start "Logger" cmd /K node logger.js',[],{});
cp.exec("start cmd /K node startAdminInterface.js",[],{});
setTimeout(function(){process.exit(0);},2000);
logger.js
var net = require('net');

net.createServer(function (socket) {
    socket.on('data', function(d){
        console.log(": " + d.toString("utf8"));
    });
    socket.on('error', function(err){
        console.log("- An error occured : " + err.message);
    });
}).listen(9999);
startAdminInterface.js
var net = require("net");
var logger = net.connect(9999);
var readline = require('readline'),
rl = readline.createInterface(process.stdin,process.stdout);
rl.setPrompt('> ');
rl.prompt();
rl.on('line', function(line) {
logger.write(line);
rl.prompt();
}).on('close', function() {
process.exit(0);
});
Bottom line: it's a workaround, not exactly what I was after, but I saw potential in it. logger.js could listen to multiple sources, which is an enormous plus for the application I'm building.
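For instance, any other script could feed the same Logger window just by connecting to the same port; a hypothetical second producer might look like this:
// anotherSource.js - hypothetical extra producer writing to the same Logger window
var net = require('net');
var logger = net.connect(9999);

setInterval(function() {
    logger.write('heartbeat from anotherSource at ' + new Date().toISOString());
}, 1000);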

Spawned phantomjs process hanging

I'm trying to create a Node server that spawns PhantomJS processes to create screenshots. The grab.js script works fine when executed directly, and I've confirmed that it writes to stdout. The problem is that the Node code that spawns the process simply hangs. I've confirmed that phantomjs is in the path. Does anyone know what might be happening here or how I might troubleshoot it?
Here's the phantomjs code (grab.js) that renders the page and writes the data to stdout:
var page = require('webpage').create(),
    system = require('system'),
    fs = require('fs');

var url = system.args[1] || 'google.com';

page.viewportSize = {
    width: 1024,
    height: 1200
};

page.open(url, function() {
    var b64 = page.renderBase64('png');
    fs.write('/dev/stdout', b64, 'w');
    phantom.exit();
});
And here's the Node code that spawns the PhantomJS process and prints the result (this is what hangs):
var http = require('http'),
    exec = require('child_process').exec,
    fs = require('fs');

exec('phantomjs grab.js google.com', function(error, stdout, stderr) {
    console.log(error, stdout, stderr);
});
I have had similar issues with exec, then switched to using spawn instead and it worked.
According to this article: use spawn when you want the child process to return huge binary data to Node, and use exec when you want the child process to return simple status messages.
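A spawn-based version of the Node side could look something like this (a rough sketch of the idea; it collects the base64 output as it streams in rather than buffering everything through exec):
var spawn = require('child_process').spawn;

var phantom = spawn('phantomjs', ['grab.js', 'google.com']);
var chunks = [];

// stdout arrives as a stream of chunks rather than one buffered string.
phantom.stdout.on('data', function(chunk) {
    chunks.push(chunk);
});

phantom.stderr.on('data', function(chunk) {
    console.error(chunk.toString());
});

phantom.on('close', function(code) {
    var b64 = Buffer.concat(chunks).toString();
    console.log('phantomjs exited with code ' + code + ', received ' + b64.length + ' base64 characters');
});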
hth
I had the same problem; in my case it was not in Node.js but in PhantomJS (v2.1).
It's a known problem where PhantomJS's open method hangs.
I also found a second link (I guess by the same author) in which the author points out that requestAnimationFrame does not work well with tween.js, which causes the freezing: PhantomJS returns a Unix timestamp but tween.js expects a DOMHighResTimeStamp, and so on...
The trick is to inject request-animation-frame.js (which is also provided in that article).
