Send NodeJS child process stdout to Express response object

I currently pipe the stdout of a child process to an (Express) response object like this:
try {
    let proc = spawn(exe, args);
    proc.stdout.pipe(response);
    let exitCode = await ending.completion(proc);
}
finally {
    // my own clean up goes here (e.g. releasing locks)
}
where ending.completion is defined as
async function (process) {
    return new Promise(function (resolve, reject) {
        let done = false;
        process.on('exit', (code, signal) => {
            if (!done) {
                done = true;
                if (code !== null) {
                    resolve(code);
                } else {
                    reject(signal);
                }
            }
        });
        process.on('error', (err) => {
            if (!done) {
                done = true;
                reject(err);
            }
        });
    });
};
This generally works fine except for the fact that proc internally does a lot of waiting (on the download/transfer) before it can complete/return. I would prefer allowing it to dump its output into a buffer, which then trickles into the response object as the download proceeds. How can I introduce such a buffer?
(A second (and I think related) problem is that if the download is aborted client-side, proc never completes but keeps waiting on its stdout.)

Looks like the exec method does exactly what you need.
Spawns a shell then executes the command within that shell, buffering
any generated output.
Use it instead of spawn. Note that unlike spawn, exec expects the whole command (with all the args) as a single string. Here is an example from the official documentation:
const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function lsExample() {
    const { stdout, stderr } = await exec('ls');
    console.log('stdout:', stdout);
    console.error('stderr:', stderr);
}
lsExample();
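For the Express case in the question, the promisified exec could replace the spawn/pipe combination roughly like this. This is a sketch only: the route, command, and cleanup are stand-ins for the question's own code, and since exec buffers all of stdout in memory, maxBuffer may need raising for large transfers.

const util = require('util');
const exec = util.promisify(require('child_process').exec);

app.get('/download', async (request, response, next) => {
    try {
        // exec collects the child's entire stdout into a buffer and only
        // resolves once the process exits, so the response is written in
        // one go instead of trickling out as the transfer proceeds
        const { stdout } = await exec('my-exe --with args', {
            maxBuffer: 16 * 1024 * 1024 // default is 1 MiB in current Node versions
        });
        response.send(stdout);
    } catch (err) {
        next(err);
    } finally {
        // own clean up goes here (e.g. releasing locks), as in the question
    }
});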

Related

UI freezes for a short moment while trying to execute multiple commands in a gnome shell extension

Original question: Multiple arguments in Gio.Subprocess
So currently I'm trying to execute multiple asynchronous commands in my gnome-shell-extension via Gio.Subprocess. This works fine if I put all the commands into a single chained command with && in the command vector of the Subprocess. The drawback of this solution is that the output of the different chained commands is only updated once, and the execution time may be long.
What I'm now trying to do is to execute every command on its own at the same time. That way the output of a command with a short interval can be updated while another one is still running.
Let's say these are my commands, in this case I would like to execute each command every second:
let commands = {"commands":[{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1}]}
Then I'm calling my refresh function for each command.
commands.commands.forEach(command => {
    this.refresh(command);
})
What is happening now, is that the gnome UI is freezing almost every second, not much, but I can see my mouse cursor or scrolling stop for a very short amount of time, even though I use asynchronous communication.
What I have found out from debugging is that it seems to be the initialization of the Subprocess which causes the small freeze, maybe because all the commands are using it nearly at the same time?
proc.init(cancellable);
I think the documentation says that the init method is synchronous (https://developer.gnome.org/gio//2.56/GInitable.html#g-initable-init) and that there also seems to be an async version (https://developer.gnome.org/gio//2.56/GAsyncInitable.html#g-async-initable-init-async), but Gio.Subprocess only implements the synchronous one (https://developer.gnome.org/gio//2.56/GSubprocess.html).
So the final question is: what would be the correct way to avoid the freezing? I tried to move the init part into an asynchronous function and continue with the command execution via callbacks after it is done, but with no luck. Maybe this is even the completely wrong approach, though.
Whole extension.js (final updating of the output is not part of this version, just for simplicity):
const Main = imports.ui.main;
const St = imports.gi.St;
const GLib = imports.gi.GLib;
const Mainloop = imports.mainloop;
const Gio = imports.gi.Gio;
const ExtensionUtils = imports.misc.extensionUtils;
const Me = ExtensionUtils.getCurrentExtension();

let output, box, gschema, stopped;
var settings;

let commands = {"commands":[{"command":"ls","interval":1},
    {"command":"ls","interval":1},
    {"command":"ls","interval":1},
    {"command":"ls","interval":1},
    {"command":"ls","interval":1},
    {"command":"ls","interval":1},
    {"command":"ls","interval":1}]}

function init() {
    // nothing to do here
}

function enable() {
    stopped = false;
    gschema = Gio.SettingsSchemaSource.new_from_directory(
        Me.dir.get_child('schemas').get_path(),
        Gio.SettingsSchemaSource.get_default(),
        false
    );
    settings = new Gio.Settings({
        settings_schema: gschema.lookup('org.gnome.shell.extensions.executor', true)
    });
    box = new St.BoxLayout({ style_class: 'panel-button' });
    output = new St.Label();
    box.add(output, {y_fill: false, y_align: St.Align.MIDDLE});
    Main.panel._rightBox.insert_child_at_index(box, 0);
    commands.commands.forEach(command => {
        this.refresh(command);
    });
}

function disable() {
    stopped = true;
    log("Executor stopped");
    Main.panel._rightBox.remove_child(box);
}

async function refresh(command) {
    await this.updateGui(command);
    Mainloop.timeout_add_seconds(command.interval, () => {
        if (!stopped) {
            this.refresh(command);
        }
    });
}

async function updateGui(command) {
    await execCommand(['/bin/sh', '-c', command.command]).then(stdout => {
        if (stdout) {
            let entries = [];
            stdout.split('\n').map(line => entries.push(line));
            let outputAsOneLine = '';
            entries.forEach(output => {
                outputAsOneLine = outputAsOneLine + output + ' ';
            });
            if (!stopped) {
                log(outputAsOneLine);
                //output.set_text(outputAsOneLine);
            }
        }
    });
}

async function execCommand(argv, input = null, cancellable = null) {
    try {
        let flags = Gio.SubprocessFlags.STDOUT_PIPE;
        if (input !== null)
            flags |= Gio.SubprocessFlags.STDIN_PIPE;
        let proc = new Gio.Subprocess({
            argv: argv,
            flags: flags
        });
        proc.init(cancellable);
        let stdout = await new Promise((resolve, reject) => {
            proc.communicate_utf8_async(input, cancellable, (proc, res) => {
                try {
                    let [ok, stdout, stderr] = proc.communicate_utf8_finish(res);
                    resolve(stdout);
                } catch (e) {
                    reject(e);
                }
            });
        });
        return stdout;
    } catch (e) {
        logError(e);
    }
}
It's doubtful that Gio.Initable.init() is what's causing the freeze. First some comments on the usage of GSubprocess here.
function execCommand(argv, input = null, cancellable = null) {
    try {
        /* If you expect to get output from stderr, you need to open
         * that pipe as well, otherwise you will just get `null`. */
        let flags = (Gio.SubprocessFlags.STDOUT_PIPE |
                     Gio.SubprocessFlags.STDERR_PIPE);

        if (input !== null)
            flags |= Gio.SubprocessFlags.STDIN_PIPE;

        /* Using `new` with an initable class like this is only really
         * necessary if it's possible you might pass a pre-triggered
         * cancellable, so you can call `init()` manually.
         *
         * Otherwise you can just use `Gio.Subprocess.new()` which will
         * do exactly the same thing for you, just in a single call
         * without a cancellable argument. */
        let proc = new Gio.Subprocess({
            argv: argv,
            flags: flags
        });
        proc.init(cancellable);

        /* If you want to actually quit the process when the cancellable
         * is triggered, you need to connect to the `cancel` signal */
        if (cancellable instanceof Gio.Cancellable)
            cancellable.connect(() => proc.force_exit());

        /* Remember the process started running as soon as we called
         * `init()`, so this is just the threaded call to read the
         * process's output. */
        return new Promise((resolve, reject) => {
            proc.communicate_utf8_async(input, cancellable, (proc, res) => {
                try {
                    let [, stdout, stderr] = proc.communicate_utf8_finish(res);

                    /* If you do opt for stderr output, you might as
                     * well use it for more informative errors */
                    if (!proc.get_successful()) {
                        let status = proc.get_exit_status();

                        throw new Gio.IOErrorEnum({
                            code: Gio.io_error_from_errno(status),
                            message: stderr ? stderr.trim() : GLib.strerror(status)
                        });
                    }

                    resolve(stdout);
                } catch (e) {
                    reject(e);
                }
            });
        });

    /* This should only happen if you passed a pre-triggered cancellable
     * or the process legitimately failed to start (eg. command not found) */
    } catch (e) {
        return Promise.reject(e);
    }
}
And notes on Promise/async usage:
/* Don't do this. You're effectively mixing two usage patterns
 * of Promises, and still not catching errors. Expect this to
 * blow up in your face long after you expect it to. */
async function foo() {
    await execCommand(['ls']).then(stdout => log(stdout));
}

/* If you're using `await` in an `async` function that is
 * intended to run by itself, you need to catch errors like
 * regular synchronous code */
async function bar() {
    try {
        // The function will "await" the first Promise to
        // resolve successfully before executing the second
        await execCommand(['ls']);
        await execCommand(['ls']);
    } catch (e) {
        logError(e);
    }
}

/* If you're using Promises in the traditional manner, you
 * must catch them that way as well */
function baz() {
    // The function will NOT wait for the first to complete
    // before starting the second. Since these are (basically)
    // running in threads, they are truly running in parallel.
    execCommand(['ls']).then(stdout => {
        log(stdout);
    }).catch(error => {
        logError(error);
    });

    execCommand(['ls']).then(stdout => {
        log(stdout);
    }).catch(error => {
        logError(error);
    });
}
Now for the implementation:
const Main = imports.ui.main;
const GLib = imports.gi.GLib;
const Gio = imports.gi.Gio;
const St = imports.gi.St;
const ExtensionUtils = imports.misc.extensionUtils;
const Me = ExtensionUtils.getCurrentExtension();

let cancellable = null;
let panelBox = null;

let commands = {
    "commands": [
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1}
    ]
};

function enable() {
    if (cancellable === null)
        cancellable = new Gio.Cancellable();

    panelBox = new St.BoxLayout({
        style_class: 'panel-button'
    });

    // Avoid deprecated methods like `add()`, and try not
    // to use global variables when possible
    let outputLabel = new St.Label({
        y_align: St.Align.MIDDLE,
        y_fill: false
    });

    panelBox.add_child(outputLabel);
    Main.panel._rightBox.insert_child_at_index(panelBox, 0);

    commands.commands.forEach(command => {
        this.refresh(command);
    });
}

function disable() {
    if (cancellable !== null) {
        cancellable.cancel();
        cancellable = null;
    }

    log("Executor stopped");

    if (panelBox !== null) {
        Main.panel._rightBox.remove_child(panelBox);
        panelBox = null;
    }
}

async function refresh(command) {
    try {
        await this.updateGui(command);

        // Don't use Mainloop anymore, just use GLib directly
        GLib.timeout_add_seconds(0, command.interval, () => {
            if (cancellable && !cancellable.is_cancelled())
                this.refresh(command);

            // Always explicitly return false (or this constant)
            // unless you're storing the returned ID to remove the
            // source later.
            //
            // Returning true (GLib.SOURCE_CONTINUE) or a value that
            // evaluates to true will cause the source to loop. You
            // could refactor your code to take advantage of that
            // instead of constantly creating new timeouts each
            // second.
            return GLib.SOURCE_REMOVE;
        });
    } catch (e) {
        // We can skip logging cancelled errors, since we probably
        // did that on purpose if it happens
        if (!e.matches(Gio.IOErrorEnum, Gio.IOErrorEnum.CANCELLED))
            logError(e, 'Failed to refresh');
    }
}

// `updateGui()` is wrapped in a try...catch above so it's safe to
// skip that here.
async function updateGui(command) {
    let stdout = await execCommand(['/bin/sh', '-c', command.command]);

    // This will probably always be true if the above doesn't throw,
    // but you can check if you want to.
    if (stdout) {
        let outputAsOneLine = stdout.replace('\n', '');

        // No need to check the cancellable here; if it's
        // triggered the command will fail and throw an error
        log(outputAsOneLine);

        // let outputLabel = panelBox.get_first_child();
        // outputLabel.set_text(outputAsOneLine);
    }
}
It's hard to say what is causing the freeze you are experiencing, but I would first clean up your Promise usage and be more explicit about how you use timeout sources, as these may be stacking every second.
If possible, you might want to group your subprocesses into a single timeout source, possibly using Promise.all() to await them all at once; a sketch follows below. Overloading the event loop with pending sources and Promises could also be the cause of the freeze.
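As a rough sketch of that grouping idea, reusing execCommand() from above and assuming, for simplicity, that all the commands share a one-second interval:

function refreshAll() {
    GLib.timeout_add_seconds(GLib.PRIORITY_DEFAULT, 1, () => {
        if (cancellable === null || cancellable.is_cancelled())
            return GLib.SOURCE_REMOVE;

        // One timeout source, one batch of parallel subprocesses
        let pending = commands.commands.map(command =>
            execCommand(['/bin/sh', '-c', command.command], null, cancellable));

        Promise.all(pending).then(outputs => {
            // update the label once per batch here
        }).catch(e => {
            if (!e.matches(Gio.IOErrorEnum, Gio.IOErrorEnum.CANCELLED))
                logError(e, 'Failed to refresh');
        });

        // Let this one source loop instead of stacking new ones
        return GLib.SOURCE_CONTINUE;
    });
}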

Node.js child process isn't receiving stdin unless I close the stdin stream

I'm building a discord bot that wraps a terraria server in node.js so server users can restart the server and similar actions. I've managed to finish half the job, but I can't seem to create a command to execute commands on the terraria server. I've set it to write the command to the stdin of the child process and some basic debugging verifies that it does, but nothing apparently happens.
In the Node.js docs for child process stdin, it says "Note that if a child process waits to read all of its input, the child will not continue until this stream has been closed via end()." This seems likely to be the problem, as calling the end() function on it does actually send the command as expected. That said, it seems hard to believe that I'm unable to continuously send commands to stdin without having to close it.
Is this actually the problem, and if so what are my options for solving it? My code may be found below.
const discordjs = require("discord.js");
const child_process = require("child_process");
const tokens = require("./tokens");

const client = new discordjs.Client();

const terrariaServerPath = "C:\\Program Files (x86)\\Steam\\steamapps\\common\\Terraria\\TerrariaServer.exe";
const terrariaArgs = ['-port', '7777', "-maxplayers", "8", "-world", "test.wld"];
var child = child_process.spawn(terrariaServerPath, terrariaArgs);

client.on('ready', () => {
    console.log(`Logged in as ${client.user.tag}!`);
});

client.on('disconnect', () => {
    client.destroy();
});

client.on('message', msg => {
    if (msg.channel.name === 'terraria') {
        var msgSplit = msg.content.split(" ");
        if (msgSplit[0] === "!restart") {
            child.kill();
            child = child_process.spawn(terrariaServerPath, terrariaArgs);
            registerStdio();
            msg.reply("restarting server");
        }
        if (msgSplit[0] === "!exec") {
            msg.reply(msgSplit[1]);
            child.stdin.write(msgSplit[1] + "\n");
            child.stdin.end();
        }
    }
});

client.login(tokens.discord_token);

var registerStdio = function () {
    child.stdout.on('data', (data) => {
        console.log(`${data}`);
    });
    child.stderr.on('data', (data) => {
        console.error(`${data}`);
    });
};
registerStdio();
I was able to solve the problem by using the library node-pty. As near as I can tell, the problem was that the child process was not reading the stdin itself and I was unable to flush it. Node-pty creates a virtual terminal object which can be written to instead of stdin. This object does not buffer writes and so any input is immediately sent to the program.
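For reference, a minimal sketch of that node-pty approach (the API names are node-pty's; check its documentation for the version you install):

const pty = require('node-pty');

// Spawn the server inside a pseudo-terminal instead of plain pipes
var child = pty.spawn(terrariaServerPath, terrariaArgs, {
    name: 'xterm-color',
    cols: 80,
    rows: 30
});

// stdout and stderr arrive merged on a single data event
child.onData(data => console.log(data));

// Inside the '!exec' handler: writes reach the program
// immediately, no end() required
child.write(msgSplit[1] + '\r');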

Promise resolving to child stream stdout and rejecting child stream stderr

I'd like to build a promise that spawns a child process using require('child_process').spawn. The process streams its output to stdout and its errors to stderr.
I would like the promise to:
reject(child.stderr stream (or its data)) if child.stderr emits any data.
resolve(child.stdout stream) only if no error is emitted.
I'm doing this because I want to chain the promise to:
a then that processes the child.stdout stream (upload the stream to an S3 bucket).
a catch that can process the child.stderr stream, allowing me to properly handle errors.
Is it feasible to combine promises and process streams like this?
I was thinking of working around stderr, but I'm unsure about what happens to stdout in the meantime if a lot of data comes into it and I don't process it fast enough.
As I see it, the issue is that you don't know whether you ever got data on stderr until the entire process is done as it could put data there at any time.
So, you have to wait for the entire process to be done before calling resolve() or reject(). And, if you then want the entire data to be sent to either one of those, you'd have to buffer them. You could call reject() as soon as you got data on stderr, but you aren't guaranteed to have all the data yet because it's a stream.
So, if you don't want to buffer, you're better off just letting the caller see the streams directly.
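A sketch of that non-buffering alternative, assuming a reasonably recent Node (for the 'spawn' event) and illustrative names:

const spawn = require('child_process').spawn;

function runStreaming(cmd, args) {
    return new Promise((resolve, reject) => {
        const child = spawn(cmd, args);
        // 'spawn' fires once the process has actually started; from here
        // on the caller can pipe child.stdout (e.g. to an S3 upload) and
        // watch child.stderr itself
        child.on('spawn', () => resolve(child));
        // 'error' fires if the process could not be started at all
        child.on('error', reject);
    });
}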
If you are OK with buffering the data, you can buffer it yourself like this:
Based on the spawn example in the node.js doc, you could add promise support to it like this:
const spawn = require('child_process').spawn;

function runIt(cmd, args) {
    return new Promise(function (resolve, reject) {
        const ls = spawn(cmd, args);
        // Edit thomas.g: My child process generates binary data so I use buffers instead, see my comments inside the code
        // Edit thomas.g: let stdoutData = Buffer.alloc(0);
        let stdoutData = "";
        let stderrData = "";
        ls.stdout.on('data', (data) => {
            // Edit thomas.g: stdoutData = Buffer.concat([stdoutData, data]);
            stdoutData += data;
        });
        ls.stderr.on('data', (data) => {
            stderrData += data;
        });
        ls.on('close', (code) => {
            if (stderrData) {
                reject(stderrData);
            } else {
                resolve(stdoutData);
            }
        });
        ls.on('error', (err) => {
            reject(err);
        });
    });
}

// usage
runIt('ls', ['-lh', '/usr']).then(function (stdoutData) {
    // process stdout data here
}, function (err) {
    // process stderr data here or error object (if some other type of error)
});

Redirecting stdout to file nodejs

I've created:
var access = fs.createWriteStream('/var/log/node/api.access.log', { flags: 'w' });
Then piped:
process.stdout.pipe(access);
Then tried:
console.log("test");
And nothing has appeared in /var/log/node/api.access.log. However, this way works:
process.stdout.pipe(access).write('test');
Could someone explain what I am doing wrong?
I solved this problem the following way:
var access = fs.createWriteStream('/var/log/node/api.access.log');
process.stdout.write = process.stderr.write = access.write.bind(access);
Of course you can also separate stdout and stderr if you want.
I would also strongly recommend handling uncaught exceptions:
process.on('uncaughtException', function (err) {
    console.error((err && err.stack) ? err.stack : err);
});
This will cover the following situations:
process.stdout.write
process.stderr.write
console.log
console.dir
console.error
someStream.pipe(process.stdout);
throw new Error('Crash');
throw 'never do this';
throw undefined;
Check out console.Console, the parent class of the normal console.
var myLogFileStream = fs.createWriteStream(pathToMyLogFile);
var myConsole = new console.Console(myLogFileStream, myLogFileStream);
You can then use myConsole.log, myConsole.error, myConsole.dir, etc. and write directly to your file.
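For instance, a minimal sketch (the path is illustrative):

const fs = require('fs');
const myLogFileStream = fs.createWriteStream('/var/log/node/api.access.log', { flags: 'a' });

// First argument is the Console's stdout, second its stderr
const myConsole = new console.Console(myLogFileStream, myLogFileStream);

myConsole.log('goes to the file, not the terminal');
myConsole.error('errors land in the same file here');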
You can also monkey patch process.stdout.write as follows:
var fn = process.stdout.write;

function write() {
    fn.apply(process.stdout, arguments);
    myLogFileStream.write.apply(myLogFileStream, arguments);
}

process.stdout.write = write;
There are also other options for overwriting console._stdout, depending on the motivation for logging stdout to a file.
process.stdout is a Writable. pipe is a method of Readable (cf. the Stream API documentation: https://nodejs.org/api/stream.html).
You can see the documentation of process.stdout here: https://nodejs.org/api/process.html#process_process_stdout
It's surprising that you can do process.stdout.pipe(...); without any error, but I suppose this call just does nothing, except returning a new Writable stream bound to stdout (or maybe it returns process.stdout itself; there's no specification for that in the documentation).
If you want to redirect stdout to a file, you have many solutions:
Just use your command line to do that: node myfile.js > api.access.log.
Replace the console object with your own object, rewriting the console methods.
I'm not sure, but it may be possible to replace process.stdout with your own stream (and then you can do whatever you want with it).
#user3173842, regarding the reply:
"I solved this problem the following way:
var access = fs.createWriteStream('/var/log/node/api.access.log');
process.stdout.write = process.stderr.write = access.write.bind(access);"
You do understand that process.stdout continues to be written to after process.on('exit'), and that the fs.WriteStream therefore closes together with process.stdout, according to
https://github.com/nodejs/node/issues/7606
So the question remains: if the developer wants fs.WriteStream.write() to return to its normal functionality, with the write stream closing when fs.WriteStream.end() is called, how would they go about it? I did:
// a_l = asyncify_listener
// p_std_stream_m is a process stream manager object
p_std_stream_m.std_info.p_stdout_write = process.stdout.write;
process.stdout.write = w_stream.write.bind(w_stream);
process.once('beforeExit', a_l(p_std_stream_m.handler, process.stdout, w_stream));
where in the 'beforeExit' event listener I did:
process.stdout.write = p_std_stream_m.std_info.p_stdout_write;
w_stream.end();
It works, and you use the once method because process.stdout seems to do a lot of work at this time.
Is this good practice? Would you do this, or what would you do in this situation? Anyone should feel free to reply.
Originally based on the #Anatol-user3173842 answer, but in my case I needed to hook stdout & stderr and also write into a file.
So for those who need to keep the normal stdout behaviour in addition to writing into the file, use the following.
For non-errors:
// stdout logging hook
const stdoutWrite0 = process.stdout.write;
process.stdout.write = (args) => { // On stdout write
    CustomLogger.writeToLogFile('log', args); // Write to local log file
    args = Array.isArray(args) ? args : [args]; // Pass only as array to prevent internal TypeError for arguments
    return stdoutWrite0.apply(process.stdout, args);
};
For errors:
// stderr logging hook
const stderrWrite0 = process.stderr.write;
process.stderr.write = (args) => { // On stderr write
    CustomLogger.writeToLogFile('error', args); // Write to local error file
    args = Array.isArray(args) ? args : [args]; // Pass only as array to prevent internal TypeError for arguments
    return stderrWrite0.apply(process.stderr, args);
};
// uncaught exceptions
process.on('uncaughtException', (err) => {
    CustomLogger.writeToLogFile('error', ((err && err.stack) ? err.stack : err));
});
Here is the CustomLogger code, where I also separate the log files by date:
export class CustomLogger {
    static LOGS_DIR = 'location-of-my-log-files';

    private static logDailyName(prefix: string): string {
        const date = new Date().toLocaleDateString().replace(/\//g, '_');
        return `${CustomLogger.LOGS_DIR}/${prefix}_${date}.log`;
    }

    // Called from the hooks above, so it cannot be private
    static writeToLogFile(prefix, originalMsg) {
        const timestamp = Date.now();
        const fileName = this.logDailyName(prefix);
        const logMsg = prepareForLogFile(originalMsg); // the author's own formatting helper (not shown)
        fs.appendFileSync(fileName, `${timestamp}\t${logMsg}\n\n`);
        return originalMsg;
    }
}
Here's a quick example of a logger class that redirects stdout, stderr and exceptions to a file, while still writting everything to the console:
class Logger {
    #log_stream
    #stdout_write
    #stderr_write

    constructor(path) {
        this.#log_stream = fs.createWriteStream(path, { flags: 'a' })
        this.#stdout_write = process.stdout.write.bind(process.stdout)
        this.#stderr_write = process.stderr.write.bind(process.stderr)

        process.stdout.write = this.stdout_write.bind(this)
        process.stderr.write = this.stderr_write.bind(this)

        process.on('uncaughtException', function (err) {
            console.error((err && err.stack) ? err.stack : err)
        })
    }

    stdout_write(buffer) {
        this.#log_stream.write(buffer)
        this.#stdout_write(buffer)
    }

    stderr_write(buffer) {
        this.#log_stream.write(buffer)
        this.#stderr_write(buffer)
    }
}

const logger = new Logger('example.log')

Retrieve stdout to variable

I'm trying to run a child process with the following code:
run = function (cmd, callback) {
    var spawn = require('child_process').spawn;
    var command = spawn(cmd);
    var result = '';
    command.stdout.on('data', function (data) {
        result += data.toString();
    });
    command.on('exit', function () {
        callback(result);
    });
}

execQuery = function (cmd) {
    var result = {
        errnum: 0,
        error: 'No errors.',
        body: ''
    };
    run(cmd, function (message) {
        result.body = message;
        console.log(message);
    });
    return result;
}
After executing execQuery('ls'), result.body is always empty, but console.log does print the value.
I ran a quick test, and the command's exit event fires before all of stdout's data is drained. I at least got the output captured and printed when I changed your exit handler to listen for command.stdout's end event.
command.stdout.on('end', function () {
    callback(result);
});
That should help a bit. Note there are existing libraries you might want to use for this and a truly correct implementation would be significantly more involved than what you have, but my change should address your current roadblock problem.
Random tip: it is the node convention to always reserve the first argument of callback functions for an error and your snippet is inconsistent with that convention. You probably should adjust to match the convention.
Oh sorry, let me address your question about result.body. The run function is ASYNCHRONOUS! That means that your return result; line of code executes BEFORE the run callback body where result.body = message; is. You can't use return values like that anywhere in node when you have I/O involved. You have to use a callback.
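Putting both points together, a minimal callback-based rework might look like this (a sketch only; the errnum/error fields are kept from the question for illustration):

var run = function (cmd, callback) {
    var spawn = require('child_process').spawn;
    var command = spawn(cmd);
    var result = '';
    command.stdout.on('data', function (data) {
        result += data.toString();
    });
    // Wait for stdout to drain rather than for the process to exit
    command.stdout.on('end', function () {
        callback(null, result); // error-first convention
    });
    command.on('error', function (err) {
        callback(err);
    });
};

var execQuery = function (cmd, callback) {
    run(cmd, function (err, message) {
        if (err) {
            callback({ errnum: 1, error: err.message, body: '' });
        } else {
            callback({ errnum: 0, error: 'No errors.', body: message });
        }
    });
};

// usage: the result arrives in the callback, not as a return value
execQuery('ls', function (result) {
    console.log(result.body);
});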
