child_process.fork() in Electron - node.js

Is it possible to fork a child_process from an Electron renderer process? I found some posts around the net, but none of them helped me get my code working.
I created a module that forks child processes. This code works when I run it from cmd under plain Node. But when I integrate it into my Electron app, I cannot communicate with the child via the child.send() method.
// create fork
const fork = require('child_process').fork;
const fs = require('fs');

const img_path = [
    'path/to/an/image1.jpg',
    'path/to/an/image2.jpg',
    'path/to/an/image3.jpg'
];
const cp = [];
const temp_path = img_path.map((item) => item);

createAndResize(2);

function createAndResize(num) {
    return childResize(createChildProcess(num));
}

function createChildProcess(num) {
    if (num <= 0) {
        return cp;
    } else {
        let cf = fork('./child.js');
        cp.push(cf);
        num -= 1;
        return createChildProcess(num);
    }
}

function childResize(list) {
    if (list.length <= 0) {
        return true;
    } else {
        // child_process is created
        let child = list.shift();
        child.on('message', function (data) {
            if (!temp_path.length) {
                process.kill(data);
            } else {
                child.send(temp_path.shift());
            }
        });
        child.send(temp_path.shift());
        setTimeout(function () {
            childResize(list);
        }, 1000);
    }
}
//child.js
process.on('message', function(msg) {
    console.log(msg); // this is never reached
});
EDIT: Based on the comment below, I now fork the child processes from the main process. The communication seems to work, with a few exceptions. But first my new code:
// myView.js
const { remote } = require('electron');
const mainProcess = remote.require('./main.js');
const { forkChildfromMain } = mainProcess;
forkChildfromMain();
// main.js
const fork = require('child_process').fork;
let cp = [];

function forkChildfromMain() {
    createAndResize(4);
}

function createAndResize(num) {
    return childResize(createChildProcess(num));
}

function createChildProcess(num) {
    if (num <= 0) {
        return cp;
    } else {
        let cf = fork('./resize.js');
        cp.push(cf);
        num -= 1;
        return createChildProcess(num);
    }
}

function childResize(list) {
    if (list.length <= 0) {
        return true;
    } else {
        let child = list.shift();
        child.on('message', function (msg) {
            // logs 'Hello World' to the cmd console
            console.log(msg);
        });
        child.send('Hello World');
        setTimeout(function () {
            childResize(list);
        }, 1000);
    }
}

exports.forkChildfromMain = forkChildfromMain;
// child.js
process.on('message', function(msg) {
    // this console statement never gets logged;
    // I think I must integrate an IPC module
    console.log(msg);
    // process sends msg back to main.js
    process.send(msg);
});
OUTDATED: The main problem now is that I think Electron 'spawns' new child processes and does not fork them, because when I look at my task manager I see only one Electron instance. When I run the code in a plain Node environment, I see multiple forked Node instances.
The reason I prefer forking my child processes into multiple Node instances is that I want to do a lot of image manipulation. When I fork children, every child gets its own Node instance with its own memory and so on. I think that is more performant than a single instance sharing memory and resources across all the children.
The second unexpected behavior is that the console.log statement in the child is never printed to my cmd console. But that is the smaller problem :)
EDIT: After analyzing my task manager a little more in depth, I saw that Electron does spawn multiple child processes, like it should.

Electron's renderer process is not the right place for forking child processes; you should think about moving this to the main process.
Nonetheless, it should work the way you describe. If you'd make a minimal example available somewhere I could take a closer look.
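For reference, here is a minimal sketch of the main-process approach using Electron's ipcMain/ipcRenderer modules instead of remote. The channel names and the './resize.js' worker path are my own assumptions, not from the question:
// main.js: fork the child on demand and relay its replies to the renderer
const { ipcMain } = require('electron');
const { fork } = require('child_process');

ipcMain.on('start-resize', (event, imagePath) => {
    const child = fork('./resize.js'); // hypothetical worker script
    child.on('message', (msg) => event.sender.send('resize-done', msg));
    child.send(imagePath);
});

// myView.js (renderer, assuming nodeIntegration is enabled): no fork here, just IPC
const { ipcRenderer } = require('electron');
ipcRenderer.on('resize-done', (event, msg) => console.log(msg));
ipcRenderer.send('start-resize', 'path/to/an/image1.jpg');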

Related

Send NodeJS child process stdout to Express response object

I currently pipe the stdout of a child process to an (Express) response object like this:
try {
    let proc = spawn(exe, args);
    proc.stdout.pipe(response);
    let exitCode = await ending.completion(proc);
}
finally {
    // my own clean up goes here (e.g. releasing locks)
}
where ending.completion is defined as
async function(process){
    return new Promise(function(resolve, reject){
        let done = false;
        process.on('exit', (code, signal) => {
            if (!done) {
                done = true;
                if (code !== null) {
                    resolve(code);
                } else {
                    reject(signal);
                }
            }
        });
        process.on('error', (err) => {
            if (!done) {
                done = true;
                reject(err);
            }
        });
    });
};
This generally works fine, except that proc internally does a lot of waiting (on the download/transfer) before it can complete/return. I would prefer to let it dump its output into a buffer, which then trickles into the response object as the download proceeds. How can I introduce such a buffer?
(A second, and I think related, problem is that if the download is aborted client-side, proc never completes but keeps waiting on its stdout.)
Looks like the exec method does exactly what you need:
Spawns a shell then executes the command within that shell, buffering any generated output.
Use it instead of spawn. Note that unlike spawn, exec expects the whole command (with all its args) as a single string. Here is an example from the official documentation:
const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function lsExample() {
    const { stdout, stderr } = await exec('ls');
    console.log('stdout:', stdout);
    console.error('stderr:', stderr);
}
lsExample();
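Applied to the Express case above, a minimal sketch might look like the following (assuming app is an Express app and that the exe and args from the question can be joined into one shell command string):
const util = require('util');
const exec = util.promisify(require('child_process').exec);

// Hypothetical route handler; exe and args are assumed from the question.
app.get('/download', async (request, response) => {
    try {
        // exec buffers the child's entire stdout (up to maxBuffer,
        // which defaults to 1 MiB) and resolves once the process exits.
        const { stdout } = await exec(`${exe} ${args.join(' ')}`, {
            maxBuffer: 50 * 1024 * 1024 // raise the cap for large outputs
        });
        response.send(stdout);
    } catch (err) {
        response.status(500).send(String(err));
    }
});
Note the trade-off: because exec resolves only after the process has exited, the client receives the whole body at once rather than as a trickling stream.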

UI freezes for a short moment while trying to execute multiple commands in a gnome shell extension

Original question: Multiple arguments in Gio.Subprocess
So currently I'm trying to execute multiple asynchronous commands in my gnome-shell extension via Gio.Subprocess. This works fine if I put all commands into the command vector of the Subprocess as one command chained with &&. The drawback of that solution is that the output of the different chained commands is only updated once, and the execution time may be long.
What I'm now trying to do is execute every command on its own, at the same time. Then the output can be updated when one command has a small interval while another needs more time.
Let's say these are my commands; in this case I would like to execute each of them every second:
let commands = {"commands":[{"command":"ls","interval":1},
let commands = {"commands":[{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1}]}
Then I'm calling my refresh function for each command.
commands.commands.forEach(command => {
    this.refresh(command);
});
What happens now is that the GNOME UI freezes almost every second. Not much, but I can see my mouse cursor or my scrolling stop for a very short moment, even though I use asynchronous communication.
What I have found out from debugging is that it seems to be the initialization of the Subprocess that causes the small freeze, maybe because all the commands are using it at nearly the same time?
proc.init(cancellable);
I think the documentation says that the init method is synchronous (https://developer.gnome.org/gio//2.56/GInitable.html#g-initable-init), and there also seems to be an async version (https://developer.gnome.org/gio//2.56/GAsyncInitable.html#g-async-initable-init-async), but Gio.Subprocess only implements the synchronous one (https://developer.gnome.org/gio//2.56/GSubprocess.html).
So the final question is: what would be the correct way to avoid the freezing? I tried to move the init part into an asynchronous function and continue with the command execution via callbacks after it was done, but with no luck. Maybe this is even the completely wrong approach, though.
Whole extension.js (the final updating of the output is left out of this version, just for simplicity):
const Main = imports.ui.main;
const GLib = imports.gi.GLib;
const Mainloop = imports.mainloop;
const Gio = imports.gi.Gio;
const St = imports.gi.St; // needed for St.BoxLayout / St.Label below
const ExtensionUtils = imports.misc.extensionUtils;
const Me = ExtensionUtils.getCurrentExtension();

let output, box, gschema, stopped;
var settings;

let commands = {"commands":[{"command":"ls","interval":1},
                            {"command":"ls","interval":1},
                            {"command":"ls","interval":1},
                            {"command":"ls","interval":1},
                            {"command":"ls","interval":1},
                            {"command":"ls","interval":1},
                            {"command":"ls","interval":1}]}

function init() {
    // nothing to do here
}

function enable() {
    stopped = false;
    gschema = Gio.SettingsSchemaSource.new_from_directory(
        Me.dir.get_child('schemas').get_path(),
        Gio.SettingsSchemaSource.get_default(),
        false
    );
    settings = new Gio.Settings({
        settings_schema: gschema.lookup('org.gnome.shell.extensions.executor', true)
    });
    box = new St.BoxLayout({ style_class: 'panel-button' });
    output = new St.Label();
    box.add(output, {y_fill: false, y_align: St.Align.MIDDLE});
    Main.panel._rightBox.insert_child_at_index(box, 0);
    commands.commands.forEach(command => {
        this.refresh(command);
    });
}

function disable() {
    stopped = true;
    log("Executor stopped");
    Main.panel._rightBox.remove_child(box);
}

async function refresh(command) {
    await this.updateGui(command);
    Mainloop.timeout_add_seconds(command.interval, () => {
        if (!stopped) {
            this.refresh(command);
        }
    });
}

async function updateGui(command) {
    await execCommand(['/bin/sh', '-c', command.command]).then(stdout => {
        if (stdout) {
            let entries = [];
            stdout.split('\n').map(line => entries.push(line));
            let outputAsOneLine = '';
            entries.forEach(output => {
                outputAsOneLine = outputAsOneLine + output + ' ';
            });
            if (!stopped) {
                log(outputAsOneLine);
                //output.set_text(outputAsOneLine);
            }
        }
    });
}

async function execCommand(argv, input = null, cancellable = null) {
    try {
        let flags = Gio.SubprocessFlags.STDOUT_PIPE;
        if (input !== null)
            flags |= Gio.SubprocessFlags.STDIN_PIPE;
        let proc = new Gio.Subprocess({
            argv: argv,
            flags: flags
        });
        proc.init(cancellable);
        let stdout = await new Promise((resolve, reject) => {
            proc.communicate_utf8_async(input, cancellable, (proc, res) => {
                try {
                    let [ok, stdout, stderr] = proc.communicate_utf8_finish(res);
                    resolve(stdout);
                } catch (e) {
                    reject(e);
                }
            });
        });
        return stdout;
    } catch (e) {
        logError(e);
    }
}
It's doubtful that Gio.Initable.init() is what's causing the freeze. First some comments on the usage of GSubprocess here.
function execCommand(argv, input = null, cancellable = null) {
    try {
        /* If you expect to get output from stderr, you need to open
         * that pipe as well, otherwise you will just get `null`. */
        let flags = (Gio.SubprocessFlags.STDOUT_PIPE |
                     Gio.SubprocessFlags.STDERR_PIPE);

        if (input !== null)
            flags |= Gio.SubprocessFlags.STDIN_PIPE;

        /* Using `new` with an initable class like this is only really
         * necessary if it's possible you might pass a pre-triggered
         * cancellable, so you can call `init()` manually.
         *
         * Otherwise you can just use `Gio.Subprocess.new()` which will
         * do exactly the same thing for you, just in a single call
         * without a cancellable argument. */
        let proc = new Gio.Subprocess({
            argv: argv,
            flags: flags
        });
        proc.init(cancellable);

        /* If you want to actually quit the process when the cancellable
         * is triggered, you need to connect to the `cancel` signal */
        if (cancellable instanceof Gio.Cancellable)
            cancellable.connect(() => proc.force_exit());

        /* Remember the process started running as soon as we called
         * `init()`, so this is just the threaded call to read the
         * process's output. */
        return new Promise((resolve, reject) => {
            proc.communicate_utf8_async(input, cancellable, (proc, res) => {
                try {
                    let [, stdout, stderr] = proc.communicate_utf8_finish(res);

                    /* If you do opt for stderr output, you might as
                     * well use it for more informative errors */
                    if (!proc.get_successful()) {
                        let status = proc.get_exit_status();

                        throw new Gio.IOErrorEnum({
                            code: Gio.io_error_from_errno(status),
                            message: stderr ? stderr.trim() : GLib.strerror(status)
                        });
                    }

                    resolve(stdout);
                } catch (e) {
                    reject(e);
                }
            });
        });

    /* This should only happen if you passed a pre-triggered cancellable
     * or the process legitimately failed to start (eg. command not found) */
    } catch (e) {
        return Promise.reject(e);
    }
}
And notes on Promise/async usage:
/* Don't do this. You're effectively mixing two usage patterns
 * of Promises, and still not catching errors. Expect this to
 * blow up in your face long after you expect it to. */
async function foo() {
    await execCommand(['ls']).then(stdout => log(stdout));
}

/* If you're using `await` in an `async` function that is
 * intended to run by itself, you need to catch errors like
 * regular synchronous code */
async function bar() {
    try {
        // The function will "await" the first Promise to
        // resolve successfully before executing the second
        await execCommand(['ls']);
        await execCommand(['ls']);
    } catch (e) {
        logError(e);
    }
}

/* If you're using Promises in the traditional manner, you
 * must catch them that way as well */
function baz() {
    // The function will NOT wait for the first to complete
    // before starting the second. Since these are (basically)
    // running in threads, they are truly running in parallel.
    execCommand(['ls']).then(stdout => {
        log(stdout);
    }).catch(error => {
        logError(error);
    });

    execCommand(['ls']).then(stdout => {
        log(stdout);
    }).catch(error => {
        logError(error);
    });
}
Now for the implementation:
const Main = imports.ui.main;
const GLib = imports.gi.GLib;
const Gio = imports.gi.Gio;
const St = imports.gi.St; // needed for St.BoxLayout / St.Label below
const ExtensionUtils = imports.misc.extensionUtils;
const Me = ExtensionUtils.getCurrentExtension();

let cancellable = null;
let panelBox = null;

let commands = {
    "commands": [
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1},
        {"command": "ls", "interval": 1}
    ]
};

function enable() {
    if (cancellable === null)
        cancellable = new Gio.Cancellable();

    panelBox = new St.BoxLayout({
        style_class: 'panel-button'
    });

    // Avoid deprecated methods like `add()`, and try not
    // to use global variables when possible
    let outputLabel = new St.Label({
        y_align: St.Align.MIDDLE,
        y_fill: false
    });

    panelBox.add_child(outputLabel);
    Main.panel._rightBox.insert_child_at_index(panelBox, 0);

    commands.commands.forEach(command => {
        this.refresh(command);
    });
}

function disable() {
    if (cancellable !== null) {
        cancellable.cancel();
        cancellable = null;
    }

    log("Executor stopped");

    if (panelBox !== null) {
        Main.panel._rightBox.remove_child(panelBox);
        panelBox = null;
    }
}

async function refresh(command) {
    try {
        await this.updateGui(command);

        // Don't use Mainloop anymore, just use GLib directly
        GLib.timeout_add_seconds(GLib.PRIORITY_DEFAULT, command.interval, () => {
            if (cancellable && !cancellable.is_cancelled())
                this.refresh(command);

            // Always explicitly return false (or this constant)
            // unless you're storing the returned ID to remove the
            // source later.
            //
            // Returning true (GLib.SOURCE_CONTINUE) or a value that
            // evaluates to true will cause the source to loop. You
            // could refactor your code to take advantage of that
            // instead of constantly creating new timeouts each
            // second.
            return GLib.SOURCE_REMOVE;
        });
    } catch (e) {
        // We can skip logging cancelled errors, since we probably
        // did that on purpose if it happens
        if (!e.matches(Gio.IOErrorEnum, Gio.IOErrorEnum.CANCELLED))
            logError(e, 'Failed to refresh');
    }
}

// `updateGui()` is wrapped in a try...catch above so it's safe to
// skip that here.
async function updateGui(command) {
    let stdout = await execCommand(['/bin/sh', '-c', command.command]);

    // This will probably always be true if the above doesn't throw,
    // but you can check if you want to.
    if (stdout) {
        // replace every newline, not just the first one
        let outputAsOneLine = stdout.replace(/\n/g, '');

        // No need to check the cancellable here; if it's
        // triggered the command will fail and throw an error
        log(outputAsOneLine);

        // let outputLabel = panelBox.get_first_child();
        // outputLabel.set_text(outputAsOneLine);
    }
}
It's hard to say what is causing the freeze you are experiencing, but I would first clean up your Promise usage and be more explicit about how you use timeout sources, as these may be stacking every second.
If possible, you might want to group your subprocesses into a single timeout source, possibly using Promise.all() to await them all at once. Overloading the event loop with pending sources and Promises could also be the cause of the freeze.
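As a minimal sketch of that grouping idea (assuming the execCommand() defined above and a fixed one-second interval for every command):
// One looping timeout source drives all the commands together.
function refreshAll() {
    GLib.timeout_add_seconds(GLib.PRIORITY_DEFAULT, 1, () => {
        Promise.all(
            commands.commands.map(c => execCommand(['/bin/sh', '-c', c.command]))
        ).then(results => {
            // Update the UI once per tick, with all outputs at hand
            log(results.join(' '));
        }).catch(logError);

        // Let this single source loop instead of re-adding a new one
        return GLib.SOURCE_CONTINUE;
    });
}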

nodejs worker_threads How to understand that all workers have completed work

I've begun to study worker_threads in nodejs. After all the work is done, the last worker must finish the script with process.exit();
my index.js:
const { Worker } = require("worker_threads");
const logUpdate = require("log-update");

const threads = 100;
const someData = 'some data';
let names = [...Array(threads)].fill(0);

for (let i = 0; i < threads; i++) {
    const port = new Worker(require.resolve("./worker.js"), {
        workerData: { someData, i }
    });
    port.on("message", (data) => handleMessage(data, i));
    port.on("error", (e) => console.log(e));
    port.on("exit", (code) => console.log(`Exit code: ${code}`));
}

function handleMessage(_, index) {
    names[index]++;
    logUpdate(names.map((status, i) => `Thread ${i}: ${status} ${_}`).join("\n"));
}
worker.js
const { parentPort, workerData } = require("worker_threads");
const { someData, i } = workerData;

(async () => {
    parentPort.postMessage(`worker start ${i} some data ${someData}`);
    process.exit();
})();
The workers are created and do their work, but after they have completed, the script does not finish.
From what I know, if no error is raised in the worker before its exit, the exit code is 0, otherwise 1 (automatically). Look at process exit codes.
So when you call process.exit() you can pass the code as an argument (or not call process.exit() at all).
Generally what I have seen, and do, is:
Wrap the worker creation in a Promise-returning function.
NOTE: worker.terminate() terminates the worker thread.
(Personal choice) I choose to do parentPort.postMessage(true) when I complete my task in the worker thread, and hence worker.on('message', message => { if (message === true) { worker.terminate(); resolve(true); } else { console.log(message) } }); in the main thread.
Whenever I have to create threads for repetitive task delegation, I use the .map function on the array. The function inside returns the same value as my worker-creating (Promise-returning) function, so eventually I map the array into an array of promises.
Then I put this array of promises into Promise.all().
Then I check the values returned from Promise.all() (see the sketch below).
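A minimal sketch of this whole pattern (the completion convention, a worker posting true when done, is an assumption mirroring the personal choice above):
const { Worker } = require("worker_threads");

function runWorker(workerData) {
    return new Promise((resolve, reject) => {
        const worker = new Worker(require.resolve("./worker.js"), { workerData });
        worker.on("message", (message) => {
            if (message === true) {   // worker signals completion
                worker.terminate();
                resolve(true);
            } else {
                console.log(message); // ordinary progress message
            }
        });
        worker.on("error", reject);
    });
}

// Map the tasks into an array of promises and await them all;
// once every worker has resolved, the script can exit cleanly.
Promise.all([...Array(100).keys()].map((i) => runWorker({ i })))
    .then(() => {
        console.log("all workers done");
        process.exit(0);
    });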
Check out:
My Gist (as an example for Promise.all()).
Medium (just for worker_thread basics; an awesome article).

ioSocket not emitting to browser on server heavy load

Some requests made to my nodejs server require heavy processing (for example: 5000 files). Since such a request takes a while to process, I want to display progress in the browser. For that I'm using socket.io: the server regularly sends the progress to the client, with for example ioSocket.emit("log", "progress 24%").
However, while sending progress to the client usually works, it doesn't when there is a big number of files: nothing is sent to the browser.
I'm sure the processing is going fine, since I log progress to the node terminal, where it appears as expected.
I'm wondering what I can do to make the ioSocket.emit event work in the heavy-load case, because that's where seeing progress is most useful.
The files processing function looks like this:
var child_process = require("child_process");
var _ = require("lodash"); // or underscore, for _.each

// listOfFilesPaths is defined elsewhere
function (ioSocket) {
    ioSocket.emit("log", "start");
    var ratingCounts = 0;
    var listOfFilesRatings = [];
    _.each(listOfFilesPaths, function(path, i) {
        child_process.exec("exiftool -b -Rating " + path, function(err, stdout) {
            if (err) console.log(err);
            else {
                listOfFilesRatings.push(stdout);
                ratingCounts++;
                ioSocket.emit("log", "rating test progress " + ratingCounts);
            }
        });
        ioSocket.emit("log", "each progress " + i);
    });
}
In this example, only the first "start" emit will be fired to the browser.
However if I do the following:
function (ioSocket) {
    ioSocket.emit("log", "start");
    for (let i = 0; i < 1000; i++) {
        ioSocket.emit("log", "each progress " + i);
    }
}
everything works fine, and I get the "start" and all the "each progress" messages sent to the browser.
If you are processing 5000 files, your scheme with _.each() and child_process.exec() will launch 5000 exiftool processes at once. That will likely bring any computer, except perhaps some big iron, to its knees. You should probably launch no more than N of those at a time, where you run some performance tests on your particular hardware to determine what N should be (probably under 10).
Here's one way to do that:
var child_process = require("child_process");

// listOfFilesPaths comes from the surrounding code, as in the question
function processFiles(ioSocket) {
    return new Promise((resolve, reject) => {
        ioSocket.emit("log", "start");
        let ratingCounts = 0;
        let listOfFilesRatings = [];
        const maxInFlight = 10;
        let inFlightCntr = 0;
        let fileIndex = 0;

        function run() {
            // while there's room to run more, run them
            while (inFlightCntr < maxInFlight && fileIndex < listOfFilesPaths.length) {
                let index = fileIndex++;
                ++inFlightCntr;
                ioSocket.emit("log", "each progress " + index);
                child_process.exec("exiftool -b -Rating " + listOfFilesPaths[index], function(err, stdout) {
                    ++ratingCounts;
                    --inFlightCntr;
                    if (err) {
                        console.log(err);
                        listOfFilesRatings[index] = 0;
                    } else {
                        listOfFilesRatings[index] = stdout;
                        ioSocket.emit("log", "rating test progress " + ratingCounts);
                    }
                    run();
                });
            }
            if (inFlightCntr === 0 && fileIndex >= listOfFilesPaths.length) {
                // all done here
                console.log(listOfFilesRatings);
                resolve(listOfFilesRatings);
            }
        }

        run();
    });
}

processFiles(ioSocket).then(results => {
    console.log(results);
});

Fork a child process and inject dependency

I currently have a blocking operation in a module, so I'm looking at turning it into a child process that I fork instead.
If I want to do that, then I of course need to modify the architecture of my module. The module requires that a dependency is injected by calling the module as a function and passing in the dependency, like so:
var dependency = { name: "Bob" }
require('worker')(dependency)
Then in my worker module:
module.exports = function (dependency) {
    // Outputs { name: "Bob" }
    console.log(dependency);
};
How can I turn this example into a child process being forked?
When using .fork() you are spinning up a completely separate process, so you are not able to pass references between the parent and child processes (and are limited to messaging after the process has been created).
An approach not requiring messaging is to pass arguments (in an array) when you fork the process. Although I believe you'll have to stick to simple string/number values, it looks like that might be enough for you, judging from the code. E.g.:
At top level:
var name = 'bob'
var args = [name];
var childProcess = require('child_process').fork(__dirname + '/worker', args);
In the worker process:
var name = process.argv[2]; // AFAIK elements 0 and 1 are already populated with env info
Update
If you really want to go the messaging route (which I'd hesitate to recommend unless you already need to send messages anyway), then you could differentiate between the types of messages with something like this (there may be more elegant ways):
At top level:
var childProcess = require('child_process').fork(__dirname + '/worker');
childProcess.send({msgtype: 'dependencies', content: dependencies});

// Then to send a 'normal' message:
childProcess.send({msgtype: 'myothermessagetype', content: 'some content'});
In worker process:
process.on('message', function(msg){
    if (msg.msgtype == 'dependencies') {
        var dependencies = msg.content;
        // Do something with dependencies
    } else if (msg.msgtype == 'myothermessagetype') {
        var normalmessage = msg.content;
        // Do something in response to a normal message.
    }
});
a.js
var fork = require("child_process").fork;
var child;
var init = false;

var m = module.exports = {};

m.init = function (o) {
    if (init) return;
    init = true;
    child = fork(__dirname + "/child");
    child.send({ init: o });
};

m.print = function (o) {
    if (!init) return;
    child.send({ msg: o });
};

m.uninit = function () {
    if (!init) return;
    child.on("exit", function () {
        init = false;
    });
    child.kill();
};
child.js
var dependency;

var print = function (o) {
    console.log(o + dependency.name);
};

process.on("message", function (o) {
    if (o.init) {
        dependency = o.init;
    } else {
        print(o.msg);
    }
});
b.js
var a = require("./a");

a.init({ name: "asd" });
a.print("hi, ");

setTimeout(function () {
    a.uninit();
}, 1000);
Prints: hi, asd
In the main module:
var dependency = {message: 'hello there'};
var args = [JSON.stringify(dependency)];
var child = require('child_process').fork('worker', args);
child.send('sayhello');
child.send('exit');
And in the child process module (worker.js):
var dependency = JSON.parse(process.argv[2]);

process.on('message', function(m) {
    if (m == 'sayhello') console.log(dependency.message);
    else if (m == 'exit') process.exit();
});
