run-sequence synchronous task never completes - node.js

I'm almost certainly going about this in the wrong way, so first up my high-level requirement.
I'm using the angular2-seed and want to run Protractor tests in a headless mode by using Xvfb. I don't want an Xvfb server running at all times (this is a build server) so instead I'd like to spin up an Xvfb service, have Protractor do its thing, and then "gracefully" shut down Xvfb. In isolation these tasks are working fine, however I've hit a wall when it comes to adding them into the gulp build setup.
Here's the task in the gulpfile:
gulp.task('e2e.headless', (done: any) =>
runSequence('start.xvfb',
'protractor',
'stop.xvfb',
done));
The tasks themselves are loaded in through individual typescript task files, i.e:
import {runProtractor} from '../../utils';
export = runProtractor
And here are my (latest) utility files themselves.
protractor.ts
import * as util from 'gulp-util';
import {normalize, join} from 'path';
import {ChildProcess} from 'child_process';
function reportError(message: string) {
console.error(require('chalk').white.bgRed.bold(message));
process.exit(1);
}
function promiseFromChildProcess(child: ChildProcess) {
return new Promise(function (resolve: () => void, reject: () => void) {
child.on('close', (code: any) => {
util.log('Exited with code: ', code);
resolve();
});
child.stdout.on('data', (data: any) => {
util.log(`stdout: ${data}`);
});
child.stderr.on('data', (data: any) => {
util.log(`stderr: ${data}`);
reject();
});
});
}
export function runProtractor(): (done: () => void) => void {
return done => {
const root = normalize(join(__dirname, '..', '..', '..'));
const exec = require('child_process').exec;
// Our Xvfb instance is running on :99
// TODO: Pass this in instead of hard-coding
process.env.DISPLAY=':99';
util.log('cwd:', root);
let child = exec('protractor', { cwd: root, env: process.env},
function (error: Error, stdout: NodeBuffer, stderr: NodeBuffer) {
if (error !== null) {
reportError('Protractor error: ' + error + stderr);
}
});
promiseFromChildProcess(child).then(() => done());
};
}
xvfb_tools.ts
import * as util from 'gulp-util';
const exec = require('child_process').exec;
function reportError(message: string) {
console.error(require('chalk').white.bgRed.bold(message));
process.exit(1);
}
export function stopXvfb() {
return exec('pkill -c -n Xvfb',
function (error: NodeJS.ErrnoException, stdout: NodeBuffer, stderr: NodeBuffer) {
if (error !== null) {
reportError('Failed to kill Xvfb. Not really sure why...');
} else if (stdout.toString() === '0') {
reportError('No known Xvfb instance. Is it running?');
} else {
util.log('Xvfb terminated');
}
});
}
export function startXvfb() {
return exec('Xvfb :99 -ac -screen 0 1600x1200x24',
function (error: NodeJS.ErrnoException, stdout: NodeBuffer, stderr: NodeBuffer) {
if (error !== null && error.code !== null) {
reportError('Xvfb failed to start. Err: ' + error.code + ', ' + error + ', ' + stderr);
}
});
}
I feel as though I'm probably going around the houses in creating a promise from my exec child_process, however earlier iterations of the code didn't do it, so...
Note that the debug logging which should be output in runProtractor() displaying the root directory never gets called, so I'm quite sure that there is an async issue at play here. Here is the output from the task:
[00:47:49] Starting 'e2e.headless'...
[00:47:49] Starting 'start.xvfb'...
[00:47:49] Finished 'start.xvfb' after 12 ms
[00:47:49] Starting 'protractor'...
[00:47:49] Finished 'protractor' after 5.74 ms
[00:47:49] Starting 'stop.xvfb'...
[00:47:49] Finished 'stop.xvfb' after 11 ms
[00:47:49] Finished 'e2e.headless' after 38 ms
[00:47:49] Xvfb terminated
Can someone set me straight/push me in the right direction please??

Thanks to Ludovic from the angular2-seed team!
The mistake was in not calling the runProtractor function from the wrapper class, i.e.
export = runProtractor(). Once that was noted, I could then strip out the unnecessary wrapper function as well as the promiseFromChildProcess, which were distractions.
The final task was just an anonymous function that takes the gulp callback "done" which is called when exiting:
function reportError(message: string) {
console.error(require('chalk').white.bgRed.bold(message));
process.exit(1);
}
export = (done: any) => {
const root = normalize(join(__dirname, '..', '..', '..'));
const exec = require('child_process').exec;
process.env.DISPLAY=':99';
util.log('cwd:', root);
exec('protractor', { cwd: root, env: process.env},
function (error: Error, stdout: NodeBuffer, stderr: NodeBuffer) {
if (error !== null) {
reportError('Protractor error: ' + error + stderr);
} else {
done();
}
});
}

You need to add a callback function to your gulp task and call the cb (callback) function after all your runSequence tasks have completed.
gulp.task('e2e.headless', (cb) => {
runSequence('start.xvfb',
'protractor',
'stop.xvfb',
(err) => {
if (err) {
console.log(err.message);
} else {
console.log("Build finished successfully");
}
cb(err);
});
});

Related

Why does this behaviour with nodejs and child process happen?

What am I trying to do?
I want to clone multiple git repositories from nodejs.
How do I achieve that?
First I have this method in a separate file:
const exec = require("child_process").exec;
const { printError } = require("./printers");
function execute(command) {
console.log(`Running command: ${command}`);
let result = "";
let savedError = undefined;
return new Promise((resolve, reject) => {
const proc = exec(command, function (error, stdout, stderr) {
savedError = savedError || stderr || error;
result += stdout;
});
proc.on("close", (code) => {
if (code !== 0) {
console.error(
[
"================================================================",
`Command ${command} failed.`,
`Status code: ${code}`,
`Error message: ${savedError}`,
`Output: ${result}`,
"================================================================",
"",
].join("\n")
);
reject(result);
} else {
resolve(result);
}
});
});
Then I use it in my main module (code is abbreviated and a bit simplified just so you can get the point):
const repoUrls = ["url1", "url2"]; // imagine these are real urls
async function main() {
const copyOfUrls = [...repoUrls];
while (copyOfUrls.length) {
try {
await execute(
`git clone ${copyOfUrls[0]} repositories/${repoUrlFileFriendly(
copyOfUrls[0]
)}`
);
console.log('fun fact - this console log never happens');
copyOfUrls.shift()
} catch (error) {
console.error("Failed to clone, see error:", error);
}
}
}
What is the problem?
Well, code works BUT after cloning first repo, the process exits (both the child process and the main one) and second repository is never even attempted to be cloned. (note the "console.log('fun fact - this console log never happens');").

Can't load protobuf message with protobuf.js

I'm trying to use proto messages with protobuf.js, encode them and send them to a RabbitMQ message broker. I have the following sub-folders inside my project:
- model
- protos
- transactions.proto
- RabitMQ.js
- routes
- rmq-api.js
I added a route which does the following(Using express) in the rmq-api.js file:
const RabbitMQ = require('../model/RabbitMQ');
router.post('/api/transactions' ,function (req,res,next) {
RabbitMQ.PublishTransactionsMessage(DummyMessage).then(() => {
res.status(200).send({message: "OK :)"});
}).catch((e) => {
res.status(500).send({error:e.message});
});
});
In the RabitMQ.js file I have the following code:
module.exports = {
PublishTransactionsMessage: function(message) {
return new Promise((resolve, reject) => {
amqp.connect(RabbitMQConfig.url, function (error, connection) {
if (error) {
console.error("Could not connect to the rabbit message broker on {0} - " +
"Check connection please and try again".format(RabbitMQConfig.url));
console.error("Error message - {0}".format(error));
reject(error)
}
connection.createChannel(function(error, channel) {
if (error) {
console.error("Could Create channel - {0}".format(error.message));
reject(error)
}
const queue = RabbitMQConfig.queue;
channel.assertQueue(queue, {
durable: true
});
// Convert Message to protobuff
protobuf.load("./protos/transactions.proto").then((err, root) => {
if (err) {
reject(err);
}
let ScraperMessageResult = root.lookupType("transactions.ScraperMessageResult");
const errMsg = ScraperMessageResult.verify(message);
if (errMsg)
reject(errMsg);
let buffer = ScraperMessageResult.encode(message).finish();
channel.sendToQueue(queue, buffer);
console.log(`Sent ${message} to queue: ${queue}`);
resolve()
}).catch((err) => {
reject(err);
});
});
});
});
},
};
In the code shown above in the line:
protobuf.load("./protos/transactions.proto").then((err, root) => {
I keep catching the following error:
Inside this catch block:
}).catch((err) => {
reject(err);
});
This seems like a pretty simple problem, however I haven't found anything on this online, so I might be missing something really simple here.
P.S. I tried using __dirname + "/protos/transaction.proto" and still couldn't get this to work.
Please help me figure this out.
The problem is that your function doesn't look at the dist directory,
await protocolBuffer.load(__dirname + '/item.proto'); should look there,
and also you need to copy the file manually by using a copy/cp command; you can add it as part of the package.json scripts like so:
"build": "nest build && COPY src\\reading_list\\protocol_buffer\\*.proto dist\\reading_list\\protocol_buffer\\"

Exit process when pipe finish

I'm trying to import users from a CSV file with node.js in CLI.
But I don't know how to exit program properly when all the tasks are completed.
Here my code using Sails.js and node-csv
"use strict";
const Sails = require('sails');
const fs = require('fs');
const csv = require('csv');
const filename = './test.csv';
Sails.load({
models : { migrate : 'safe'},
log : { level : 'info' },
hooks : { grunt : false, sockets : false, pubsub : false },
}, (errSails, sails) => {
if ( errSails ) process.exit(1);
let input = fs.createReadStream(filename);
let parser = csv.parse();
let transformer = csv.transform( (record) => {
sails.log.debug('record : ', record);
sails.log.info('Transformer : searching user...');
return User.findOne({email: record.email})
.then( (user) => {
sails.log.info('Transformer : search complete');
if ( !user ) {
sails.log.info('Transformer : no users found');
return User.create(record);
}
return record;
})
.catch( (err) => sails.log.error(err) );
});
parser
.on('readable', () => sails.log.info('Parser readable'))
.on('finish', () => sails.log.info('Parser finish'))
.on('error', (err) => sails.log.error('Parser error : ', err));
transformer
.on('finish', () => sails.log.info('Transformer finish'))
.on('error', (err) => sails.log.error('Transformer error : ', err));
input.pipe(parser).pipe(transformer).pipe(process.stdout);
//--- Exit program
// process.exit();
});
Here is what is inside the terminal
debug: record : { ... }
info: Transformer : searching user...
info: Parser readable
info: Parser finish
info: Transformer finish
info: Transformer : search complete
info: Transformer : no users found
Without process.exit();, the program continue forever.
If I uncomment the process.exit(); line, the program terminate immediately without parsing anything. But, if I put this line into the finish event of the transformer, the program exit after info: Transformer finish without the last two lines.
I want to know how and where I should put process.exit(); to exit program to be sure that all users of my CSV file are created in the database.
First, don't use process.exit() when an error happens — throw a new Error instead. Read more here.
As for your problem, there are two issues. First, you have an async pipeline but placed your exit command outside of it, so it runs before the work is done. Second, to exit only once everything has completed, you should move the exit call into the transformer's finish handler, something like this:
.on('finish', () => {sails.log.info('Transformer finish');process.exit(1);})

How to check whether node child process executes completely and is listening for requests

I have a requirement that I want to run my task after booting an HTTP server.
But i cannot check whether the server is booted.
Instead, I used a setTimeout function to assume the server is booted, with a 2-second interval.
Could anybody who knows a better solution help me?
Great thanks!
My code is as follows:
// main.js
function exec(command) {
var defer = q.defer();
var child = childProcess.exec(command, function (err, stdout, stderr) {
if (err) {
return defer.reject(err);
}
});
child.stdout.pipe(process.stdout);
// TODO: find another graceful way to check child process ends
setTimeout(function () {
defer.resolve();
}, 2000);
return defer.promise;
}
exec('"' + process.execPath + '" app.js')
.then(function () {
// my task here;
console.log('my task')
});
// app.js
var http = require('http'),
server;
server = http.createServer(function (req, res) {
// serve html;
res.end('hello world');
});
server.listen(9999);
// run
$> node main.js
A good solution would be to try to connect to the webserver until it responds. Since you're using Q, you can take a look at the code from this answer, which enables you to retry an operation until it succeeds.
function retry(operation, delay) {
return operation().catch(function(reason) {
// Note: I replaced delay * 2 by delay, it will be better in your case
return Q.delay(delay).then(retry.bind(null, operation, delay));
});
}
Then you could do something like
startMyWebServer()
.then (function () {
return retry(connectToWebServer, 500).timeout(10000);
})
.then(function () {
console.log("Webserver ready")
})
.catch(function (err) {
console.log("Unable to contact webserver")
});
The call to retry will retry to connect to the webserver if the previous connection failed, with a delay of 500ms. If after 10s it didn't succeed, the promise will be rejected.
The connectToWebServer function should be a function that tries to connect to your webserver and returns a promise, i.e. something like
function connectToWebServer() {
var d = q.defer();
request('http://your-ip:your-port', function (err, res) {
if (err || res.statusCode != 200) {
return d.reject("Error : "+(err || "status code "+res.statusCode));
}
d.resolve();
}
return d.promise;
}

execute a batch file from nodejs

Would it be possible to run a batch file from a nodejs application?
After googling for some time we can use child_process to execute the commands. Tried the same module but without success.
Could somebody guide me?
This creates a NodeJS module with a single function named exec() to execute batch scripts.
var exec = require('child_process').exec,
path = require('path'),
os = require('os');
fs = require('fs');
// HACK: to make our calls to exec() testable,
// support using a mock shell instead of a real shell
var shell = process.env.SHELL || 'sh';
// support for Win32 outside Cygwin
if (os.platform() === 'win32' && process.env.SHELL === undefined) {
shell = process.env.COMSPEC || 'cmd.exe';
}
// Merges the current environment variables and custom params for the environment used by child_process.exec()
function createEnv(params) {
var env = {};
var item;
for (item in process.env) {
env[item] = process.env[item];
}
for(item in params) {
env[item] = params[item];
}
return env;
}
// scriptFile must be a full path to a shell script
exports.exec = function (scriptFile, workingDirectory, environment, callback) {
var cmd;
if (!workingDirectory) {
callback(new Error('workingDirectory cannot be null'), null, null);
}
if (!fs.existsSync(workingDirectory)) {
callback(new Error('workingDirectory path not found - "' + workingDirectory + '"'), null, null);
}
if (scriptFile === null) {
callback(new Error('scriptFile cannot be null'), null, null);
}
if (!fs.existsSync(scriptFile)) {
callback(new Error('scriptFile file not found - "' + scriptFile + '"'), null, null);
}
// transform windows backslashes to forward slashes for use in cygwin on windows
if (path.sep === '\\') {
scriptFile = scriptFile.replace(/\\/g, '/');
}
// TODO: consider building the command line using a shell with the -c argument to run a command and exit
cmd = '"' + shell + '" "' + scriptFile + '"';
// execute script within given project workspace
exec(cmd,
{
cwd: workingDirectory,
env: createEnv(environment)
},
function (error, stdout, stderr) {
// TODO any optional processing before invoking the callback
callback(error, stdout, stderr);
}
);
};
I have found the solution for it, and it works fine for me. This opens up a new command window and runs my main node JS in a child process. You need not give the full path of cmd.exe. I was making that mistake.
var spawn = require('child_process').spawn,
ls = spawn('cmd.exe', ['/c', 'startemspbackend.bat']);
ls.stdout.on('data', function (data) {
console.log('stdout: ' + data);
});
ls.stderr.on('data', function (data) {
console.log('stderr: ' + data);
});
ls.on('exit', function (code) {
console.log('child process exited with code ' + code);
});
An easier way I know for executing that is the following code :
function Process() {
const process = require('child_process');
var ls = process.spawn('script.bat');
ls.stdout.on('data', function (data) {
console.log(data);
});
ls.stderr.on('data', function (data) {
console.log(data);
});
ls.on('close', function (code) {
if (code == 0)
console.log('Stop');
else
console.log('Start');
});
};
Process();

Resources