Node.js. What should I use? Next()? - node.js

app.get("/server", function (req, res){
connection.query("SELECT * from serverdb", function(err, rows)
{
var data = rows;
var reachabilityResultString="";
var serverCount = rows.length;
var arrayWithReachabilityResultStrings = new Array();
var insertReachabilityResultStringIntoArray;
for (var counterForServername = 0 ; counterForServername < serverCount; counterForServername++)
{
ls = childProcess.exec('ping ' + rows[counterForServername].ipadresse,function (error, stdout, stderr)
{
if (error)
{
console.log(error.stack);
console.log('Error code: '+error.code);
console.log('Signal received: '+error.signal);
var errorSignal = ("Signal received: " + error.signal);
var errorReachability = "Error";
}
else
{
console.log('Child Process STDOUT: '+stdout);
console.log('Child Process STDERR: '+stderr);
pingOutput = String(stdout);
console.log(reachabilityResult(pingOutput));
insertReachabilityResultStringIntoArray = arrayWithReachabilityResultStrings.push(reachabilityResult(pingOutput));
console.log(arrayWithReachabilityResultStrings);
};
ls.on('exit', function (code) {
console.log('Child process exited with exit code '+code);
});
});
};
});
res.render("all.jade,{servers: data, status: arrayWithReachabilityResultStrings});
});
..well..this is my code. My problem is that the program first invoke the website with the jadecode; I hope you know what I mean. I want to deliver the arrayWithReachabilityResultStrings to all.jade, so the program must wait until the for loop is finished. But I don't know how to make it wait. I know the "problem" is the asynchronous behavior of node.js but I don't know how I can solve this..

just fix your missing " and move your
res.render("all.jade,{servers: data, status: arrayWithReachabilityResultStrings});
one line up. It needs to be invoked by a callback in connection.query, as it is now it is invoked much sooner.
It would also be nice, if you read a bit about javascript variable scoping. This SO question does good job in that.
P.S.: Glad to see new people learning node.

If you need to run an arbitrary number of subcommands and wait until they are all done, you should consider a helper library such as async.js and use the async.queue flow control function. This kind of coordination is actually somewhat tricky in node to code by hand without any flow control facilities. However, it is certainly possible. In this case you would need a separate done counter that you increment on each 'exit' event and when all of your child processes have been started and all have finished, you're done.

Related

Proper way of killing forked child process(es)

I'm working on an Express app where I need to fork an expensive process, and kill the process when it's complete.
I've seen varying ways to do this on Stack Overflow and searches, but I would like to know if this is the proper way to do it when using the child_process.fork(...) methodology.
// route handler - /routes/api.js
exports.redirect = function(req, res) {
var campaignId = req.query.campaignId,
destination = req.query.destination,
worker = child_process.fork(__dirname + '/../workers/redirect');
worker.send({
campaignId: campaignId,
destination: destination
});
worker.on('message', function(msg) {
this.kill();
});
return res.redirect(destination);
};
// forked process file referenced as "worker" - /workers/redirect.js
var db = require('../db'),
RedirectedUrlService = require('../services/redirect'),
logger = require('../logger');
process.on('message', function(data) {
RedirectedUrlService.create(data, function(error, redirectedUrl) {
if (error) {
logger.log('error', error);
}
process.send('done');
});
});
I'm not seeing any additional worker references if I: ps ax | grep node, so it seems to be doing what I expect.
Is this correct, and my code is still non-blocking? or by messaging the parent to issue the kill(), have I completely defeated the purpose of creating the fork in the first place?
In your child process, use process.exit(0);

Not receiving stdout from nodejs spawned process

I'm trying to have nodejs interact with adventure, an old text based game. The idea is to open adventure as a child process and then play the game by writing to its stdin and placing an event listener on stdout.
When the game starts, it prints an initial:
Welcome to Adventure!! Would you like instructions?
So to illustrate my problem, I have a nodejs+express instance with:
var childProcess = require('child_process');
var spawn = childProcess.spawn;
var child = spawn('adventure');
console.log("spawned: " + child.pid);
child.stdout.on('data', function(data) {
console.log("Child data: " + data);
});
child.on('error', function () {
console.log("Failed to start child.");
});
child.on('close', function (code) {
console.log('Child process exited with code ' + code);
});
child.stdout.on('end', function () {
console.log('Finished collecting data chunks.');
});
But when I start the server, the text from the game doesn't reach the event listener:
spawned: 24250
That's all the output I get. The child.stdout.on even listener is never called. Why isn't that initial line from the game being picked up?
If I append the following line to the above block of javascript, then the program output appears at once. So adventure runs, and I can now force it to trigger the child.stdout.on event listener... but this also ends the child process, which defeats the purpose of reading and writing to it.
...
child.stdout.on('end', function () {
console.log('Finished collecting data chunks.');
});
child.stdin.end();
Now the output is:
spawned: 28778
Child data:
Welcome to Adventure!! Would you like instructions?
user closed input stream, quitting...
Finished collecting data chunks.
Child process exited with code 0
I'm sure its a trivial oversight on my part, I appreciate any help figuring this out though.
After going through the Nodejs documentation a few more times, I convinced myself I was either missing something pretty big, or the spawn command wasn't working correctly. So I created a github issue.
And the answer was immediately posted: the child process can't buffer output if you want fast access.
So to achieve what I was originally looking for:
var childProcess = require('child_process');
var spawn = childProcess.spawn;
var child = spawn('unbuffer', 'adventure');
console.log("spawned: " + child.pid);
child.stdout.on('data', function(data) {
console.log("Child data: " + data);
});
child.on('error', function () {
console.log("Failed to start child.");
});
child.on('close', function (code) {
console.log('Child process exited with code ' + code);
});
child.stdout.on('end', function () {
console.log('Finished collecting data chunks.');
});
With the functional difference being the use of unbuffer, a command that disables output buffering.
Why isn't that initial line from the game being picked up?
I had the same problem on a project that called a compiled C++ program from node.js. I realized the problem was in the compiled C++ program: I wasn't flushing the stdout stream. Adding fflush(stdout); after printing a line solved the issue. Hopefully you still have access to the source of the game!
The data passed is a buffer type, not a string. Therefore, you need a decoder to read that buffer and then do the logging.
Here's how to do that.
var StringDecoder = require('string_decoder').StringDecoder;
var decoder = new StringDecoder('utf8');
child.stdout.on('data', function (data) {
var message = decoder.write(data);
console.log(message.trim());
});

Mongoose callback not working using Forever

I am trying to run a background process with Forever that adds data from an external service into my mongodb database every hour (I am new with node and had no idea about how to do this). I am using Node with Express and running the forever task using
forever -o out.log -e err.log start background/collector.js
so I have some feedback from the process. The code is the following:
var request = require('request');
var mongoose = require('mongoose');
var Model = require('../models/Model.js');
// Starting the collector process
addNewData();
function addNewData() {
request('external_service_url', function (error, response, body) {
if (!error && response.statusCode == 200) {
var models = JSON.parse(body);
console.log('Adding the new models...')
for(var i = 0; i < models.length; i++)
{
console.log(i);
Model.create(models[i], function (error, post, result) {
console.log('Test');
if (error) console.log('Something went wrong adding the document!');
if ( ! result.updatedExisting)
{
console.log('A new document has been added!');
}
});
}
console.log('Models added: ' + models.length);
// Every hour we will collect new data
console.log('Waiting for an hour...');
setTimeout(addNewData, 36000000);
}
})
}
I don't know why but seems that the .create() callback is not working, the console.logs placed inside the create callback are not showing anything in the logs. Forever does not work with mongoose? Do you have some idea?
Well here's the thing. You never "connected" your mongoose adapter to the database. Simply fixed by adding:
mongoose.connect('mongodb://myhost/mydatabase');
Somewhere near the top before you attempt to do anything else. It won't necessarily "connect" in that order, but the mongoose methods will work it out. "Asynchronous Programming", read on for the rest.
Also you should not be looping like this since you are not waiting for the "callback" ( it's not a closure it's a callback ) to complete or otherwise "throttle" the connection, which can lead to problems.
Use something like "async" instead to handle this. In this example the .eachLimit() in order to limit the number of parallel operations to a reasonabe level:
async.eachLimit(models,10,function(el,callback) {
Model.create(models[i], function (error, result) {
// do something maybe but call
callback(err); // to signal it is done
});
},function(err) {
// comes here on completion or error
});
Remember that "asychronous" code needs to execute on "callbacks" or "promises" in order to signal when things are done. Unline in "scynchronous" programming where each next line only executes when the previous lines of code are complete.
Your "for" loop otherwise just "sends everything" to the server and does not wait for completion before continuing execution.
But the main problem here was the connection. But you still should fix the rest as shown.

node.js file system problems

I keep banging my head against the wall because of tons of different errors. This is what the code i try to use :
fs.readFile("balance.txt", function (err, data) //At the beginning of the script (checked, it works)
{
if (err) throw err;
balance=JSON.parse(data);;
});
fs.readFile("pick.txt", function (err, data)
{
if (err) throw err;
pick=JSON.parse(data);;
});
/*....
.... balance and pick are modified
....*/
if (shutdown)
{
fs.writeFile("balance2.txt", JSON.stringify(balance));
fs.writeFile("pick2.txt", JSON.stringify(pick));
process.exit(0);
}
At the end of the script, the files have not been modified the slightest. I then found out on this site that the files were being opened 2 times simultaneously, or something like that, so i tried this :
var balance, pick;
var stream = fs.createReadStream("balance.txt");
stream.on("readable", function()
{
balance = JSON.parse(stream.read());
});
var stream2 = fs.createReadStream("pick.txt");
stream2.on("readable", function()
{
pick = JSON.parse(stream2.read());
});
/****
****/
fs.unlink("pick.txt");
fs.unlink("balance.txt");
var stream = fs.createWriteStream("balance.txt", {flags: 'w'});
var stream2 = fs.createWriteStream("pick.txt", {flags: 'w'});
stream.write(JSON.stringify(balance));
stream2.write(JSON.stringify(pick));
process.exit(0);
But, this time, both files are empty... I know i should catch errors, but i just don't see where the problem is. I don't mind storing the 2 objects in the same file, if that can helps. Besides that, I never did any javascript in my life before yesterday, so, please give me a simple explanation if you know what failed here.
What I think you want to do is use readFileSync and not use readFile to read your files since you need them to be read before doing anything else in your program (http://nodejs.org/api/fs.html#fs_fs_readfilesync_filename_options).
This will make sure you have read both the files before you execute any of the rest of your code.
Make your like code do this:
try
{
balance = JSON.parse(fs.readFileSync("balance.txt"));
pick = JSON.parse(fs.readFileSync("pick.txt"));
}
catch(err)
{ throw err; }
I think you will get the functionality you are looking for by doing this.
Note, you will not be able to check for an error in the same way you can with readFile. Instead you will need to wrap each call in a try catch or use existsSync before each operation to make sure you aren't trying to read a file that doesn't exist.
How to capture no file for fs.readFileSync()?
Furthermore, you have the same problem on the writes. You are kicking off async writes and then immediately calling process.exit(0). A better way to do this would be to either write them sequentially asynchronously and then exit or to write them sequentially synchronously then exit.
Async option:
if (shutdown)
{
fs.writeFile("balance2.txt", JSON.stringify(balance), function(err){
fs.writeFile("pick2.txt", JSON.stringify(pick), function(err){
process.exit(0);
});
});
}
Sync option:
if (shutdown)
{
fs.writeFileSync("balance2.txt", JSON.stringify(balance));
fs.writeFileSync("pick2.txt", JSON.stringify(pick));
process.exit(0);
}

nodeunit fail to exit from my asynchronous tests

Whenever I run my nodeunit test in IDE or console, it run well but fail to exit. Help me please with it!
var store = require('../lib/db');
var list = require('../source/models/mock_deals');
var logger = require('../lib/logging').logger;
exports.setUp = function(done){
logger.info('start test...');
done();
};
exports.tearDown = function(done){
logger.info('end test...');
done();
};
exports.testInsertDeal = function(test){
var length = list.length;
test.equals(length, 2);
store.mongodb.open(function(err,db){
if(err){
logger.error(err);
return;
}
logger.info("mongodb is connected!");
db.collection('deals',function(err,collection){
for(var i=0; i<length; i++){
var item = list[i];
collection.insert(item, function(err, result){
if(err){
logger.error('Fail to insert document deal [' + item.id + ']');
return;
}
logger.info( 'index ' + i + ' : ' +JSON.stringify(item) );
});
}
});
test.expect(1);
});
test.done();
};
I changed to use mongoose instead of mongodb. test still could not exit automatically.
But when I disconnected mongoose in test.tearDown method in my nodeunit test. the test existed correctly.
Add below in you test:
exports.tearDown = function(done){
mongoose.disconnect(function(err){
if(err) {
logger.error(err);
return;
}
logger.info('mongoose is disconnected');
});
done();
};
And more, If I use log4js for logging in my test and configure log4js with reloadSecs: 500 , test will not exist either. After I set reloadSecs to 0, then test exists well. So we need to configure logging.json with option reloadSecs: 0
To summarize: we need to make sure there are no working parts there after all test methods are done. then test will exist correctly.
If you know when your program should exit, you can simply use the following line of code to exit:
process.exit(0);
where 0 is the return code of the program.
Now that isn't really fixing the problem. There is probably a call back still waiting or a connection that is still active keeping your program up and running that isn't shown in the code you posted here. If you don't care to find it, just use process.exit. If you really care to find it, you will have to dig some more. I've never used nodeunit but I have used other node libraries that leave stuff up in their inner workings that keep the program from exiting. In those cases, I usually don't feel like wading through other peoples source code to find out what is going on so I just do the afore mentioned process.exit call.
This should at least give you an option.

Resources