Incr appears to run twice--why?

I'm having a hard time getting node, redis, and async to do what I want. I'm trying very basic things to grasp the patterns of redirecting control flow. Here, I hold a counter variable "success" that increases by one if a comparison key0 > key1 is true. They are static for now so it's always true; the only thing I wish to change is to increment success. I refresh the browser to re-run the comparison and increment success again.
My trouble is that when the page is refreshed, success jumps by 2. I tried putting a callback on incr, but it looks like only get-type commands take callbacks. I had a client.end() in my script, but it prevented me from reloading the page, so I commented it out. I suspect this is the source of my problem. If so, where does client.end() belong?
var http = require("http");
var redis = require("redis");
var async = require("async");

var client = redis.createClient();

http.createServer(function(request, response) {
  // key "success" set to 0 externally, through redis-cli
  client.set("key0", "19");
  client.set("key1", "11");
  response.writeHead(200, {"Content-Type": "text/plain"});
  async.series([
    shortcut("key0"),
    shortcut("key1"),
    shortcut("success")
  ],
  function(err, results) {
    if (results[0] > results[1]) {
      client.incr("success", function(err, reply) {
        response.write("incr done");
      });
      response.write(results[0] + "\n\n");
      response.write(results[1] + "\n\n");
      response.write(results[2]);
    }
    response.end();
    // client.quit();
  });
}).listen(8000);

function shortcut(key) {
  return function(callback) {
    client.get(key, function(err, reply) {
      callback(null, reply);
    });
  };
}

Your browser most likely requests favicon.ico as well, generating a second request that runs your handler (and the incr) a second time.
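A quick way to confirm and sidestep this is to short-circuit favicon requests before they reach the counter logic; a minimal sketch:

http.createServer(function(request, response) {
  // Skip the browser's automatic favicon request so it can't touch the counter
  if (request.url === "/favicon.ico") {
    response.writeHead(204); // 204 No Content
    response.end();
    return;
  }
  // ... the rest of the handler from above ...
}).listen(8000);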

node.js server and AWS asynchronous call issue

I have a simple Node Express app with a service that makes a call to a Node server. The Node server makes a call to an AWS web service. The AWS call simply lists any S3 buckets it finds, and it is asynchronous. The problem is that I don't seem to be able to get the server code to "wait" for the AWS call to return the JSON data, so the function returns undefined.
I've read many, many articles on the web about this, including promises, wait-fors, etc., but I think I'm not fully understanding how these work!
This is my first exposure to Node, and I would be grateful if somebody could point me in the right direction.
Here's some snippets of my code... apologies if it's a bit rough, but I've chopped and changed things many times over!
Node Express:
var Httpreq = new XMLHttpRequest(); // a new request
Httpreq.open("GET","http://localhost:3000/listbuckets",false);
Httpreq.send(null);
console.log(Httpreq.responseText);
return Httpreq.responseText;
Node Server:
app.get('/listbuckets', function (req, res) {
  var bucketData = MyFunction(res, req);
  console.log("bucketData: " + bucketData);
});

function MyFunction(res, req) {
  var mydata;
  var params = {};
  res.send('Here are some more buckets!');
  var request = s3.listBuckets();
  // register a callback event handler
  request.on('success', function(response) {
    // log the successful data response
    console.log(response.data);
    mydata = response.data;
  });
  // send the request
  request.
    on('success', function(response) {
      console.log("Success!");
    }).
    on('error', function(response) {
      console.log("Error!");
    }).
    on('complete', function() {
      console.log("Always!");
    }).
    send();
  return mydata;
}
Use the latest Fetch API (https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) to make HTTP calls. It has built-in Promise support.
fetch('http://localhost:3000/listbuckets').then(response => {
// do something with the response here
}).catch(error => {
// Error :(
})
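For example, assuming the /listbuckets endpoint returns JSON, the body can be unwrapped like this:

fetch('http://localhost:3000/listbuckets')
  .then(response => response.json()) // parse the JSON body
  .then(buckets => {
    console.log(buckets); // use the bucket list here
  })
  .catch(error => {
    console.error(error);
  });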
I eventually got this working with:
const request = require('request');

request(url, function (error, response, body) {
  if (!error && response.statusCode == 200) {
    parseString(body, function (err, result) {
      console.log(JSON.stringify(result));
    });
    // from within the callback, write data to response, essentially returning it.
    res.send(body);
  } else {
    // console.log(JSON.stringify(response));
  }
});
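For reference, the undefined in the original code comes from returning mydata before the AWS callback has fired. A minimal sketch of the direct fix, assuming the AWS SDK's standard callback form of listBuckets, is to respond from inside the callback:

app.get('/listbuckets', function (req, res) {
  // listBuckets is asynchronous: the data only exists inside its callback
  s3.listBuckets(function (err, data) {
    if (err) {
      res.status(500).send(err.message);
      return;
    }
    res.json(data.Buckets); // respond here, not via a returned variable
  });
});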

CPU goes to 100% due to many HTTP calls in Node.js code. How can I optimize it?

I am putting some sample code here from my application.
I am calling the updateData function; in one minute there are about 150 HTTP calls.
Due to this code my CPU goes to 100%, and I want to optimize it to fix the performance issue.
// httpcall is presumably require('http') in the full application
function doSynchronousLoop(data) {
  if (data.length > 0) {
    var loop = function(data, i) {
      if (++i < data.length) {
        var req = httpcall.request('http://myserverip/path', function(res) {
          res.setEncoding('utf8');
          res.on('data', function (chunk) {
            setTimeout(function() {
              loop(data, i);
            }, 50);
          });
        });
        req.on('error', function(err) {
          console.error('error: ', err.stack.split("\n"));
        });
        req.write("");
        req.end();
      } else {
        allData = [];
      }
    };
    loop(data, 0);
  } else {
    allData = [];
  }
}

function updateData(allData) {
  doSynchronousLoop(allData);
}
Your callback starts a new HTTP request on every "data" event. Unless the response is really short, "data" fires multiple times, so several more requests get started before the first one emits its "end" event. If you schedule the next iteration on the "end" event instead of the "data" event, the loop will behave sequentially.
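A minimal sketch of that change, reusing the question's loop and its hypothetical httpcall handle:

var req = httpcall.request('http://myserverip/path', function (res) {
  res.setEncoding('utf8');
  res.on('data', function (chunk) {
    // consume or buffer the chunk; do not schedule the next request here
  });
  res.on('end', function () {
    loop(data, i); // move on only once the full response has arrived
  });
});
req.on('error', function (err) {
  console.error('error:', err);
});
req.end();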

nodejs/express - stream stdout instantly to the client

I spawned the following child: var spw = spawn('ping', ['-n','10', '127.0.0.1']) and I would like to receive the ping results on the client side (browser) one by one, not as a whole.
So far I tried this:
app.get('/path', function(req, res) {
  ...
  spw.stdout.on('data', function (data) {
    var str = data.toString();
    res.write(str + "\n");
  });
  ...
});
and that:
...
spw.stdout.pipe(res);
...
In both cases the browser waits for all 10 pings to complete and then prints the result as a whole. I would like to have them one by one; how can I accomplish that?
(Client is just making a call to .../path and console.logs the result)
EDIT: Although I do believe that websockets are necessary to implement this, I just want to know whether there are any other ways. I saw several confusing SO answers and blog posts (in this post, at step one, the OP streams the logs to the browser) which didn't help, so I decided to go for a bounty for some attention.
Here's a complete example using SSE (Server sent events). This works in Firefox and probably Chrome too:
var cp = require("child_process"),
    express = require("express"),
    app = express();

app.configure(function() {
  app.use(express.static(__dirname));
});

app.get('/msg', function(req, res) {
  res.writeHead(200, { "Content-Type": "text/event-stream",
                       "Cache-control": "no-cache" });

  var spw = cp.spawn('ping', ['-c', '100', '127.0.0.1']),
      str = "";

  spw.stdout.on('data', function (data) {
    str += data.toString();

    // just so we can see the server is doing something
    console.log("data");

    // Flush out line by line.
    var lines = str.split("\n");
    for (var i in lines) {
      if (i == lines.length - 1) {
        str = lines[i];
      } else {
        // Note: The double-newline is *required*
        res.write('data: ' + lines[i] + "\n\n");
      }
    }
  });

  spw.on('close', function (code) {
    res.end(str);
  });

  spw.stderr.on('data', function (data) {
    res.end('stderr: ' + data);
  });
});

app.listen(4000);
And the client HTML:
<!DOCTYPE html>
<html>
  <body>
    <ul id="eventlist"></ul>
    <script>
      var eventList = document.getElementById("eventlist");
      var evtSource = new EventSource("http://localhost:4000/msg");

      var newElement = document.createElement("li");
      newElement.innerHTML = "Messages:";
      eventList.appendChild(newElement);

      evtSource.onmessage = function(e) {
        console.log("received event");
        console.log(e);
        var newElement = document.createElement("li");
        newElement.innerHTML = "message: " + e.data;
        eventList.appendChild(newElement);
      };

      evtSource.onerror = function(e) {
        console.log("EventSource failed.");
      };

      console.log(evtSource);
    </script>
  </body>
</html>
Run node index.js and point your browser at http://localhost:4000/client.html.
Note that I had to use the "-c" option rather than "-n" since I'm running OS X.
If you are using Google Chrome, changing the content type to "text/event-stream" does what you're looking for.
res.writeHead(200, { "Content-Type": "text/event-stream" });
See my gist for complete example: https://gist.github.com/sfarthin/9139500
This cannot be achieved with the standard HTTP request/response cycle. Basically what you are trying to do is make a "push" or "realtime" server. This can only be achieved with xhr-polling or websockets.
Code Example 1:
app.get('/path', function(req, res) {
  ...
  spw.stdout.on('data', function (data) {
    var str = data.toString();
    res.write(str + "\n");
  });
  ...
});
This code never sends an end signal and therefore will never finish responding. If you were to add a call to res.end() within that event handler, you would only get the first ping, which is the expected behavior because you are ending the response stream after the first chunk of data from stdout.
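A minimal variant that at least terminates correctly (browser-side buffering aside) is to end the response only when the child process exits:

app.get('/path', function (req, res) {
  var spw = spawn('ping', ['-n', '10', '127.0.0.1']);
  spw.stdout.on('data', function (data) {
    res.write(data.toString()); // stream each chunk as it arrives
  });
  spw.on('close', function () {
    res.end(); // end the response once ping has finished
  });
});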
Code Sample 2:
spw.stdout.pipe(res);
Here stdout is flushing the packets to the browser, but the browser does not render the data chunks until all packets are received; hence it waits 10 seconds and then renders the entirety of stdout. The major benefit of this method is that the response is not buffered in memory before sending, which keeps your memory footprint light.

Why are my requests being paused when they're in a loop?

I am trying to test some examples from the book I'm reading, "Learning Node" (2012), and my application for testing the server with 2000 requests keeps pausing: the tester pauses after 5 requests and sends another 5 after a certain interval. Why is it pausing? How can I fix this?
The server code:
var http = require('http');
var fs = require('fs');

// write out numbers
var counter = 0;
function writeNumbers(res) {
  for (var i = 0; i < 100; i++) {
    counter++;
    res.write(counter.toString() + '\n');
  }
}

// create the http server
http.createServer(function(req, res) {
  var query = require('url').parse(req.url).query;
  var app = require('querystring').parse(query).file + ".txt";

  // content header
  res.writeHead(200, { 'Content-Type': 'text/plain' });

  // write out numbers
  writeNumbers(res);

  // timer to open file and read contents
  setTimeout(function() {
    console.log('opening ' + app);
    // open and read in file contents
    fs.readFile(app, 'utf8', function(err, data) {
      if (err)
        res.write('Could not find or open file for reading\n');
      else
        res.write(data);
      res.end();
    });
  }, 2000);
}).listen(3000);

console.log('Server is running on port 3000');
The spam test code:
var http = require('http');

// the url we want, plus the path and options we need
var options = {
  host: 'localhost',
  port: 3000,
  path: '/?file=secondary',
  method: 'GET'
};

var processPublicTimeline = function(response) {
  // finished? ok, write the data to a file
  console.log('finished request');
};

for (var i = 0; i < 2000; i++) {
  // make the request, and then end it, to close the connection
  http.request(options, processPublicTimeline).end();
}
While this definitely has some relation to "Why is node.js only processing six requests at a time?", it is also because you are using a timeout to call res.end(), which closes the connection/responds and thus moves on to the next connection in the queue.
You should instead think about these types of things asynchronously, without timeouts, using callbacks.
So your two main blocks could look more like this:
var counter = 0;

function writeNumbers(res, callBack) {
  // notice the callBack argument
  for (var i = 0; i < 100; i++) {
    counter++;
    res.write(counter.toString() + '\n');
  }
  // execute callBack (if it exists)
  if (callBack && typeof callBack === "function") callBack();
}

http.createServer(function (req, res) {
  var query = require('url').parse(req.url).query;
  var app = require('querystring').parse(query).file + ".txt";
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  writeNumbers(res, function() {
    // Notice this function, passed as the callBack argument for writeNumbers.
    // It executes when the main writeNumbers portion finishes.
    console.log('opening ' + app);
    fs.readFile(app, 'utf8', function(err, data) {
      if (err)
        res.write('Could not find or open file for reading\n');
      else
        res.write(data);
      res.end();
    });
  });
}).listen(3000);
Notice that your writeNumbers function now takes a callBack argument to execute when it is done, and that when you call it in your server handler, you pass a function as that callBack argument. This is one of the core patterns used very frequently in Node.js/JavaScript applications.
This means that you aren't waiting for a timeout to fire before ending your response; rather, the response is ended as soon as it has been processed, and the server moves on to the next connection immediately. This is likely to happen far quicker than the 2 seconds of your timeout, so you should see your connections processed much faster.
Because (as someone pointed out in your comments) your system can only handle a few open TCP connections at a time, you want to move through your connections as quickly as possible. Leveraging a callback chain helps you do that whenever you want things to happen in a certain order, or need to wait for certain processes to finish before executing others, without guessing with a timeout.
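As an aside, the "pauses after 5 requests" symptom matches the default socket cap of that era: old (0.x) Node versions limited each host to five concurrent sockets through the global agent (an assumption about the Node version used with the book). A one-line sketch of raising that cap in the tester, before the request loop:

var http = require('http');
// Assumption: an older (0.x) Node where http.globalAgent.maxSockets defaults to 5.
http.globalAgent.maxSockets = 50; // allow more concurrent requests per host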
Hope this helps!

Asynchronous http calls with nodeJS

I would like to launch asynchronous HTTP calls from my Node server. I saw the async Node module, and I guess async.parallel enables us to do that.
The documented example is pretty clear, but I don't know how I could manage multiple HTTP calls.
I tried the example below, but it doesn't even launch the HTTP calls:
var http = require('http');

var Calls = [];

Calls.push(function(callback) {
  // First call
  http.get('http://127.0.0.1:3002/first', callback);
});

Calls.push(function(callback) {
  // Second call
  http.get('http://127.0.0.1:3002/second', callback);
});

var async = require('async');
async.parallel(Calls, function(err, results) {
  console.log('async callback: ' + JSON.stringify(results));
  res.render('view', results);
});
If I launch the HTTP requests separately, I do get a result, but when calling them through async the final callback logs async callback: [null,null].
Have a look at the documentation:
With http.request() one must always call req.end() to signify that
you're done with the request - even if there is no data being written
to the request body.
You are creating a request, but you are not finalizing it. In your calls you should do:
var req = http.request(options, function(page) {
  // some code
});
req.end();
This is assuming you are doing a normal GET request without body.
You should also consider using http.get which is a nice shortcut:
http.get("http://127.0.0.1:3002/first", function(res) {
// do something with result
});
Update: The other thing is that callbacks in async have to be of the form
function(err, res) { ... }
The way you are doing it now won't work, because the callback to http.get accepts only one argument, res. What you need to do is the following:
http.get('http://127.0.0.1:3002/second', function(res) {
  callback(null, res);
});
OK, the thing is to call the callback as callback(null, res) instead of callback(res); the first parameter is interpreted as an error, and the second one is the real result.
Don't use capitalized names for anything other than types/classes.
Below is your code with the obvious mistakes corrected:
var http = require('http');

var calls = [];

calls.push(function(callback) {
  // First call
  http.get('http://127.0.0.1:3002/first', function (resource) {
    resource.setEncoding('utf8');
    resource.on('data', function (data) {
      console.log('first received', data);
      callback();
    });
  });
});

calls.push(function(callback) {
  // Second call
  http.get('http://127.0.0.1:3002/second', function (resource) {
    resource.setEncoding('utf8');
    resource.on('data', function (data) {
      console.log('second received', data);
      callback();
    });
  });
});

var async = require('async');
async.parallel(calls, function(err, results) {
  console.log('async callback ', results);
  res.render('view', results);
});