node.js: confusion with order of callbacks - node.js

I have just started with node.js. I find the asynchronous coding style it uses to be very impressive indeed. However, for those of us who are used to Java and Python it does take some time to get used to it.
I know the following code works fine. This is verified by several questions on this forum. I have also tried it on my own.
// Read the page once at startup, then start an HTTP server that
// serves the cached contents on every request.
var http = require('http'),
fs = require('fs');
fs.readFile('./index.html', function (err, html) {
if (err) {
//throw err;
}
// The server is created only after the read has completed, so `html`
// is guaranteed to be populated inside the request handler.
http.createServer(function(request, response) {
console.log("Server started"); // logged once per request, not at startup
response.writeHeader(200, {"Content-Type": "text/html"});
response.write(html);
response.write("Other things");
response.end();
}).listen(3000);
});
The way I am interpreting this is as follows:
1. Try reading the html file
i. When done create a server
ii. Send it over to the client
2. Do everything else.
However, we can also have a chain of thoughts as follows:
1. Create the server
2. Try reading the file
i. When done. Send it over to the client
3. In the meanwhile do anything else the server might be asked to do.
The code corresponding to the second chain of thoughts is:
// BROKEN variant (the subject of the question): the file read is
// started per request, but the response is ended synchronously
// before the read's callback has a chance to run.
var http = require('http'),
fs = require('fs');
http.createServer(function(request, response) {
console.log("Server started");
response.writeHeader(200, {"Content-Type": "text/html"});
fs.readFile('./index.html', function (err, html) {
if (err) {
//throw err;
}
// By the time this callback fires, response.end() below has already
// executed, so these writes are lost.
response.write(html);
response.write("Other things");
});
// Runs immediately after *queueing* the read, closing the response.
response.end();
}).listen(3000);
While the first code works as expected, the second one displays nothing at all in the browser.
Why is the second chain of thoughts wrong?

Actually, what happens here is that the following function gets called each time there is an incoming request:
// Handler from the working version: `html` is captured from the
// enclosing readFile callback, so the entire response is written
// and ended synchronously for each request.
function(request, response) {
console.log("Server started");
response.writeHeader(200, {"Content-Type": "text/html"});
response.write(html);
response.write("Other things");
response.end();
}
You replaced that with:
// The reordered handler: response.end() is NOT inside the readFile
// callback, so it runs before the file data can be written.
function(request, response) {
console.log("Server started");
response.writeHeader(200, {"Content-Type": "text/html"});
fs.readFile('./index.html', function (err, html) {
if (err) {
//throw err;
}
// Too late: the response has already been ended.
response.write(html);
response.write("Other things");
});
response.end(); // executes immediately, before the readFile callback
}
Now here, it will run the following:
Write the header
Queue the readFile
Immediately execute the following: response.end();
By the time it is done reading the file and wants to write the contents, you already ended the response

Related

response written to page only on second request

var http = require('http');
var fs = require('fs');
var path = process.argv[2];
// Module-level, so this buffer is shared across ALL requests: data
// appended while serving one request is still there on the next one.
var str="";
function onRequest(request, response) {
str += "";
console.log("Request received" + path);
// Asynchronous: this callback runs only after the response below has
// already been sent, which is why the first request shows nothing
// and the second shows the previous request's data.
fs.readdir(path, function(err, items) {
str += items;
});
// NOTE(review): "Context-Type" is a typo — the Content-Type header
// is never actually set.
response.writeHead(200, {"Context-Type": "text/plain"});
response.write(new Buffer(str).toString());
response.end();
}
http.createServer(onRequest).listen(8000);
The above snippet creates an HTTP server which takes a directory path as a command-line argument. A client makes an HTTP request to get the list of files in that directory, and the server sends the list back as the response.
The response is written to the page only on the second request. The page is shown empty on the first request.
Could anyone help? Thanks in advance!
Javascript is non blocking, so
// These three statements run synchronously, finishing the response
// before the readdir callback has appended anything:
response.writeHead(200, {"Context-Type": "text/plain"});
response.write(new Buffer(str).toString());
response.end();
will be executed before
str += items;
With
// Send the directory listing only after readdir has finished, so the
// response always contains the data (fixes the empty first response).
fs.readdir(path, function (err, items) {
  if (err) {
    // Report the failure instead of responding with an empty body.
    response.writeHead(500, { "Content-Type": "text/plain" });
    response.end("Could not read directory: " + err.message);
    return;
  }
  str += items;
  // Header name fixed: the original said "Context-Type", so the
  // Content-Type header was never actually being set.
  response.writeHead(200, { "Content-Type": "text/plain" });
  // Write the string directly; `new Buffer(str)` is deprecated and
  // the round-trip through Buffer and back to string was a no-op.
  response.write(str);
  response.end();
});
it will send the response after readdir.
And in Node.js the program is not restarted for every new request (as it is in PHP), so your global variable is shared between all requests. If you don't want that, put the var str=""; inside onRequest.
If you have multiple routes you also want to look into something like express, because the http module doesn't include routing.

Unresolved function or method for pipe()

I'm trying to write a simple file web file server. I'm using PhpStorm.
var http = require('http');
var fs = require('fs');
// Reply with a plain 404 page.
function send404Request(response) {
response.writeHead("404", {"Content-Type": "text/html"});
response.write("404 Page Not Found");
response.end();
}
// Serve ./index.html for GET /, a 404 for everything else.
function onRequest(request, response) {
if (request.method === 'GET' && request.url === '/') {
// NOTE(review): Content-Type says text/plain although an HTML file
// is being streamed — presumably text/html was intended; confirm.
response.writeHead("200", {"Content-Type": "text/plain"});
// Stream the file into the response. The "unresolved pipe()" is an
// IDE/type-stub complaint only; the code itself is valid Node.
fs.createReadStream("./index.html").pipe(response);
} else {
send404Request(response);
}
}
http.createServer(onRequest).listen(8888);
console.log("file server is now running...");
However, PhpStorm says "unresolved function or method pipe()"
Here is my setting for JavaScript libraries in PhpStorm:
Any idea where goes wrong?
In Settings/Project Settings/JavaScript/Libraries, download "node-DefinitelyTyped". This worked for me. I had the same issue as yours.
At the end of 2019 I faced the same issue, but in my gulpfile.js:
I got the "Unresolved function or method pipe()" and
the "Unresolved function or method require()" error messages in PhpStorm.
Downloading the #types/gulp library in PHPStorm resolved both issues.
Perhaps you need to wait for the stream to open:
var readStream = fs.createReadStream("./index.html");
// Start piping once the underlying file descriptor is open.
readStream.on('open', function () {
readStream.pipe(response);
});
Additionally, add this to catch any errors that might be going on:
// Log stream errors (e.g. missing file) instead of crashing silently.
readStream.on('error', function(err) {
console.log(err.message);
});
Here is a workaround I am using. Instead of
fs.createReadStream("./index.html").pipe(response);
I am using:
response.write(fs.readFileSync("./index.html"));
This works well for intelliJ IDEA.

stream with nodejs (io.js), backpressure and drain, `end` event is called twice

I have this code. The end event is fired two times. I can't understand why. Any hints? Thanks.
var chance = require('chance').Chance();
require('http').createServer(function (req, res) {
res.writeHead(200, {'Content-Type': 'text/plain'});
// Keep writing ~16KB chunks until res.write() signals backpressure
// (returns false), then resume from the 'drain' event.
function generateMore() {
while(chance.bool({likelihood: 95})) {
var shouldContinue = res.write(
chance.string({length: (16 * 1024) - 1})
);
if(!shouldContinue) {
console.log('Backpressure');
return res.once('drain', generateMore);
}
}
// Fires once per request; the log shows up twice because the browser
// also issues a second request for /favicon.ico.
res.end('\nThe end...\n', function() {
console.log('All data was sent'); // I see this log two times
});
}
generateMore();
}).listen(8080, function () {
console.log('Listening');
});
Because when you open the URL, the browser also tries to fetch favicon.ico and therefore sends two requests to your server.

how to use setTimeout asynchronously in node.js

I'm new to node.js, I tried to use setTimeout to simulate long connections and hope it act asynchronously.
var http = require('http');
http.createServer(function (request, response) {
console.log('New request # ' + request.url);
// The IIFE wrapper is redundant: `response` is already captured by
// the setTimeout callback's closure.
(function (response) {
setTimeout(function () {
console.log('Time is up');
response.writeHead(200, {"Content-Type": "text/plain"});
response.end('Hello World\n');
}, 3000);
})(response);
}).listen(8124);
console.log('Server running at http://127.0.0.1:8124/');
But, the code above perform like a synchronous single thread app, which can only handle one request per 3 seconds.
I thought everything in node.js should act asynchronously. So, what's the problem here?
setTimeout is async; you don't need that anonymous function in the middle — just write this:
// Same handler without the wrapper: setTimeout is itself asynchronous,
// so concurrent requests each wait their ~3 seconds in parallel.
var http = require('http');
http.createServer(function (request, response) {
console.log('New request # ' + request.url);
setTimeout(function () {
console.log('Time is up');
response.writeHead(200, {"Content-Type": "text/plain"});
response.end('Hello World\n');
}, 3000);
}).listen(8124);
console.log('Server running at http://127.0.0.1:8124/');
If you produce 10 concurrent requests, the total completion time will be around 3 seconds, which means it is async. You can use the ab tool to check, or if you program in Node it may be easier to install http-perf and run nperf -c 10 -n 10 http://127.0.0.1:8124
You need to run your sleep in a new process. There is a module that can help you (https://github.com/cramforce/node-worker) or you can look at the normal API documentation about spawn.
// NOTE(review): this looks like machine-generated (CoffeeScript-style)
// code that bolts a setTimeOut helper onto the async module object.
var async,
__slice = [].slice;
async = require("async");
async.setTimeOut = function() {
var arg, args, callback, delay, runWithTimeOut;
// Expected call shape: setTimeOut(callback, delay, arg, ...args)
callback = arguments[0], delay = arguments[1], arg = arguments[2], args = 4 <= arguments.length ? __slice.call(arguments, 3) : [];
runWithTimeOut = function() {
// Extra setTimeout parameters are forwarded to `callback`; note that
// `args` is passed as a single array argument, not spread.
return setTimeout(callback, delay, arg, args);
};
return async.parallel([runWithTimeOut]);
};

Incr appears to run twice--why?

I'm having a hard time getting node, redis, and async to do what I want. I'm trying very basic things to grasp the patterns of redirecting control flow. Here, I hold a counter variable "success" that increases by one if a comparison key0 > key1 is true. They are static for now so it's always true; the only thing I wish to change is to increment success. I refresh the browser to re-run the comparison and increment success again.
My trouble is: when the page is refreshed, success jumps by 2. I tried putting a callback with incr, but it looks like only get-type commands have callbacks. I have a client.end(); in my script, but it prevented me from reloading the page so I commented it out. I suspect this is the source of my problem. If so, where does client.end belong?
// Demo: on every HTTP request, compare two redis keys and bump a
// "success" counter when key0 > key1.
var http = require("http");
var redis = require("redis");
var async = require("async");
client = redis.createClient(); // NOTE(review): implicit global — missing `var`
// NOTE: browsers also request /favicon.ico, so this handler — and the
// incr — can run twice per page load, which is why success jumps by 2.
http.createServer(function(request, response) {
// key "success" set to 0 externally, through redis-cli;
client.set("key0", "19");
client.set("key1", "11");
response.writeHead(200, {"Content-Type": "text/plain"});
// Fetch the three keys in order, then compare in the final callback.
async.series([
shortcut("key0"),
shortcut("key1"),
shortcut("success")
],
function(err, results){
// NOTE(review): presumably string comparison ("19" > "11"), since
// redis replies arrive as strings — verify if using other values.
if (results[0] > results[1]) {
client.incr("success", function(err, reply) {
// NOTE(review): may fire after response.end() below — confirm.
response.write("incr done");
});
response.write(results[0] + "\n\n");
response.write(results[1] + "\n\n");
response.write(results[2]);
}
response.end();
// client.quit();
});
}).listen(8000);
// Build an async.series-compatible task that fetches `key` from redis
// and reports its value; redis errors are deliberately discarded.
function shortcut(key) {
  var task = function (done) {
    var onReply = function (err, value) {
      done(null, value);
    };
    client.get(key, onReply);
  };
  return task;
}
Your browser most likely requests favicon.ico and thus generates the extra request which runs your code a second time.

Resources