I want to write some simple code to test Node.js concurrency when it receives many requests.
I use the loadtest module to simulate sending many requests to the server, with this command:
loadtest -c 10 --rps 200 http://localhost:3000/
My simple code, written in server.js:
// Minimal HTTP server demonstrating that Node.js runs JavaScript on a single
// thread: a CPU-bound loop in one request handler blocks every other pending
// request until it finishes.
var http = require('http');
var server = http.createServer();
var cun = 0;  // busy-loop counter (simulated CPU work)
var step = 0; // number of requests handled so far
function handleRequest(req, res) {
  console.log(step++);
  // Simulated heavy CPU-bound work; this blocks the event loop on purpose.
  while (100000000 > cun) {
    cun++;
  }
  cun = 0;
  // BUG FIX: always finish the response — the original handler never
  // replied, so clients hung forever even after the loop completed.
  res.end();
}
server.on('request', handleRequest);
server.listen(3000);
but the requests wait until the while loop is done,
and I know that to solve this problem I should use a callback function, but I don't know how to write it.
// HTTP server that reads a "number" query parameter from the request URL.
var http = require('http');
var url = require("url");
var server = http.createServer();
function handleRequest(req, res) {
  res.writeHead(200, { 'content-type': 'text/plain'});
  // BUG FIX: the parameter is named `req`, not `request`; the original
  // `url.parse(request.url, ...)` threw a ReferenceError on every request.
  var params = url.parse(req.url, true).query;
  var input = params.number;
  /*
  1- Use input param in a algorithm and process it
  2- Insert data in to the db
  3- select on db and return to the client
  */
  res.end();
}
server.on('request', handleRequest);
server.listen(3000);
PROBLEM :
All requests wait in a queue until the last request's processing (process-insert-select) is done.
Related
I'm building a react app
In one component I'm writing this GET request which works:
In another component I'm writing this POST request:
Which then returns this 404 error:
And I have no idea how my GET works but my POST returns 404:not found when I'm requesting the same file both times?
UPDATE:
I'm running a node.js server now but it's a bit of a frankenstein's monster as this really isn't an area I have an understanding of. Does anyone know what I'm doing wrong?
// Server setup from the node.js website: reply to every request with a
// plain-text greeting.
const http = require('http');

const hostname = '127.0.0.1';
const port = 3000;

// Named handler instead of an inline arrow; behavior is identical.
function handleRequest(req, res) {
  res.statusCode = 200;
  res.setHeader('Content-Type', 'text/plain');
  res.end('Hello World\n');
}

const server = http.createServer(handleRequest);

server.listen(port, hostname, () => {
  console.log(`Server running at http://${hostname}:${port}/`);
});
// Listen for data from the React app to feed into the JSON file.
var express = require("express");
var myParser = require("body-parser");
var app = express();
app.use(myParser.urlencoded({extended : true}));
// BUG FIX: only the urlencoded parser was registered, so a POSTed JSON
// document left request.body empty; parse JSON bodies too.
app.use(myParser.json());
app.post("/scene-setup.json", function(request, response) {
  console.log(request.body); //This prints the JSON document received (if it is a JSON document)
  // BUG FIX: answer the client — the original handler never responded, so
  // the POST request hung until it timed out.
  response.sendStatus(200);
});
app.listen(3001);
// Updating JSON file with "obj" (working)
var jsonfile = require('jsonfile')
var file = './scene-setup.json'
var obj = {name: 'JP'}
jsonfile.writeFile(file, obj, function (err) {
  // Only log on failure; the original logged `null` on every success.
  if (err) console.error(err)
})
Axios is used for making HTTP requests. So, you should have a backend server running that can handle these requests. I am not sure what exactly is the data that you want to save. If you need access to that data, should be saving it on the backend.
If you want to save some data just on the client side, HTML5 filesystem API might be something you want to look at. It can manage some data in the limited sandboxed part of user's filesystem.
If I have two node.js servers running, how can I tell which server called the processRequest function?
var http = require('http');
var https = require('https');
// Shared handler for both servers; reports which local port the request
// arrived on via the connection's socket.
function processRequest(req, res) {
  res.writeHead(200);
  // BUG FIX: `???.port` was a placeholder (and a syntax error);
  // req.socket.localPort is the port this server is listening on.
  res.end("hello world, I'm on port: " + req.socket.localPort + "\n");
}
var server1 = http.createServer(processRequest).listen(80);
var server2 = https.createServer(processRequest).listen(443);
Originally I wanted the port number, but couldn't find the object/variable to give it to me. Based on the below answer it makes more sense to determine encrypted vs non-encrypted since the point is to know which of the http servers the request came in on.
The req parameter is an instance of IncomingMessage from which you can access the socket.
From there you can access both the localPort and remotePort.
Something like:
// Local (server-side) port the request came in on, e.g. 80 or 443.
console.log(req.socket.localPort);
// Remote (client-side) ephemeral port of the connecting peer.
console.log(req.socket.remotePort);
This way you get the port number:
var http = require('http');

// Create the server first so its address() is reachable from the handler.
var server = http.createServer().listen(8080);

// Report the listening port back to the client; `server.address().port`
// is equivalent to `this.address().port` inside this handler.
server.on('request', function (req, res) {
  res.writeHead(200, {"Content-Type": "text/html; charset: UTF-8"});
  res.write("Hello from Node! ");
  res.write(" Server listening on port " + server.address().port);
  res.end();
});
In case you are using http://localhost:<port_number>, then you can get the port number using req.headers.host property.
Example:
// Demonstrates reading the port from the Host request header.
const http = require('http');
const server = http.createServer((req, res)=>{
    console.log(req.headers.host); // localhost:8080
    console.log(req.headers.host.split(':')[1]); // 8080
    // BUG FIX: finish the response — the original handler never replied,
    // so every request hung until the client timed out.
    res.end();
})
server.listen(8080);
Instead of checking port numbers, you can also check the server instance or the connection object:
var http = require('http'),
    https = require('https');

// One handler shared by both servers; detect TLS from the connection.
function processRequest(req, res) {
  // A TLS socket exposes `encrypted`; plain HTTP sockets do not.
  var isSSL = Boolean(req.socket.encrypted);
  // alternate method:
  // var isSSL = (this instanceof https.Server);
  // or, to check against a specific server instance:
  // var isServer1 = (this === server1);
  res.writeHead(200);
  var suffix = isSSL ? '' : ' not';
  res.end('hello world, i am' + suffix + ' encrypted!\n');
}

var server1 = http.createServer(processRequest).listen(80);
var server2 = https.createServer(processRequest).listen(443);
I am learning Node.JS and this is the most commonly available example of server by Node.JS
// Load the http module to create an http server.
var http = require('http');

// Respond with "Hello World" to every request.
var server = http.createServer(function (request, response) {
  // var name = request.getParameter('name'); // no such API in Node's http module
  // console.log(name);
  console.log('res: ' + JSON.stringify(response.body));
  response.writeHead(200, {"Content-Type": "text/plain"});
  response.end("Hello World\n");
});

// Listen on port 8000; the IP defaults to 127.0.0.1.
server.listen(8000);
Now when I am executing this from console it works fine, and from browser also it works fine, by hitting the URL: localhost:8000
But now I also want to send some parameters to this server, so I tried localhost:8000/?name=John and a few more URLs, but none of them worked. Can anyone help me?
Thanks in advance!!
try:
var url = require('url');
// Parse the URL with its query string (second arg `true`) and read ?name=...
var name = url.parse(request.url, true).query.name;
Node's HTTP API is rather low-level compared to other frameworks/environments that you might be familiar with, so pleasantries like a getParameter() method don't exist out of the box.
You can get the query-string from the request's url, which you can then parse:
var http = require('http');
var url = require('url');

// Parse each request's URL (including the query string) and log ?name=...
var server = http.createServer(function (request, response) {
  var query = url.parse(request.url, true).query;
  console.log(query.name);
  // ...
});
I'm building a simple HTTP server that serves a fairly large file to clients using streams. I need to serve multiple clients at the same time, and I'm wondering what the simplest way to achieve that is.
My initial feeling is that using the cluster module and forking {num CPUs} processes might be the simplest way.
var StreamBrake = require('streambrake');
var http = require('http');
var fs = require('fs');

// Stream the file to each client, throttled to ~10 KiB/s by StreamBrake.
var server = http.createServer(function (req, res) {
  fs.createReadStream('/data/somefile')
    .pipe(new StreamBrake(10240))
    .pipe(res);
});

server.listen(1234, 10);
Edit: To clarify, the issue is that this code won't begin to serve the second client until it has finished serving the first.
After thinking about your issue, I'm confused as to why you believe there's an issue. CreateReadStream is asynchronous. Here is in example that complicates the code a little bit in order to demonstrate that using CreateReadStream we can indeed service multiple connections at a time.
/*jshint node:true*/
var http = require('http');
var fs = require('fs');

var activeRequests = 0;
var launchFiveRequests = 5;

// Stream this script back for each request, counting how many requests are
// in flight to show that they are serviced concurrently.
http.createServer(function (req, res) {
  activeRequests++;
  console.log('Request received');
  var readStream = fs.createReadStream('play.js', {'bufferSize': 1024});
  readStream.setEncoding('utf8');
  readStream.on('data', function (data) {
    console.log(activeRequests);
    res.write(data);
  });
  readStream.on('end', function () {
    res.end();
    console.log('end');
    activeRequests--;
  });
}).listen(8080);

var options = {
  hostname: 'localhost',
  port: 8080,
  path: '/'
};

// BUG FIX: the original loop tested `launchTenRequests`, which was never
// declared, so it threw a ReferenceError; use the counter declared above.
while (launchFiveRequests--) {
  http.get(options, function (res) {
    console.log('response received');
  });
}
Serve up a sufficiently large file, and you should see that all 5 requests are live at once, and that they all finish at roughly the same time.
Trying to learn more about node.js by making a simple http proxy server. The use scenario is simple: user -> proxy -> server -> proxy -> user
The following code works until the last step. Couldn't find way to pipe connector's output back to the user.
#!/usr/bin/env node
// Naive HTTP proxy: user -> proxy -> server -> proxy -> user.
// NOTE(review): the upstream response is never piped back to the client
// here — that is exactly the problem described below. The commented
// attempts at the bottom pipe the wrong objects; the fix is to pipe the
// upstream response from inside http.request's callback.
var
url = require('url'),
http = require('http'),
acceptor = http.createServer().listen(3128);
acceptor.on('request', function(request, response) {
console.log('request ' + request.url);
// Stop 'data' events until the upstream request is wired up.
request.pause();
// Build the outgoing request from the absolute URL the browser sent.
var options = url.parse(request.url);
options.headers = request.headers;
options.method = request.method;
options.agent = false; // no connection pooling
var connector = http.request(options);
// Forward the client's request body upstream, then resume the flow.
request.pipe(connector);
request.resume();
// connector.pipe(response); // doesn't work
// connector.pipe(request); // doesn't work either
});
Using tcpflow I see the incoming request from the browser, then the outgoing proxy request, then the server response back to the proxy. Somehow i couldn't manage to retransmit the response back to the browser.
What is the proper way to implement this logic with pipes?
you dont have to 'pause', just 'pipe' is ok
// Pass a callback to http.request: it receives the upstream response (res),
// which is piped straight back to the client's `response`.
var connector = http.request(options, function(res) {
res.pipe(response, {end:true});//tell 'response' end=true
});
// Forward the client's request body upstream; {end:true} ends the upstream
// request when the client request ends.
request.pipe(connector, {end:true});
the HTTP request will not finish until you tell it to end;
OK. Got it.
UPDATE: NB! As reported in the comments, this example doesn't work anymore. Most probably due to the Streams2 API change (node 0.9+)
Piping back to the client has to happen inside connector's callback as follows:
#!/usr/bin/env node
// HTTP proxy: forward each incoming request upstream, then stream the
// upstream status, headers and body back to the client.
var url = require('url');
var http = require('http');

var acceptor = http.createServer().listen(3128);

acceptor.on('request', function (request, response) {
  console.log('request ' + request.url);
  request.pause();

  // Build the upstream request options from the absolute request URL.
  var options = url.parse(request.url);
  options.headers = request.headers;
  options.method = request.method;
  options.agent = false;

  // The upstream response only exists inside this callback, so the pipe
  // back to the client has to be set up here.
  var proxyRequest = http.request(options, function (upstreamResponse) {
    upstreamResponse.pause();
    response.writeHeader(upstreamResponse.statusCode, upstreamResponse.headers);
    upstreamResponse.pipe(response);
    upstreamResponse.resume();
  });

  request.pipe(proxyRequest);
  request.resume();
});
I used the examples from this post to proxy http/s requests. Faced with the problem that cookies were lost somewhere.
So to fix that you need to handle headers from the proxy response.
Below the working example:
// HTTP proxy that forwards the upstream status AND headers back to the
// client, so cookies (Set-Cookie) are not lost.
const http = require('http');
const acceptor = http.createServer().listen(3128);

acceptor.on('request', function(request, response) {
  // BUG FIX: the original called `service.request(options, ...)`, but
  // neither `service` nor `options` was ever defined, so every request
  // threw a ReferenceError. Build the upstream request from the incoming
  // one instead (http.request accepts a URL string plus options).
  const req = http.request(request.url, {
    method: request.method,
    headers: request.headers,
  }, function(res) {
    // Forward status code and headers so cookies survive the round trip.
    response.writeHead(res.statusCode, res.headers);
    return res.pipe(response, {end: true});
  });
  request.pipe(req, {end: true});
});