I'm trying to use curl to test a small Node server I've written, and for some reason it fails. My script looks like this:
http.createServer(function (req, res) {
  "use strict";
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  var queryObject = url.parse(req.url, true).query;
  if (queryObject) {
    if (queryObject.launch === "yes") {
      launch();
    } else {
      // what came through?
      console.log(req.body);
    }
  }
}).listen(getPort(), '0.0.0.0');
When I point my browser to:
http://localhost:3000/foo.js?launch=yes
that works fine. My hope is to send some data as JSON, so I added a section to see if I could read the body of the request (the 'else' block). However, when I do this with curl, I get 'undefined':
curl.exe -i -X POST -H "Content-Type: application/json" -d '{"username":"xyz","password":"xyz"}' http://localhost:3000/foo.js?moo=yes
I'm not sure why this fails.
The problem is that you are treating both requests as if they were GET requests.
In this example I use different logic for each method. Note that the req object acts as a ReadStream.
var http = require('http'),
    url = require('url');

http.createServer(function (req, res) {
  "use strict";
  if (req.method == 'POST') {
    console.log("POST");
    var body = '';
    req.on('data', function (data) {
      body += data;
      console.log("Partial body: " + body);
    });
    req.on('end', function () {
      console.log("Body: " + body);
    });
    res.writeHead(200, {'Content-Type': 'text/html'});
    res.end('post received');
  } else {
    var queryObject = url.parse(req.url, true).query;
    console.log("GET");
    res.writeHead(200, {'Content-Type': 'text/plain'});
    if (queryObject.launch === "yes") {
      res.end("LAUNCHED");
    } else {
      res.end("NOT LAUNCHED");
    }
  }
}).listen(3000, '0.0.0.0');
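Since the goal is to send JSON, you can then parse the accumulated body once the 'end' event fires. A minimal sketch of that handler (not in the code above), with a try/catch in case the body isn't valid JSON:
req.on('end', function () {
  try {
    var parsed = JSON.parse(body);
    // with the curl command from the question this logs "xyz"
    console.log(parsed.username);
  } catch (e) {
    console.log("Body was not valid JSON: " + body);
  }
});
Re-running the curl command from the question should then log the fields of the JSON payload instead of 'undefined'.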
var http = require("http");
var fs = require("fs");
http.createServer(function (req, res) {
fs.readFile("demo1.html", function (err, data) {
res.writeHead(200, { "Content-Type": "text/html" });
res.write(data);
return res.end();
});
}).listen(80);
Error:
You don't check for an error in the callback of readFile. If there is an error, data will be undefined and res.write(data) throws the error you see.
var http = require("http");
var fs = require("fs");
http.createServer(function (req, res) {
fs.readFile("demo1.html", function (err, data) {
if (err) {
console.log(err);
res.writeHead(404); //or whatever status code you want to return
} else {
res.writeHead(200, { "Content-Type": "text/html" });
res.write(data);
}
return res.end();
});
}).listen(80);
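As an alternative sketch (not from the answer above), you can stream the file instead of buffering it with readFile, and handle the stream's 'error' event so a missing demo1.html still gets a response:
var http = require("http");
var fs = require("fs");

http.createServer(function (req, res) {
  var stream = fs.createReadStream("demo1.html");
  stream.on("open", function () {
    res.writeHead(200, { "Content-Type": "text/html" });
    stream.pipe(res); // pipe() ends the response once the file has been sent
  });
  stream.on("error", function (err) {
    console.log(err);
    res.writeHead(404);
    res.end();
  });
}).listen(80);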
Is it possible in Node.js to return HTML (e.g. <div>Test</div>) as a response to the client?
I saw that option in Express with the sendFile() method.
Is there something similar in Node?
I found this solution:
var myHtmlData;

fs.readFile('./index.html', (err, data) => {
  if (err) {
    throw err;
  } else {
    myHtmlData = data;
  }
});

// and then use it
response.write(myHtmlData)
But I was wondering whether it is possible to do it with some method similar to sendFile(), writing HTML like <div>Test</div> directly without reading it from another file.
Sure. It's pretty simple. The following code returns the response as HTML to the client.
var http = require('http');

http.createServer(function (req, res) {
  if (req.url === "/") {
    res.writeHead(200, { 'Content-Type': 'text/html' });
    res.end("<div><p>Test</p></div>");
  } else {
    res.writeHead(404); // end the response for other URLs so they don't hang
    res.end();
  }
}).listen(3000);
And in case you want to serve an HTML or JSON file as a response, you can execute the following code.
var http = require('http');
var fs = require('fs');

http.createServer(function (req, res) {
  if (req.url === '/I_want_json') {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    var obj = {
      firstName: "Jane",
      lastName: "Doe"
    };
    res.end(JSON.stringify(obj));
  } else if (req.url === '/I_want_html') {
    res.writeHead(200, { 'Content-Type': 'text/html' });
    var html = fs.readFileSync('./index.html');
    res.end(html);
  } else {
    res.writeHead(404);
    res.end();
  }
}).listen(3000, '127.0.0.1');
Do not forget to set the Content-Type as shown, since the client relies on it to distinguish the type of the response.
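With the server above running, both routes can be checked from the command line, e.g.:
curl -i http://127.0.0.1:3000/I_want_json
curl -i http://127.0.0.1:3000/I_want_html
The -i flag prints the response headers, so you can see which Content-Type was sent for each route.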
First, you need to set the content type to HTML. Then read the HTML file using the fs module and send its data as the response. Don't forget to end the response using the end() method.
const http = require('http');
const fs = require('fs');

const server = http.createServer((req, res) => {
  res.setHeader("Content-Type", "text/html");
  fs.readFile('./views/index.html', (err, data) => {
    if (err) {
      console.log(err);
      res.end();
    } else {
      res.write(data);
      res.end();
    }
  });
});

server.listen(3000); // the original snippet stopped without listening; port 3000 is an arbitrary choice
I'm learning Node, and I have this code
var http = require('http');
// var data = require('fs').readFileSync(__dirname + '/index.html', 'utf8');

http.createServer(function (req, res) {
  if (req.url === '/') {
    res.writeHead(200, {"content-type": "text/html"});
    var html = require('fs').createReadStream(__dirname + '/index.html');
    html.on('data', function (chunk) {
      res.write(chunk);
    });
  } else if (req.url === '/api') {
    var obj = {
      firstname: 'John',
      lastname: 'Smith'
    };
    res.writeHead(200, {"content-type": "application/json"});
    res.write(JSON.stringify(obj));
  } else {
    res.writeHead(404, {"content-type": "text/plain"});
    res.write("Error 404: Page not found.");
    res.end();
  }
}).listen(1337, "127.0.0.1");

console.log('Server listening on port 1337');
For some reason, the 404 response will not display unless I use res.end(), even though the other two responses display fine without it. Does anyone know why this is?
I'm trying to use the node-http-proxy as a reverse proxy, but I can't seem to get POST and PUT requests to work. The file server1.js is the reverse proxy (at least for requests with the url "/forward-this") and server2.js is the server that receives the proxied requests. Please explain what I'm doing incorrectly.
Here's the code for server1.js:
// File: server1.js
//
var http = require('http');
var httpProxy = require('http-proxy');

httpProxy.createServer(function (req, res, proxy) {
  if (req.method == 'POST' || req.method == 'PUT') {
    req.body = '';
    req.addListener('data', function(chunk) {
      req.body += chunk;
    });
    req.addListener('end', function() {
      processRequest(req, res, proxy);
    });
  } else {
    processRequest(req, res, proxy);
  }
}).listen(8080);

function processRequest(req, res, proxy) {
  if (req.url == '/forward-this') {
    console.log(req.method + ": " + req.url + " => I'm going to forward this.");
    proxy.proxyRequest(req, res, {
      host: 'localhost',
      port: 8855
    });
  } else {
    console.log(req.method + ": " + req.url + " => I'm handling this.");
    res.writeHead(200, { "Content-Type": "text/plain" });
    res.write("Server #1 responding to " + req.method + ": " + req.url + "\n");
    res.end();
  }
}
And here's the code for server2.js:
// File: server2.js
//
var http = require('http');

http.createServer(function (req, res) {
  if (req.method == 'POST' || req.method == 'PUT') {
    req.body = '';
    req.addListener('data', function(chunk) {
      req.body += chunk;
    });
    req.addListener('end', function() {
      processRequest(req, res);
    });
  } else {
    processRequest(req, res);
  }
}).listen(8855);

function processRequest(req, res) {
  console.log(req.method + ": " + req.url + " => I'm handling this.");
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  res.write("Server #2 responding to " + req.method + ': url=' + req.url + '\n');
  res.end();
}
http-proxy depends on the data and end events for POST / PUT requests. The latency between the time that server1 receives the request and when it is proxied means that http-proxy misses those events entirely. You have two options here to get this to work correctly - you can buffer the request or you can use a routing proxy instead. The routing proxy seems the most appropriate here since you only need to proxy a subset of requests. Here's the revised server1.js:
// File: server1.js
//
var http = require('http');
var httpProxy = require('http-proxy');

var proxy = new httpProxy.RoutingProxy();

http.createServer(function (req, res) {
  if (req.url == '/forward-this') {
    return proxy.proxyRequest(req, res, {
      host: 'localhost',
      port: 8855
    });
  }
  if (req.method == 'POST' || req.method == 'PUT') {
    req.body = '';
    req.addListener('data', function(chunk) {
      req.body += chunk;
    });
    req.addListener('end', function() {
      processRequest(req, res);
    });
  } else {
    processRequest(req, res);
  }
}).listen(8080);

function processRequest(req, res) {
  console.log(req.method + ": " + req.url + " => I'm handling this.");
  res.writeHead(200, { "Content-Type": "text/plain" });
  res.write("Server #1 responding to " + req.method + ": " + req.url + "\n");
  res.end();
}
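With both servers running, you can check that proxied and non-proxied POST requests behave differently, for example:
curl -X POST -d "hello" http://localhost:8080/forward-this
curl -X POST -d "hello" http://localhost:8080/anything-else
The first request should come back with "Server #2 responding to POST: ...", the second with "Server #1 responding to POST: ...".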
In addition to @squamos's answer:
How to write a node express app that serves most local files, but reroutes some to another domain?
var proxy = new httpProxy.RoutingProxy();
"Above code is working for http-proxy ~0.10.x. Since then lot of things had changed in library. Below you can find example for new version (at time of writing ~1.0.2)"
var proxy = httpProxy.createProxyServer({});
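For reference, a minimal sketch (not from the linked answer) of how that 1.x proxy instance is typically wired into a plain http server; the target here simply reuses the port of server2.js above:
var http = require('http');
var httpProxy = require('http-proxy');

var proxy = httpProxy.createProxyServer({});

http.createServer(function (req, res) {
  // proxy.web() takes over the role of the old proxy.proxyRequest()
  proxy.web(req, res, { target: 'http://localhost:8855' }, function (err) {
    // the optional callback fires on proxy errors
    res.writeHead(502, { 'Content-Type': 'text/plain' });
    res.end('Proxy error: ' + err.message);
  });
}).listen(8080);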
Here is my solution for proxying POST requests. This isn't the ideal solution, but it works and is easy to understand.
var request = require('request');
var http = require('http'),
    httpProxy = require('http-proxy'),
    proxy = httpProxy.createProxyServer({});

http.createServer(function(req, res) {
  if (req.method == 'POST') {
    request.post('http://localhost:10500/MyPostRoute',
      {form: {}},
      function(err, response, body) {
        if (!err && response.statusCode == 200) {
          // Notice I use "res", not "response", for returning the response
          res.writeHead(200, {'Content-Type': "application/json"});
          res.end(body);
        } else {
          res.writeHead(404, {'Content-Type': "application/json"});
          res.end(JSON.stringify({'Error': err}));
        }
      });
  } else if (req.method == 'GET') {
    proxy.web(req, res, { target: 'http://localhost:9000' }, function(err) {
      console.log(err);
    });
  }
}).listen(8080); // the original snippet did not show the listen call; 8080 is an arbitrary choice
The ports 10500 and 9000 are arbitrary; in my code I dynamically assign them based on the services I host. This doesn't handle PUT, and it might be less efficient because I am creating another response instead of manipulating the current one.
I'm trying to send text gzip-compressed, but I don't know how. The examples I've seen use fs, but I don't want to send a text file, just a string.
const zlib = require('zlib');
const http = require('http');

http.createServer(function (req, res) {
  res.writeHead(200, {'Content-Type': 'text/html', 'Content-Encoding': 'gzip'});
  const text = "Hello World!";
  res.end(text);
}).listen(80);
You're half way there. I can heartily agree that the documentation isn't quite up to snuff on how to do this;
const zlib = require('zlib');
const http = require('http');

http.createServer(function (req, res) {
  res.writeHead(200, {'Content-Type': 'text/html', 'Content-Encoding': 'gzip'});
  const text = "Hello World!";
  const buf = Buffer.from(text, 'utf-8'); // Choose an encoding for the string.
  zlib.gzip(buf, function (_, result) {   // The callback will give you the
    res.end(result);                      // result, so just send it.
  });
}).listen(80);
A simplification would be not to use the Buffer;
http.createServer(function (req, res) {
  res.writeHead(200, {'Content-Type': 'text/html', 'Content-Encoding': 'gzip'});
  const text = "Hello World!";
  zlib.gzip(text, function (_, result) { // The callback will give you the
    res.end(result);                     // result, so just send it.
  });
}).listen(80);
...and it seems to send UTF-8 by default. However, I personally prefer to walk on the safe side when there is no default behavior that makes more sense than others and I can't immediately confirm it with documentation.
Similarly, in case you need to pass a JSON object instead:
const data = {'hello':'swateek!'}
res.writeHead(200, {'Content-Type': 'application/json', 'Content-Encoding': 'gzip'});
const buf = new Buffer(JSON.stringify(data), 'utf-8');
zlib.gzip(buf, function (_, result) {
res.end(result);
});
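To verify the response really is gzip-compressed, one option (assuming the server above is listening on port 80) is curl's --compressed flag, which sends Accept-Encoding and decompresses the body for you:
curl -i --compressed http://localhost/
The -i output should show Content-Encoding: gzip while the body prints as the original string; without --compressed, curl returns the raw compressed bytes.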