I'm writing an HTTP server using Node.js and am having trouble isolating the request body as a readable stream. Here is a basic sample of my code:
var http = require('http')
  , fs = require('fs');

http.createServer(function(req, res) {
  if (req.method.toLowerCase() == 'post') {
    req.pipe(fs.createWriteStream('out.txt'));
    req.on('end', function() {
      res.writeHead(200, {'content-type': 'text/plain'});
      res.write('Upload Complete!\n');
      res.end();
    });
  }
}).listen(8182);
console.log('listening on port 8182');
According to Node's documentation, the request param is an instance of http.IncomingMessage, which implements Node's readable stream interface. The problem with just using stream.pipe() as I did above is that the readable stream includes plain-text headers along with the request body. Is there a way to isolate only the request body as a readable stream?
I'm aware that there are frameworks for file uploads such as formidable. My ultimate goal is not to create an upload server but to act as a proxy and stream the request body to another web service.
Thanks in advance.
Edit:
Working server for "Content-Type: multipart/form-data" using busboy:
var http = require('http')
  , fs = require('fs')
  , Busboy = require('busboy');

http.createServer(function(req, res) {
  if (req.method.toLowerCase() == 'post') {
    var busboy = new Busboy({headers: req.headers});
    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
      // busboy hands us each file part as its own readable stream
      file.pipe(fs.createWriteStream('out.txt'));
    });
    // respond once busboy has consumed the whole request,
    // rather than on the raw request's 'end' event
    busboy.on('finish', function() {
      res.writeHead(200, {'Content-Type': 'text/plain'});
      res.write('Upload Complete!\n');
      res.end();
    });
    req.pipe(busboy);
  }
}).listen(8182);
console.log('listening on port 8182');
Check your req.headers['content-type']. If it's multipart/form-data then you could use a module like busboy to parse the request for you and give you readable streams for file parts (and plain strings for non-file parts if they exist).
If the content-type is some other multipart/* type, then you could use dicer, which is the underlying module that busboy uses for parsing multipart.
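For the proxying goal specifically: note that req itself only ever emits the request body (Node has already parsed the HTTP request headers off the socket), so for pass-through proxying you can often pipe it straight into an outgoing request. A minimal sketch, assuming a hypothetical upstream host upstream.example.com:
var http = require('http');

http.createServer(function(req, res) {
  // Forward method, path and headers upstream, then pipe the
  // request body through untouched (multipart framing included).
  var upstream = http.request({
    hostname: 'upstream.example.com', // hypothetical target service
    port: 80,
    path: req.url,
    method: req.method,
    headers: req.headers // note: still carries the original Host header
  }, function(upstreamRes) {
    res.writeHead(upstreamRes.statusCode, upstreamRes.headers);
    upstreamRes.pipe(res);
  });
  req.pipe(upstream);
}).listen(8182);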
Related
Got a server that sends huge files (~50 MB) as a response.
I'm using http and piping the file as follows:
var http = require('http'),
    fileSystem = require('fs'),
    path = require('path');

http.createServer(function(request, response) {
  var filePath = path.join(__dirname, 'myfile.mp3');
  var stat = fileSystem.statSync(filePath);
  response.writeHead(200, {
    'Content-Type': 'audio/mpeg',
    'Content-Length': stat.size
  });
  var readStream = fileSystem.createReadStream(filePath);
  // We replaced all the event handlers with a simple call to readStream.pipe()
  readStream.pipe(response);
}).listen(2000);
As expected, it takes a long time to send the response, but I want the page to stay responsive while the request is being served.
Is there any way I can report the progress of the response as it is sent and show it on the frontend?
Can I use socket.io?
What I have tried so far:
I tried listening for the pipe event on the readStream, but it was only emitted once:
.on("pipe", (src) => {
  console.log("Something is piping into the writer.");
  // assert.equal(src, reader);
})
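One approach (a sketch, not tied to socket.io): count bytes in the stream's 'data' events and compare against the size you already have from statSync; the 'pipe' event fires only once per pipe() call, so it cannot report progress by itself. The numbers could then be pushed to the browser over socket.io or server-sent events:
var stat = fileSystem.statSync(filePath);
var readStream = fileSystem.createReadStream(filePath);
var sent = 0;

// 'data' fires for every chunk, even while the stream is piped
readStream.on('data', function(chunk) {
  sent += chunk.length;
  console.log('sent ' + sent + ' of ' + stat.size + ' bytes (' +
              Math.round(100 * sent / stat.size) + '%)');
});

readStream.pipe(response);
Note that the browser can also compute download progress on its own from the Content-Length header you already send (e.g. via XHR or fetch progress handling), which may remove the need for a socket entirely.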
I'm trying to build a web application that runs on pure Node.js, without Express or other packages.
I haven't found a good method for serving multiple linked files from the server, for example an HTML file together with the CSS and JS files it references.
Right now, I can only send the HTML file.
I have a folder named "design" with 3 files: index.html, style.css, script.js.
I really don't care what method you use; I just want to send more than one connected file to the client from the server.
Thanks :) !
const http = require('http');
const fs = require('fs');

const server = http.createServer((req, res) => {
  if (req.url == '/') {
    const readStream = fs.createReadStream('./design/index.html');
    res.writeHead(200, {'Content-type': 'text/html'});
    readStream.pipe(res);
  }
});

server.listen(3000, () => console.log("Connected Successfully"));
By default, if you were able to serve HTML correctly to the browser, the HTML itself will request any CSS, JavaScript, or asset files that it needs!
The problem is that when the HTML requests these files, /styles.css for example, the server doesn't know what to do with them!
That's why you need to set up a request handler for every asset:
const server = http.createServer((req, res) => {
  if (req.url == '/') {
    const readStream = fs.createReadStream('./design/index.html');
    res.writeHead(200, {'Content-type': 'text/html'});
    readStream.pipe(res);
  }
  if (req.url == '/styles.css') {
    const readStream = fs.createReadStream('./design/styles.css');
    res.writeHead(200, {'Content-type': 'text/css'});
    readStream.pipe(res);
  }
  if (req.url == '/script.js') {
    const readStream = fs.createReadStream('./design/script.js');
    res.writeHead(200, {'Content-type': 'text/javascript'});
    readStream.pipe(res);
  }
});
But of course, since this is not very neat, you can do something like this once:
const readStream = fs.createReadStream(`./design${req.url}`);
and it will serve all the required assets using one code block.
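A slightly fuller sketch of that idea, with an extension-to-MIME-type map of my own and no path sanitization (don't use this as-is on anything public):
const http = require('http');
const fs = require('fs');
const path = require('path');

// Minimal extension-to-MIME-type lookup; extend as needed.
const mimeTypes = {
  '.html': 'text/html',
  '.css': 'text/css',
  '.js': 'text/javascript'
};

const server = http.createServer((req, res) => {
  // Map '/' to index.html, everything else to the matching file in ./design
  const urlPath = req.url == '/' ? '/index.html' : req.url;
  const contentType = mimeTypes[path.extname(urlPath)] || 'application/octet-stream';
  const readStream = fs.createReadStream(`./design${urlPath}`);

  // Only send the 200 header once the file has actually been opened
  readStream.on('open', () => {
    res.writeHead(200, {'Content-type': contentType});
    readStream.pipe(res);
  });
  readStream.on('error', () => {
    res.writeHead(404);
    res.end('Not found');
  });
});

server.listen(3000, () => console.log("Connected Successfully"));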
I am new to Node.js.
I wrote a server using Node.js that reads the request data and saves that data into an image file (because the data coming in is image data itself).
The Node.js script I wrote is:
const http = require('http');
const fs = require('fs');

const server = http.createServer();

server.on('request', function(request, respond) {
  var body = '';
  var filePath = '1.jpg';
  request.on('data', function(data) {
    body += data;
  });
  request.on('end', function() {
    fs.appendFile(filePath, body, function() {
      respond.end();
    });
  });
});

server.listen(8080);
And from a terminal on the same machine, I fired a curl command to send the image:
curl -X POST --data @tmp.jpg 127.0.0.1:8080
The tmp.jpg opens perfectly on my machine, but 1.jpg (created by Node.js) does not open.
What can be the problem?
Any help would be highly appreciated.
The issue lies with your usage of curl. The --data flag strips carriage returns and newlines from the payload; to send binary data untouched, use the --data-binary flag instead:
curl -X POST --data-binary @image.jpg 127.0.0.1:8080
Also, you need to use writeFile instead of appendFile. The latter will add each request's data to the same file, which will not be readable as an image.
Furthermore, when processing binary data I find it easier to use buffers instead of strings: body += data decodes each chunk as UTF-8, which corrupts binary bytes. The request processing would look like this:
server.on('request', function(req, respond) {
  var filePath = '1.jpg';
  var chunks = [];
  req.on('data', d => {
    chunks.push(d); // keep each chunk as a Buffer
  });
  req.on('end', function() {
    var data = Buffer.concat(chunks); // reassemble the full binary body
    fs.writeFile(filePath, data, function() {
      respond.end();
    });
  });
});
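As a sanity check once both changes are in place, cmp tmp.jpg 1.jpg should produce no output, confirming the saved file is byte-identical to the source.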
UPDATE
Thanks to #robertklep and #vallo for pointing out that I was not parsing the multipart request properly.
Here's the updated server code with some re-worked sample code from Busboy:
'use strict';

// Require
var http = require('http');
var Busboy = require('busboy');
var fs = require('fs');

// Server
var server = http.createServer(function(request, response) {
  if (request.method === 'POST') {
    var busboy = new Busboy({ headers: request.headers });
    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
      file.pipe(fs.createWriteStream(`../db/images/${filename}`));
    });
    busboy.on('finish', function() {
      response.writeHead(200, { 'Connection': 'close' });
      response.end("That's all folks!");
    });
    return request.pipe(busboy);
  }
  response.writeHead(404);
  response.end();
});

server.listen(8000, '192.168.7.25', () => {});
I am trying to post a jpg to an endpoint, but the resulting image cannot be opened:
The file “image_copy.jpg” could not be opened. It may be damaged or use a file format that Preview doesn’t recognize.
Some background:
- Everything (servers, storage) is hosted locally
- I have decided to use only native Node modules like http and fs, due to storage constraints on a microcontroller board
- I am using form-data as it eases the pain of multipart forms and uploads, and sets the correct request headers
Here is some sample code broken into two scripts:
Server
'use strict';

// Require
var http = require('http');
var fs = require('fs');

// Server
var server = http.createServer((request, response) => {
  var body = [];
  request.on('data', function(chunk) {
    body.push(chunk);
  });
  request.on('end', function() {
    saveImage(Buffer.concat(body), null);
    response.statusCode = 200;
    response.end('thanks');
  });
});

server.listen(8000, '192.168.7.25', () => {});

// Process
function saveImage(data, callback) {
  fs.writeFile('../db/images/image_copy.jpg', data, function(err) {});
}
Client
'use strict';

// Require
var FormData = require('form-data');
var fs = require('fs');
var http = require('http');

// Vars
var form = new FormData();

// Process
form.append('my_file', fs.createReadStream('/temp/1.jpg'));

var request = http.request({
  hostname: '192.168.7.25',
  port: 8000,
  path: '/api/premises/v1/image',
  method: 'POST',
  headers: form.getHeaders()
});

form.pipe(request);

request.on('response', function(res) {
  console.log(res.statusCode);
});
After executing, the jpg is uploaded and saved to the correct file location (and even has the same file size as the source jpg), but the new image can't be opened.
Even if I encode the incoming chunks as binary and set the encoding on fs.writeFile to binary, I get a similar result.
What am I doing wrong? Thank you!
The client is uploading in multipart/form-data format, a format that can contain, among other things, file data.
However, this means that the server should parse this format to extract the file data. Right now, the server is just taking the request body verbatim and writing it to a file.
The multiparty module can help you, and one of its usage examples shows you how to hook it up with http.Server: https://github.com/pillarjs/multiparty#usage
var multiparty = require('multiparty');
var http = require('http');
var util = require('util');

http.createServer(function(req, res) {
  // parse a file upload
  var form = new multiparty.Form();
  form.parse(req, function(err, fields, files) {
    res.writeHead(200, {'content-type': 'text/plain'});
    res.write('received upload:\n\n');
    res.end(util.inspect({fields: fields, files: files}));
  });
}).listen(8000);
Using that, you can extract the file data from (I think) files.my_file and write it to the file.
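By default multiparty streams each part to a temp file and reports its location, so to persist the upload you can move that file into place. A sketch, assuming the my_file field name from the client above (and that the temp dir and destination are on the same filesystem, since fs.rename can't cross devices):
var fs = require('fs');

form.parse(req, function(err, fields, files) {
  // files.my_file is an array because a form field can repeat;
  // each entry has originalFilename, path (the temp file), size, etc.
  var upload = files.my_file[0];
  fs.rename(upload.path, `../db/images/${upload.originalFilename}`, function(err) {
    res.writeHead(err ? 500 : 200, {'content-type': 'text/plain'});
    res.end(err ? 'failed' : 'saved');
  });
});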
I have a case where I have to read the data from the request body, create a file, and write the data into it. If the operation is successful, I set the response status to 201 and add the location of the file in a Location header. The file creation is done using Java methods; the Node.js code is below.
var server = http.createServer(function(req, res) {
  var body = "";
  req.on("data", function(chunk) {
    body += chunk.toString();
  });
  req.on("end", function() {
    var rtn = obj.AddonPostMethod(filepath, body);
    if (rtn.length < 13) {
      res.writeHead(201, {"Location": rtn});
      res.end();
    } else {
      res.writeHead(400, {"Content-Type": "application/json"});
      res.write('' + rtn);
      res.end();
    }
  });
});
The problem is that the response headers are never updated and are always the default 200 OK. In addition, the server stays busy even after the response is received.
I don't think you're actually listening on a port with the code you reference.
var http = require('http');

http.createServer(function(req, res) {
  res.writeHead(200, {'Content-Type': 'text/plain'});
  res.end('Hello World\n');
}).listen(8000);

console.log('Server running at http://127.0.0.1:8000/');
You never told the http object to actually listen on a port/IP with the .listen() function.
Also, you don't need to wait for the req object to emit anything to respond; the handler function is called as soon as a request comes in. You can listen for specific requests and route them appropriately by storing the http.Server object in a variable.
var server = http.createServer();
server.listen(8000);
server.on('request', function(req,res){ /* do something with the request */ });
More documentation on the http object can be found in the Node.js docs for http.
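For example, a minimal sketch of that routing pattern (the /status path here is made up):
var http = require('http');

var server = http.createServer();
server.listen(8000);

server.on('request', function(req, res) {
  // Route by URL; both branches respond immediately,
  // without waiting for any body events.
  if (req.url === '/status') { // hypothetical route
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end('OK\n');
  } else {
    res.writeHead(404, {'Content-Type': 'text/plain'});
    res.end('Not Found\n');
  }
});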