I am new to node js.
I wrote a server using node js which will read the request data and save that data into an image file.(Because the data coming is image data itself.)
The node js script I wrote is :
// Question code: receive the HTTP request body and save it to an image file.
const http = require('http');
const fs = require('fs');
const server = http.createServer();
server.on('request', function(request, respond) {
// NOTE(review): accumulating binary chunks into a string corrupts the data —
// each Buffer is decoded as UTF-8 during `+=`, which is why 1.jpg won't open.
var body = '';
// NOTE(review): assigned without var/let/const — creates an implicit global.
filePath = '1.jpg';
request.on('data', function(data) {
body += data;
});
request.on('end', function (){
// appendFile adds to any existing 1.jpg rather than replacing it, so
// repeated requests concatenate into one unreadable file.
fs.appendFile(filePath, body, function() {
respond.end();
});
});
});
server.listen(8080);
And from the terminal on same machine , I fired a curl command to send the image :
curl -X POST --data @tmp.jpg 127.0.0.1:8080
The tmp.jpg is opening perfectly on my machine.
But 1.jpg (created by node js) is not opening.
What can be the problem ?
Any help would be highly appreciated.
The issue lies with your usage of curl. To send binary data, use the --data-binary flag instead:
curl -X POST --data-binary @image.jpg 127.0.0.1:8080
Also, you need to use writeFile instead of appendFile. The latter will append each request's data to the same file, which will not be readable as an image.
Furthermore, when processing binary data I find it easier to use buffers instead of string. The request processing would look like this:
// Save each POSTed request body to 1.jpg, collecting the raw bytes as
// Buffer chunks so binary data is never corrupted by string decoding.
server.on('request', function(req, respond) {
  var filePath = '1.jpg'; // declared locally; was an implicit global before
  var chunks = [];
  req.on('data', function(d) {
    chunks.push(d); // keep raw Buffers; no string conversion
  });
  req.on('end', function() {
    var data = Buffer.concat(chunks);
    // writeFile replaces the file each time (appendFile would concatenate
    // successive uploads into one unreadable image).
    fs.writeFile(filePath, data, function(err) {
      if (err) {
        // Don't silently swallow write failures — tell the client.
        respond.statusCode = 500;
        respond.end('write failed');
        return;
      }
      respond.end();
    });
  });
});
Related
Got a server that sends huge files ~50MB as response.
Using http and piping the file as follows:
var http = require('http');
var fileSystem = require('fs');
var path = require('path');

// Serve myfile.mp3 for every request, streaming it rather than loading it
// into memory; Content-Length comes from a synchronous stat of the file.
var server = http.createServer(function (request, response) {
  var filePath = path.join(__dirname, 'myfile.mp3');
  var stat = fileSystem.statSync(filePath);

  response.writeHead(200, {
    'Content-Type': 'audio/mpeg',
    'Content-Length': stat.size
  });

  // pipe() drives the transfer and handles backpressure for us.
  fileSystem.createReadStream(filePath).pipe(response);
});

server.listen(2000);
As expected, it takes a huge amount of time to send the response. But I want to have a responsive page when the request is being made.
Is there any way I can send the progress of the response as it is being sent and have it shown on the frontend?
Can I use socket.io?
What I have tried so far:
Tried calling the pipe event on the readStream but it was called only once.
// Fragment: a 'pipe' listener on the writable side. Note it fires once per
// pipe() call (when piping begins), not once per chunk — which is why it
// "was called only once" above.
.on("pipe", (src) => {
console.log("Something is piping into the writer.");
// assert.equal(src, reader);
})
UPDATE
Thanks to @robertklep and @vallo for pointing out that I was not parsing the multipart request properly.
Here's the updated server code with some re-worked sample code from Busboy:
'use strict';
// Require
var http = require('http');
var Busboy = require('busboy');
var fs = require('fs');

// Server: accept multipart/form-data POSTs and stream each uploaded file
// part straight to disk without buffering the whole upload in memory.
var server = http.createServer(function(request, response) {
  if (request.method === 'POST') {
    var busboy = new Busboy({ headers: request.headers });
    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
      // FIX: the destination previously used "$(unknown)" — shell-style
      // syntax, not template-literal interpolation — so every upload was
      // written to the same literally-named file. `${filename}` stores each
      // upload under its own original name.
      file.pipe(fs.createWriteStream(`../db/images/${filename}`));
    });
    busboy.on('finish', function() {
      response.writeHead(200, { 'Connection': 'close' });
      response.end("That's all folks!");
    });
    return request.pipe(busboy);
  }
  // Any non-POST request is unsupported.
  response.writeHead(404);
  response.end();
});

server.listen(8000, '192.168.7.25', () => {});
I am trying to post a jpg to an endpoint but the resulting image cannot be opened :
The file “image_copy.jpg” could not be opened. It may be damaged or
use a file format that Preview doesn’t recognize.
Some background:
Everything (servers, storage) are being hosted locally
Have made a decision to only use native Node modules like http and fs due to storage constraints on a microcontroller board
Am using form-data as it eases the pain of multi-part forms and uploads as well as sets the correct request headers
Here is some sample code broken into two scripts:
Server
'use strict';
// Require
var http = require('http');
// Server — question code: buffer the entire request body, then write it out.
// NOTE(review): the client posts multipart/form-data, so this body contains
// multipart boundaries and part headers wrapped around the image bytes;
// writing it verbatim yields a file that is not a valid JPEG (the bug being
// asked about — the body must be parsed to extract the file part).
var server = http.createServer((request, response) => {
var body = [];
request.on('data', function(chunk) {
body.push(chunk);
});
request.on('end', function() {
saveImage(Buffer.concat(body),null);
response.statusCode = 200;
response.end('thanks')
});
});
server.listen(8000, '192.168.7.25', () => {});
// Process: persist the buffered bytes to disk.
// NOTE(review): `callback` is accepted but never invoked, and writeFile's
// error argument is ignored.
function saveImage(data,callback) {
var fs = require('fs');
fs.writeFile('../db/images/image_copy.jpg', data, function(err) {});
}
Client
'use strict';

// Client: build a multipart/form-data upload containing /temp/1.jpg and
// POST it to the image endpoint, logging the response status on arrival.
var FormData = require('form-data');
var fs = require('fs');
var http = require('http');

var form = new FormData();
form.append('my_file', fs.createReadStream('/temp/1.jpg'));

var request = http.request({
  hostname: '192.168.7.25',
  port: 8000,
  path: '/api/premises/v1/image',
  method: 'POST',
  headers: form.getHeaders()
});

request.on('response', function(res) {
  console.log(res.statusCode);
});

// Stream the form (including the file) into the request body.
form.pipe(request);
After executing, the jpg is uploaded and saved to the correct file location (and also has the same file size as the source jpg) but the new image can't be opened.
Even if I encode the incoming chunk as binary and set the encoding on fs.writeFile to binary, I get a similar result.
What am I doing wrong? Thank you!
The client is uploading in multipart/form-data format, which is a format that can contain, amongst others, file data.
However, this means that the server should parse this format to extract the file data. Right now, the server is just taking the request body verbatim and writing it to a file.
The multiparty module can help you, and one of its usage examples shows you how to hook it up with http.Server: https://github.com/pillarjs/multiparty#usage
var multiparty = require('multiparty');
var http = require('http');
var util = require('util');

// Parse each incoming multipart upload and echo back what was received.
var server = http.createServer(function (req, res) {
  var form = new multiparty.Form();
  form.parse(req, function (err, fields, files) {
    res.writeHead(200, {'content-type': 'text/plain'});
    res.write('received upload:\n\n');
    res.end(util.inspect({fields: fields, files: files}));
  });
});

server.listen(8000);
Using that, you can extract the file data from (I think) files.my_file and write it to the file.
I have an XML file. I need to expose an API in such a way that it can consume XML files through a Node.js server API.
How can I receive a simple XML file in my Node.js server? I need a Node.js server script.
Try this code; after struggling for some time I successfully got it working as per your need.
// Minimal Express app: accepts an XML document via POST /posting and serves
// the stored copy back via GET /Excel.
var express = require('express');
var fs = require('fs');
var app = express();

// GET /Excel — return the most recently stored XML file.
app.get('/Excel', function(req, res) {
  // Read on every request so the latest posted content is always served.
  // (Previously `result` was a shared module-level variable; it is now a
  // handler local. The unused module-level `conf` was removed.)
  var result = fs.readFileSync('E:/User/nodejs/nodejs/tasks/result.xml');
  res.end(result);
});

// POST /posting — buffer the raw request body (sent with Content-Type
// application/xml) and persist it to result.xml.
app.post('/posting', function(req, res) {
  var body = '';
  req.on('data', function(data) {
    body += data;
  });
  req.on('end', function() {
    fs.writeFile('E:/User/nodejs/nodejs/tasks/result.xml', body, function() {
      console.log(body);
      res.end('Successfully Posted');
    });
  });
});

app.listen(3000);
console.log('Listening on port 3000');
After you run the server, POST the data with Content-Type application/xml to http://localhost:3000/posting, placing your XML in the request body. As a result, result.xml will be created with that data, and you can retrieve the file from http://localhost:3000/Excel. Hope this helps.
You can use a simple XML parser middleware such as express-xml-bodyparser. A working examples with express can be found in the above mentioned site.
Currently, I have a lot of linux based clients downloading firmware updates from my webserver.
After the client has successfully downloaded the firmware file, my server needs to execute a few scripts, which logs in to the client and performs some tasks.
Is there a way for a node server to keep track of the clients download progress, so I can execute the needed scripts once the file has been downloaded?
Ok so I will try.
If you serve your Firmware as static files through Apache/Nginx and direct url call. You don't get the progress inside your NodeJS script.
If you serve your files via stream inside a express controller you can listen to the progress. Look at this answer here https://stackoverflow.com/a/42273080/3168392
You will have to use a socket connection to make sure the node server gets update from the client of the progress of the file being downloaded.
Something like this
CLIENT_CODE
// CLIENT_CODE (sketch): listen for download-progress events from the server
// and notify it once the transfer reaches 100%.
var socket = io('http://localhost');
socket.on('connect', function(){});
// NOTE(review): this appears to be pseudocode — socket.io's `on` takes
// (eventName, handler), so the extra `parts.pop()` argument is not valid,
// and `parse_data` / `client` are undefined here. Treat as a sketch of the
// intended flow, not runnable code.
socket.on('data_reciving', parts.pop(),function(percentage){
if(parse_data(percentage) === 100){
client.socket.emit('downloadCompleted', {fileName:'test'});
}else{
//do nothing
}
});
SERVER_CODE:
// SERVER_CODE (sketch): once a client reports its download completed,
// run the follow-up work against that client.
sockets.on('connection', function (socket) {
//listen to the event from client
socket.on('downloadCompleted', function (data) {
// Placeholders for the post-download tasks (log in to client, run scripts).
connect_to_client();
do_some_operation();
// NOTE(review): `{some_data}` is ES6 shorthand and requires a `some_data`
// variable in scope — not defined in this sketch.
socket.emit('ALLDONE',{some_data});
});
});
I guess this helps; you can use this post for reference.
If you just want to run some code when a download has finished, you can use on-finished:
const onFinished = require('on-finished');
// Express middleware: register a callback that fires once the response for
// this request has fully finished being sent (or errored).
app.use((req, res, next) => {
onFinished(res, (err, res) => {
...log some data, perform some housekeeping, etc...
});
next();
});
As is, it will attach a "finished" listener to all responses, which is probably not what you want. Since this is plain Express middleware, you can attach it to specific routes instead (but how depends on how exactly the files are being served).
I found some code that seems to fit my needs.
With the code below, I can detect both the progress of a user's download from the server-side, and fire an event once the file transfer completes.
var http = require("http");
var fs = require("fs");
var filename = "./large-file";

// Serve ./large-file as an attachment, tracking from the server side how
// many bytes have been handed to the response so far.
var serv = http.createServer(function (req, res) {
  var sent = 0; // bytes passed to res.write() for this response
  var stat = fs.statSync(filename);
  res.setHeader('Content-disposition', 'attachment; filename=large-file.iso');
  res.setHeader('Accept-Ranges', 'bytes');
  res.setHeader('Keep-Alive', 'timeout=5, max=100');
  res.writeHeader(200, {"Content-Length": stat.size});

  var fReadStream = fs.createReadStream(filename, { highWaterMark: 128 * 1024 });
  fReadStream.on('data', function (chunk) {
    // FIX: count every chunk. Previously only chunks that triggered
    // backpressure were added to `sent` (via lastChunkSize on 'drain'),
    // so chunks accepted without backpressure were never counted and the
    // reported progress under-counted the actual transfer.
    sent += chunk.length;
    if (!res.write(chunk)) {
      // Kernel/socket buffer is full; pause until 'drain'.
      fReadStream.pause();
      console.log('Sent', sent, 'of', stat.size);
    }
  });
  fReadStream.on('end', function () {
    console.log('Transfer complete.');
    res.end();
  });
  res.on("drain", function () {
    fReadStream.resume();
  });
});

serv.listen(3001);
I'm writing a http server using node.js and having trouble isolating the request body as a stream readable. Here is a basic sample of my code:
var http = require('http')
, fs = require('fs');
// Question code: stream the body of every POST request into out.txt.
http.createServer(function(req, res) {
if ( req.method.toLowerCase() == 'post') {
// NOTE(review): req is a readable stream of the request BODY only — the
// HTTP header block is parsed into req.headers, not re-emitted by pipe().
// With a multipart/form-data upload the body still contains the multipart
// boundary lines and part headers around the file data, which is why
// out.txt is not a clean copy of the uploaded file.
req.pipe(fs.createWriteStream('out.txt'));
req.on('end', function() {
res.writeHead(200, {'content-type': 'text/plain'})
res.write('Upload Complete!\n');
res.end();
});
}
}).listen(8182);
console.log('listening on port 8182');
According to node's documentation the request param is an instance of http.IncomingMessage which implements node's readable stream interface. The problem with just using stream.pipe() as I did above is the readable stream includes the plain text of the request headers along with the request body. Is there a way to isolate only the request body as a readable stream?
I'm aware that there are frameworks for file uploads such as formidable. My ultimate goal is not to create an upload server but to act as a proxy and stream the request body to another web service.
thanks in advance.
Edit>>
working server for "Content-type: multipart/form-data" using busboy
// Working server for "Content-type: multipart/form-data": busboy parses the
// request body and exposes each file part as its own readable stream.
var http = require('http')
, fs = require('fs')
, Busboy = require('busboy');
http.createServer(function(req, res) {
if ( req.method.toLowerCase() == 'post') {
var busboy = new Busboy({headers: req.headers});
// 'file' fires once per file field; `file` streams just that part's bytes.
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
file.pipe(fs.createWriteStream('out.txt'));
});
req.pipe(busboy);
// NOTE(review): this responds when the *request* ends, which can happen
// before busboy has finished flushing the file to disk; responding from
// busboy's 'finish' event would be safer — confirm before relying on
// out.txt being complete when the response arrives.
// NOTE(review): writeHead's second argument as a string sets the HTTP
// status message, not a header — presumably a headers object was intended.
req.on('end', function() {
res.writeHead(200, 'Content-type: text/plain');
res.write('Upload Complete!\n');
res.end();
});
}
}).listen(8182);
console.log('listening on port 8182');
Check your req.headers['content-type']. If it's multipart/form-data then you could use a module like busboy to parse the request for you and give you readable streams for file parts (and plain strings for non-file parts if they exist).
If the content-type is some other multipart/* type, then you could use dicer, which is the underlying module that busboy uses for parsing multipart.