I have a simple Node.js server that fetches data from another server and stores it in JSON files. I need to write a status message for each file fetched and generated, but that doesn't work, because I have to execute response.end(), which means I can't write to the stream again without ending it.
Here's my code:
var http = require('http');
var module = require('fs');

var APIs = [ '/servlet/en', '/servlet/fr' ];
var langs = [ 'en', 'fr' ];
var finish = false;
var host = 'http://www.localtest';
const port = process.argv[2] || 9000;
var responses = [];

http.createServer(function (req, response) {
  for (x in APIs) {
    console.log(x);
    var options = {
      host: 'localtest',
      port: 8888,
      path: APIs[x],
      lang: langs[x]
    };
    http.get(options, function(res) {
      res.setEncoding('utf8');
      var body = '';
      res.on('data', function(chunk) {
        body += chunk;
      });
      res.on('end', function(chunk) {
        responses.push(body);
        if (responses.length == 2) {
          var d = JSON.parse(responses[1]);
          var d2 = JSON.parse(responses[0]);
          module.writeFileSync("options.lang1" + ".json", JSON.stringify(d), 'utf-8');
          module.writeFileSync("options.lang2" + ".json", JSON.stringify(d2), 'utf-8');
        }
      });
    });
  }
}).listen(parseInt(port));

console.log(`Server listening on port ${port}`);
For example, I tried to write a message to the user after the line:
responses.push(body);
using response.write(), but this method needs a response.end() in order for the output to be flushed and displayed in the browser, and if I do that I can't write to the stream anymore!
There are a couple of issues with your code. First off, you shouldn't use module as a variable name, as that word is already used by Node's module system, e.g. in module.exports.
Second, you really want some control flow in there. Here's a complete example using the async library, though others prefer Promises.
var http = require('http');
var fs = require('fs');
var async = require('async');

var langs = [ 'en', 'fr' ];
const port = process.argv[2] || 9000;

function fetchAndWriteFile(lang, callback) {
  var options = {
    host: 'localtest',
    port: 8888,
    path: '/servlet/' + lang
  };
  http.get(options, function(res) {
    res.setEncoding('utf8');
    const filename = 'options.' + lang + '.json';
    const fileStream = fs.createWriteStream(filename, { encoding: 'utf-8' });
    // writable streams emit 'finish' (not 'end') once everything is flushed
    fileStream.on('finish', () => callback(null, filename));
    fileStream.on('error', callback);
    res.pipe(fileStream);
  }).on('error', callback);
}

http.createServer(function (req, response) {
  // this will run fetchAndWriteFile once for each lang in langs
  async.map(langs, fetchAndWriteFile, (e, files) => {
    if (e) {
      response.statusCode = 500;
      return response.end(e.message);
    }
    response.end(files.join('\n')); // files is an array of the filenames saved
  });
}).listen(parseInt(port));

console.log(`Server listening on port ${port}`);
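Since I mentioned Promises: the same flow without async would look roughly like this (an untested sketch that wraps the fetchAndWriteFile function above):

function fetchAndWriteFilePromise(lang) {
  return new Promise(function (resolve, reject) {
    fetchAndWriteFile(lang, function (e, filename) {
      if (e) return reject(e);
      resolve(filename);
    });
  });
}

http.createServer(function (req, response) {
  // run one fetch per lang and respond once all of them settle
  Promise.all(langs.map(fetchAndWriteFilePromise))
    .then(function (files) { response.end(files.join('\n')); })
    .catch(function (e) { response.statusCode = 500; response.end(e.message); });
}).listen(parseInt(port));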
UPDATE
Thanks to @robertklep and @vallo for pointing out that I was not parsing the multipart request properly.
Here's the updated server code with some re-worked sample code from Busboy:
'use strict';

// Require
var http = require('http');
var Busboy = require('busboy');
var fs = require('fs');

// Server
var server = http.createServer(function(request, response) {
  if (request.method === 'POST') {
    var busboy = new Busboy({ headers: request.headers });
    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
      file.pipe(fs.createWriteStream(`../db/images/${filename}`));
    });
    busboy.on('finish', function() {
      response.writeHead(200, { 'Connection': 'close' });
      response.end("That's all folks!");
    });
    return request.pipe(busboy);
  }
  response.writeHead(404);
  response.end();
});
server.listen(8000, '192.168.7.25', () => {});
I am trying to post a jpg to an endpoint, but the resulting image cannot be opened:
The file “image_copy.jpg” could not be opened. It may be damaged or
use a file format that Preview doesn’t recognize.
Some background:

- Everything (servers, storage) is being hosted locally
- I have made a decision to only use native Node modules like http and fs, due to storage constraints on a microcontroller board
- I am using form-data on the client, as it eases the pain of multipart forms and uploads and sets the correct request headers
Here is some sample code broken into two scripts:
Server

'use strict';

// Require
var http = require('http');
var fs = require('fs');

// Server
var server = http.createServer((request, response) => {
  var body = [];
  request.on('data', function(chunk) {
    body.push(chunk);
  });
  request.on('end', function() {
    saveImage(Buffer.concat(body), null);
    response.statusCode = 200;
    response.end('thanks');
  });
});
server.listen(8000, '192.168.7.25', () => {});

// Process
function saveImage(data, callback) {
  fs.writeFile('../db/images/image_copy.jpg', data, function(err) {});
}
Client

'use strict';

// Require
var FormData = require('form-data');
var fs = require('fs');
var http = require('http');

// Vars
var form = new FormData();

// Process
form.append('my_file', fs.createReadStream('/temp/1.jpg'));
var request = http.request({
  hostname: '192.168.7.25',
  port: 8000,
  path: '/api/premises/v1/image',
  method: 'POST',
  headers: form.getHeaders()
});
form.pipe(request);
request.on('response', function(res) {
  console.log(res.statusCode);
});
After executing, the jpg is uploaded and saved to the correct file location (and also has the same file size as the source jpg), but the new image can't be opened.
Even if I encode the incoming chunks as binary and set the encoding on fs.writeFile to binary, I get a similar result.
What am I doing wrong? Thank you!
The client is uploading in multipart/form-data format, a container format that can hold, amongst other things, file data.
However, this means that the server has to parse this format to extract the file data. Right now, the server is taking the request body verbatim and writing it to a file.
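To make the damage concrete: what the server is currently writing to disk looks roughly like this (boundary value invented for illustration), i.e. the raw jpg bytes wrapped in part headers and boundary markers, which is why image viewers reject the file:

--------------------------e6b73f57b0e3a5f3
Content-Disposition: form-data; name="my_file"; filename="1.jpg"
Content-Type: image/jpeg

<raw jpg bytes>
--------------------------e6b73f57b0e3a5f3--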
The multiparty module can help you, and one of its usage examples shows you how to hook it up with http.Server: https://github.com/pillarjs/multiparty#usage
var multiparty = require('multiparty');
var http = require('http');
var util = require('util');

http.createServer(function(req, res) {
  // parse a file upload
  var form = new multiparty.Form();
  form.parse(req, function(err, fields, files) {
    res.writeHead(200, { 'content-type': 'text/plain' });
    res.write('received upload:\n\n');
    res.end(util.inspect({ fields: fields, files: files }));
  });
}).listen(8000);
Using that, you can extract the file data from (I think) files.my_file and write it to the file.
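For instance, something along these lines (an untested sketch: multiparty streams each part to a temp file, and I'm assuming the field name my_file and that the temp directory is on the same filesystem as the destination; otherwise copy instead of rename):

var multiparty = require('multiparty');
var http = require('http');
var fs = require('fs');

http.createServer(function(req, res) {
  var form = new multiparty.Form();
  form.parse(req, function(err, fields, files) {
    if (err) { res.writeHead(500); return res.end(err.message); }
    var upload = files.my_file[0]; // each field maps to an array of file objects
    // multiparty has already written the part to upload.path, so just move it
    fs.rename(upload.path, '../db/images/' + upload.originalFilename, function(e) {
      res.writeHead(e ? 500 : 200, { 'content-type': 'text/plain' });
      res.end(e ? e.message : 'saved ' + upload.originalFilename);
    });
  });
}).listen(8000, '192.168.7.25');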
I'm building a simple HTTP server that serves a fairly large file to clients using streams. I need to serve multiple clients at the same time, and I'm wondering what the simplest way to achieve that is.
My initial feeling is that using the cluster module and forking {num CPUs} processes might be the simplest way.
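Roughly what I have in mind (an untested sketch; cluster and os are core modules):

var cluster = require('cluster');
var os = require('os');

if (cluster.isMaster) {
  // fork one worker per CPU; each worker runs the same server code
  for (var i = 0; i < os.cpus().length; i++) {
    cluster.fork();
  }
} else {
  require('./server'); // the http server below
}

And here is my current single-process server: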
var StreamBrake = require('streambrake');
var http = require('http');
var fs = require('fs');

var server = http.createServer(function(req, res) {
  var stream = fs.createReadStream('/data/somefile');
  stream.pipe(new StreamBrake(10240)).pipe(res);
});
server.listen(1234, 10);
Edit: To clarify, the issue is that this code won't begin to serve the second client until it has finished serving the first.
After thinking about your issue, I'm confused as to why you believe there's a problem. createReadStream is asynchronous. Here is an example that complicates the code a little in order to demonstrate that, using createReadStream, we can indeed service multiple connections at a time.
/*jshint node:true*/
var http = require('http');
var fs = require('fs');

var activeRequests = 0;
var launchFiveRequests = 5;

http.createServer(function (req, res) {
  activeRequests++;
  console.log('Request received');
  var readStream = fs.createReadStream('play.js', { 'bufferSize': 1024 });
  readStream.setEncoding('utf8');
  readStream.on('data', function (data) {
    console.log(activeRequests);
    res.write(data);
  });
  readStream.on('end', function () {
    res.end();
    console.log('end');
    activeRequests--;
  });
}).listen(8080);

var options = {
  hostname: 'localhost',
  port: 8080,
  path: '/'
};

while (launchFiveRequests--) {
  http.get(options, function(res) {
    console.log('response received');
  });
}
Serve up a sufficiently large file, and you should see that all 5 requests are live at once and all end relatively simultaneously.
I am trying to make a webserver in Node.js that downloads an image from Wikipedia and serves it on a page. I can't get it to work. I pasted my code into an online sandbox: http://runnable.com/UXWTyD3pTQ1RAADe.
Here's my code:
var http = require('http');
var fs = require('fs');

var fd = fs.open('name.jpeg', 'r+');
var options = {
  host: 'upload.wikimedia.org',
  port: 80,
  path: '/wikipedia/commons/1/15/Jagdschloss_Granitz_4.jpg'
};

var server = http.createServer(function(req, res) {
  res.writeHead(200, ['Content-Type', 'text/html']);
  http.get(options, function(res) {
    res.on('data', function (chunk) {
      fs.write(fd, chunk, 0, chunk.length, 0, null);
    });
    res.on('end', function() {
      fd.end();
      res.send("<img src='name.jpeg'></img>");
      res.end();
    });
  });
});
server.listen(process.env.OPENSHIFT_NODEJS_PORT, process.env.OPENSHIFT_NODEJS_IP);
I keep running into:
node server.js
Running...
fs.js:415
binding.write(fd, buffer, offset, length, position, wrapper);
^
TypeError: Bad argument
at Object.fs.write (fs.js:415:11)
at IncomingMessage.<anonymous> (server.js:18:12)
at IncomingMessage.EventEmitter.emit (events.js:96:17)
at IncomingMessage._emitData (http.js:359:10)
at HTTPParser.parserOnBody [as onBody] (http.js:123:21)
at Socket.socketOnData [as ondata] (http.js:1485:20)
at TCP.onread (net.js:404:27)
The error happens because fs.open is asynchronous, so fd is not a file descriptor by the time fs.write is called. Working code - saving the image file:
/** Try to get an image from Wikipedia and return it **/
var http = require('http');
var fs = require('fs');

var options = {
  host: 'upload.wikimedia.org',
  port: 80,
  path: '/wikipedia/commons/1/15/Jagdschloss_Granitz_4.jpg'
};

var server = http.createServer(function(req, res) {
  res.writeHead(200, { 'Content-Type': 'text/html' });
  http.get(options, function(imgRes) {
    imgRes.pipe(fs.createWriteStream('name.jpeg'));
    res.end("<html><img src='name.jpeg'></img></html>");
  });
});
server.listen(process.env.OPENSHIFT_NODEJS_PORT, process.env.OPENSHIFT_NODEJS_IP);
You would also need node-static (http://www.sitepoint.com/serving-static-files-with-node-js/) to serve the static file name.jpeg.
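Wiring it in would look roughly like this (a sketch based on node-static's README; it serves name.jpeg from the given directory once the file exists on disk):

var nodeStatic = require('node-static');
var fileServer = new nodeStatic.Server('.'); // directory that contains name.jpeg

http.createServer(function (req, res) {
  req.addListener('end', function () {
    fileServer.serve(req, res);
  }).resume();
}).listen(8080);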
But the other way is to do it manually:
var http = require('http');
var fs = require('fs');

var options = {
  host: 'upload.wikimedia.org',
  port: 80,
  path: '/wikipedia/commons/1/15/Jagdschloss_Granitz_4.jpg'
};

var server = http.createServer(function(req, res) {
  if (req.url == '/name.jpeg') {
    res.writeHead(200, { 'Content-Type': 'image/jpeg' });
    try {
      var imgData = fs.readFileSync('name.jpeg');
      res.end(imgData);
    } catch (err) {
      res.end();
    }
  }
  else {
    res.writeHead(200, { 'Content-Type': 'text/html' });
    http.get(options, function(imgRes) {
      imgRes.pipe(fs.createWriteStream('name.jpeg'));
      res.end("<html><img src='name.jpeg'></img></html>");
    });
  }
});
server.listen(process.env.OPENSHIFT_NODEJS_PORT, process.env.OPENSHIFT_NODEJS_IP);
When I try to run the code below:
var express = require('express');
var app = express();
var port = process.env.PORT || 5000;
var request = require('request');
var zlib = require('zlib');

app.listen(port, function() {
  console.log("Listening on " + port);
  makeRequest();
});

function makeRequest() {
  var url = 'https://api.stackexchange.com/2.1/search?pagesize=5&order=desc&sort=activity&intitle=ios development&site=stackoverflow';
  var headers = {'Accept-Encoding': 'gzip'};
  var response = request(url, headers);
  gunzipJSON(response);
}

function gunzipJSON(response) {
  var gunzip = zlib.createGunzip();
  var json = "";
  gunzip.on('data', function(data) {
    json += data.toString();
  });
  gunzip.on('end', function() {
    parseJSON(json);
  });
  response.pipe(gunzip);
}

function parseJSON(json) {
  var json = JSON.parse(json);
  if (json.items.length) {
    for (var i in json.items) {
      console.log(json.items[i].title + '\n' + json.items[i].link);
    }
  }
}
I get an error saying:
Error: incorrect header check
at Zlib._binding.onerror (zlib.js:286:17)
I am passing my own gzipped URL in the code.
Any help will be really useful.
Thanks
Old question (and the request library is now deprecated), but better late than never:
Interestingly, the code in question does work for me on Node.js version 15.13.0, but not on 14.16.0 (keeping the version of request the same, which is the latest 2.88.2).
However, just using the following simple code works on 14.16.0 (live demo), but not on 15.13.0!:
request(url, function (error, response, body) {
  console.log(JSON.parse(body));
});
This means that for some reason, on 14.16.0 the response body is automatically unzipped (hence the above snippet works), while on 15.13.0 the response body is kept compressed and so an active decompression is needed.
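If you want behaviour that doesn't depend on the Node version, request also has an explicit gzip option that sends the Accept-Encoding header and transparently decompresses the response (a sketch, reusing the url variable from the question):

var request = require('request');

request({ url: url, gzip: true }, function (error, response, body) {
  if (error) return console.error(error);
  console.log(JSON.parse(body)); // body arrives already gunzipped
});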
I am using Express and Node.js and am having problems saving Facebook profile pictures to my server.
Location of picture: http://profile.ak.fbcdn.net/hprofile-ak-ash2/275619_223605264_963427746_n.jpg
Script Being Used:
var http = require('http');
var fs = require('fs');

var options = {
  host: 'http://profile.ak.fbcdn.net',
  port: 80,
  path: '/hprofile-ak-ash2/275619_223605264_963427746_n.jpg'
};

var request = http.get(options, function(res) {
  res.setEncoding('binary');
  var imagedata = '';
  res.on('data', function (chunk) { imagedata += chunk; });
  res.on('end', function() {
    fs.writeFile('logo.jpg', imagedata, 'binary', function (err) {
      if (err) { throw err; }
      console.log('It\'s saved!');
    });
  });
});
The image saves but is empty. Console logging the image data is blank too. I followed this example originally, which does work for me; just changing the location of the image to the Facebook pic breaks the script.
I ended up coming up with a function that worked:
var http = require('http');
var fs = require('fs');
var url = require('url');

var getImg = function(o, cb) {
  var port = o.port || 80;
  // don't shadow the url module here, or url.parse would be undefined
  var parsedUrl = url.parse(o.url);
  var options = {
    host: parsedUrl.hostname,
    port: port,
    path: parsedUrl.pathname
  };
  http.get(options, function(res) {
    console.log("Got response: " + res.statusCode);
    res.setEncoding('binary');
    var imagedata = '';
    res.on('data', function(chunk) {
      imagedata += chunk;
    });
    res.on('end', function() {
      fs.writeFile(o.dest, imagedata, 'binary', cb);
    });
  }).on('error', function(e) {
    console.log("Got error: " + e.message);
  });
};
USAGE:

getImg({
  url: "http://UrlToImage.com",
  dest: __dirname + '/your/path/to/save/imageName.jpg'
}, function(err) {
  console.log('image saved!');
});
I know my answer is a little late, but I hope it'll help others who get to this question, so here it is:
Saving the file to the root directory of your Node server can be done this way:
var request = require("request");
var fs = require("fs");
var fbUserId = 4;
var imageLink = "https://graph.facebook.com/"+ fbUserId +"/picture?width=500&height=500";
request(imageLink).pipe(fs.createWriteStream("resultIMG.png"))
.on('close', function(){
console.log("saving process is done!");
});
Of course, you can add any path you want prior to the file name string.
If you are still seeing empty images, set the encoding of the request module to null, like this:
var request = require("request").defaults({ encoding: null });
That should do it.
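And since request is deprecated these days, here's a rough equivalent with only the built-in https module (a sketch; graph.facebook.com answers the picture URL with a redirect, and I'm assuming the redirect target is also https):

var https = require('https');
var fs = require('fs');

function download(link, dest, cb) {
  https.get(link, function (res) {
    // the Graph API redirects to the CDN, so follow the Location header
    if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
      return download(res.headers.location, dest, cb);
    }
    res.pipe(fs.createWriteStream(dest)).on('close', cb);
  }).on('error', cb);
}

download("https://graph.facebook.com/4/picture?width=500&height=500", "resultIMG.png", function () {
  console.log("saving process is done!");
});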