Why does node.js convert the POST body? - node.js

I want to save JPG binary body data to the file system on OpenShift, but somehow the received data gets converted along the way. Do you have any idea why? Is it possible that node.js treats the data as text and applies an encoding/decoding to it?
var myServer = http.createServer(function(request, response) {
  var data = '';
  request.on('data', function (chunk) {
    data += chunk;
  });
  request.on('end', function () {
    var date = new Date();
    var url_parts = url.parse(request.url, true);
    if (url_parts.pathname == '/setImage') {
      if (data != null && data.length > 0) {
        fs.writeFile('/var/lib/openshift/555dd1415973ca1660000085/app-root/data/asset/' + url_parts.query.filename, data, 'binary', function(err) {
          if (err) throw err;
          console.log(date + ' File saved. ' + url_parts.query.filename + ' ' + data.length);
          response.writeHead(200);
          response.end();
        });
      }
    }
  });
});

You are initializing data as a string, so appending chunks to it with += converts each chunk to a string as well (which subjects it to character encoding).
Instead, you should collect the chunks in an array of Buffers and use Buffer.concat() to create the final Buffer:
var chunks = [];
request.on('data', function (chunk) {
  chunks.push(chunk);
});
request.on('end', function () {
  var data = Buffer.concat(chunks);
  ...
});
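Putting it together, a minimal sketch of the question's 'end' handler with this fix (paths and query handling are assumed to match the question's code; the 'binary' encoding argument is no longer needed, because a Buffer is written out as raw bytes):
var chunks = [];
request.on('data', function (chunk) {
  chunks.push(chunk); // each chunk is already a Buffer
});
request.on('end', function () {
  var url_parts = url.parse(request.url, true);
  if (url_parts.pathname == '/setImage' && chunks.length > 0) {
    var data = Buffer.concat(chunks); // raw bytes, never decoded to a string
    fs.writeFile('/var/lib/openshift/555dd1415973ca1660000085/app-root/data/asset/' + url_parts.query.filename, data, function (err) {
      if (err) throw err;
      response.writeHead(200);
      response.end();
    });
  }
});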

Related

Node.JS Get data from Http Request POST

I am new to Node.js and am writing an AWS Lambda function in Node.js to get data from a REST endpoint and send the data back to Alexa. Below is a snippet of my code:
var req = http.request(post_options, function(response) {
  var str = '';
  response.setEncoding('utf8');
  // another chunk of data has been received, so append it to `str`
  response.on('data', function(chunk) {
    str += chunk;
  });
  // the whole response has been received, so we just print it out here
  response.on('end', function() {
    var result = JSON.parse(str);
    text = 'According to Weather Underground, the temperature in ' + citySlot + ',' + stateSlot + ' feels like ';
    text += result.HighFahr0 + " degrees fahrenheit.";
    console.log("text::" + text);
  });
});
req.write(post_data);
//this.emit(':ask', text, "Anything else i can help you with?");
req.end();
If I execute this.emit right after building the text string, it does not work. If I execute this.emit after the req.write call, the text variable is out of scope and contains nothing. How do I get the contents of the text variable after req.write() and req.end()?
Thanks very much.
Declare text before the request; it should work then.
var text;
var req = http.request(post_options, function(response) {
  var str = '';
  response.setEncoding('utf8');
  // another chunk of data has been received, so append it to `str`
  response.on('data', function(chunk) {
    str += chunk;
  });
  // the whole response has been received, so we just print it out here
  response.on('end', function() {
    var result = JSON.parse(str);
    text = 'According to Weather Underground, the temperature in ' + citySlot + ',' + stateSlot + ' feels like ';
    text += result.HighFahr0 + " degrees fahrenheit.";
    console.log("text::" + text);
  });
});
req.write(post_data);
this.emit(':ask', text, "Anything else i can help you with?");
req.end();
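Note that even with text declared up front, this.emit still fires before the response has arrived, because http.request is asynchronous. A sketch (not part of the original answer) that emits only once the response has ended, reusing the question's post_options, post_data, citySlot and stateSlot:
var self = this; // `this` is rebound inside the callbacks below
var req = http.request(post_options, function (response) {
  var str = '';
  response.setEncoding('utf8');
  response.on('data', function (chunk) {
    str += chunk;
  });
  response.on('end', function () {
    var result = JSON.parse(str);
    var text = 'According to Weather Underground, the temperature in ' +
      citySlot + ',' + stateSlot + ' feels like ' +
      result.HighFahr0 + ' degrees fahrenheit.';
    self.emit(':ask', text, "Anything else i can help you with?");
  });
});
req.write(post_data);
req.end();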

"Unhandled stream error in pipe" - Too many file downloads in Node.js / request?

I am trying to download many (around 2,000) images from a JSON feed using Node (specifically the request module). When I try to do this, looping through the JSON, I get
throw er; // Unhandled stream error in pipe.
I checked ulimit -n and it was set at 256, so I increased that to 4,000 and I still get the same error (although after I am downloading a much higher number of images).
I have two questions:
1. Why am I still getting an error if I raised the limit well in excess of the number of simultaneous downloads I actually have?
2. What is the best way to "queue" or pause the downloads so as not to overwhelm my system? (A sketch of one approach follows the code below.)
Here is my code.
var fs = require('fs')
  , http = require('http')
  , request = require('request')
  , url = 'http://www.urlOfJsonFeed'

function get() {
  http.get(url, function(res) {
    var body = '';
    res.on('data', function(chunk) {
      body += chunk;
    });
    res.on('end', function() {
      jParse(body);
    });
  }).on('error', function(e) {
    console.log("Got error: ", e);
  });
}
function jParse(info) {
  var data = JSON.parse(info);
  var entries = data.entries;
  var numToDownload = entries.length;
  for (var i = numToDownload - 1; i >= 0; i -= 1) {
    var link = entries[i]['imgUrl'];
    // mov and dMov are defined elsewhere in the asker's code
    download(link, 'images/' + mov + '.mp4', function() {
      console.log('Downloaded ' + dMov + ' movies');
      dMov++;
    });
  }
}
var download = function(uri, filename, callback) {
  request.head(uri, function(err, res, body) {
    if (err) {
      console.log('header error');
    }
    if (!err && res.statusCode == 200) {
      // Download the image
      request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
    }
  });
};
get()
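For the second question, here is a minimal sketch (not from the original thread) of capping concurrency with a simple counter-based queue, reusing the download(uri, filename, callback) helper above; the limit of 10 is an assumed value to tune for your system:
var MAX_CONCURRENT = 10; // assumed limit, tune as needed
var active = 0;
var pending = [];

function enqueue(uri, filename) {
  pending.push({ uri: uri, filename: filename });
  drain();
}

function drain() {
  // start queued downloads until the concurrency cap is reached
  while (active < MAX_CONCURRENT && pending.length > 0) {
    var job = pending.shift();
    active++;
    download(job.uri, job.filename, function () {
      active--;
      drain(); // a slot freed up, start the next queued download
    });
  }
}
Calling enqueue(link, ...) inside the jParse loop instead of download(...) keeps at most MAX_CONCURRENT streams open at a time.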

Combine sequence of asynchronous requests to build final answer

I'm rather new to the Express framework. I have to call the Flickr API to get an albums list and, for each album, fetch its thumbnail. At the end I need to build a covers array with a list of objects like {title, thumb}. I would like to pass the fully created covers array to the template and render it. I have a problem with this because of the way node.js callbacks work: the for loop finishes before the requests complete. How do I do that properly?
http.get(base_url+'&user_id='+flickr.user_id+'&method=flickr.photosets.getList', function(resp){
  var body = '';
  resp.on('data', function(chunk) {
    body += chunk;
  });
  resp.on('end', function() {
    var json = JSON.parse(body);
    var ps = json.photosets.photoset;
    // final answer
    var covers = {};
    for(var i=0; i<ps.length; i++) {
      var p = ps[i];
      var getSizesUrl = base_url+'&user_id='+flickr.user_id+'&method=flickr.photos.getSizes&photo_id='+p.primary;
      http.get(getSizesUrl, function(resp){
        var body1 = '';
        resp.on('data', function(chunk) {
          body1 += chunk;
        });
        resp.on('end', function() {
          var json1 = JSON.parse(body1);
          covers += {title: p.title._content, thumb: json1.sizes.size[1].source};
          if(i + 1 == ps.length) {
            // last call
            console.log(covers);
            res.render('photosets', {covers: covers});
          }
        });
      });
    }
  });
});
Update: using async and request as @sgwilly said, but something is wrong...
request(base_url+'&user_id='+flickr.user_id+'&method=flickr.photosets.getList', function (error, response, body) {
  var json = JSON.parse(body);
  var ps = json.photosets.photoset;
  // functions list to call by `async`
  var funcs = {};
  for(var i = 0; i < ps.length; i++) {
    var p = ps[i];
    funcs += function(callback) {
      request(base_url+'&user_id='+flickr.user_id+'&method=flickr.photos.getSizes&photo_id='+p.primary, function (error, response, body1){
        var json1 = JSON.parse(body1);
        var cover = {title: p.title._content, thumb: json1.sizes.size[1].source};
        callback(null, cover);
      });
    };
  }
  // run requests and produce covers
  async.series(funcs,
    function(covers){
      console.log(covers);
      res.render('photosets', {covers: covers});
    }
  );
});
Node.js callbacks are a little tricky, but they'll make sense after a while.
You want to keep request as your outer call, use async.concat inside its callback, and have each iterator be a request call to a URL.
Thanks to @dankohn. I got this working :)
request(base_url+'&user_id='+flickr.user_id+'&method=flickr.photosets.getList', function (error, response, body) {
  var json = JSON.parse(body);
  var ps = json.photosets.photoset;
  async.concat(ps, function(p, callback){
    request(base_url+'&user_id='+flickr.user_id+'&method=flickr.photos.getSizes&photo_id='+p.primary, function (error, response, body1){
      var json1 = JSON.parse(body1);
      var cover = {title: p.title._content, thumb: json1.sizes.size[1].source};
      callback(null, cover);
    });
  },
  function(err, covers){
    console.log(covers);
    res.render('photosets', {covers: covers});
  });
});

How can I 'accumulate' a raw stream in Node.js?

At the moment I concatenate everything into a string as follows:
var body = '';
res.on('data', function(chunk){
  body += chunk;
});
How can I preserve and accumulate the raw stream so I can pass raw bytes to functions that expect bytes rather than a String?
Better to use Buffer.concat, which is much simpler. Available in Node v0.8+.
var chunks = [];
res.on('data', function(chunk) { chunks.push(chunk); });
res.on('end', function() {
  var body = Buffer.concat(chunks);
  // Do work with body.
});
First off, check whether these functions actually need the bytes all in one go. They really should accept 'data' events, so that you can simply pass the buffers along in the order you receive them.
Anyway, here's a brute-force way to concatenate all data chunk buffers without decoding them:
var bodyparts = [];
var bodylength = 0;
res.on('data', function(chunk){
  bodyparts.push(chunk);
  bodylength += chunk.length;
});
res.on('end', function(){
  // Preallocate the full buffer, then copy each chunk into place.
  // (On newer Node versions, prefer Buffer.alloc(bodylength); new Buffer() is deprecated.)
  var body = new Buffer(bodylength);
  var bodyPos = 0;
  for (var i = 0; i < bodyparts.length; i++) {
    bodyparts[i].copy(body, bodyPos, 0, bodyparts[i].length);
    bodyPos += bodyparts[i].length;
  }
  doStuffWith(body); // yay
});
Alternatively, you can use a node.js library like bl or concat-stream:
'use strict'
let http = require('http')
let bl = require('bl')

http.get(url, function (response) {
  response.pipe(bl(function (err, data) {
    if (err)
      return console.error(err)
    console.log(data)
  }))
})
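For comparison, a sketch of the same pattern with concat-stream (concat-stream does not report errors itself, so handle them on the stream you pipe from):
'use strict'
let http = require('http')
let concat = require('concat-stream')

http.get(url, function (response) {
  response.on('error', console.error)
  response.pipe(concat(function (data) {
    console.log(data) // `data` is a single Buffer holding the whole body
  }))
})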

Getting binary files with cradle's getAttachment from CouchDB?

I uploaded a png as an attachment to a CouchDB database. When I look at it via Futon it is fine; if I try to get it back via cradle it is corrupted. I used a snippet from the cradle-test.js shipped with cradle and modified it a bit:
var response = {};
var streamer = db.getAttachment(data.id, filename);
streamer.addListener('response', function (res) {
  response.headers = res.headers;
  response.headers.status = res.statusCode;
  response.body = "";
});
streamer.addListener('data', function (chunk) { response.body += chunk; });
streamer.addListener('end', function () {
  fs.writeFile('new-' + filename, response.body, function (err) {
    if (err) throw err;
    console.log('It\'s saved!');
  });
});
The result is a corrupted png that is bigger than the input. I provided a working example here: http://jsfiddle.net/x8GZc/
The snippet you found is meant for a text document (basically a string).
For binary data (e.g. images), you must use the correct encoding when writing to the response object:
stream = client.database('images').getAttachment(req.params.id, filename);

// response is your HTTP response object
stream.on('data', function(chunk) {
  return response.write(chunk, "binary");
});
stream.on('end', function() {
  return response.end();
});
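If you want to save the attachment to disk instead, as in the question, here is a sketch using the Buffer-accumulation pattern from the answers above, so the bytes never pass through a string:
var streamer = db.getAttachment(data.id, filename);
var chunks = [];
streamer.on('data', function (chunk) {
  chunks.push(chunk); // keep raw Buffers, no string conversion
});
streamer.on('end', function () {
  fs.writeFile('new-' + filename, Buffer.concat(chunks), function (err) {
    if (err) throw err;
    console.log('It\'s saved!');
  });
});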
