I am new to Node.js and am writing an AWS Lambda function in Node.js to get data from a REST endpoint and send the data back to Alexa. Below is a snippet of my code:
// NOTE(review): `text` is assigned without a declaration here, so it becomes
// an implicit global — and it is only populated once the asynchronous 'end'
// event fires, which is the timing problem described below.
var req = http.request(post_options, function(response) {
var str = '';
response.setEncoding('utf8');
//another chunk of data has been received, so append it to `str`
response.on('data', function(chunk) {
str += chunk;
});
//the whole response has been received, so we just print it out here
response.on('end', function() {
var result = JSON.parse(str);
text = 'According to Weather Underground, the temperature in ' + citySlot + ',' + stateSlot + ' feels like ';
text += result.HighFahr0 + " degrees fahrenheit.";
console.log("text::"+text);
});
});
req.write(post_data);
//this.emit(':ask', text, "Anything else i can help you with?");
req.end();
If I execute this.emit right after building the text string, it does not work. If I execute this.emit after the req.write function, the text variable is out of scope and contains nothing. How do I get the contents of the text variable after req.write() and req.end()?
Thanks Very Much.
Declare text before the request, and note that it is only populated once the response has actually arrived — i.e. emit from inside the 'end' handler.
// Declare `text` up front, but remember it is only populated asynchronously:
// the ':ask' emit must happen inside the 'end' handler, after the response
// body has been fully received and parsed. Calling this.emit right after
// req.write() runs before the response exists, so `text` would still be
// undefined there. Capture `this` so the handler context is reachable
// inside the nested callbacks.
var text;
var self = this;
var req = http.request(post_options, function(response) {
  var str = '';
  response.setEncoding('utf8');
  // Another chunk of data has been received, so append it to `str`.
  response.on('data', function(chunk) {
    str += chunk;
  });
  // The whole response has been received: build the reply and emit it now.
  response.on('end', function() {
    var result = JSON.parse(str);
    text = 'According to Weather Underground, the temperature in ' + citySlot + ',' + stateSlot + ' feels like ';
    text += result.HighFahr0 + " degrees fahrenheit.";
    console.log("text::"+text);
    self.emit(':ask', text, "Anything else i can help you with?");
  });
});
req.write(post_data);
req.end();
Related
I want to save JPG binary body data to the file system in OpenShift, but somehow the received data gets converted. Do you have any idea why? Is it possible that Node.js treats the data as text and performs an encoding/decoding on it?
// NOTE(review): this snippet appears truncated — the 'end' handler, the
// server callback, and the createServer(...) call are not all closed here.
var myServer = http.createServer(function(request, response)
{
// Accumulating into a string coerces every Buffer chunk through a character
// encoding, which corrupts binary payloads such as JPEG data — this is the
// conversion the question is asking about.
var data = '';
request.on('data', function (chunk){
data += chunk;
});
request.on('end',function(){
var date = new Date();
var url_parts = url.parse(request.url,true);
if(url_parts.pathname == '/setImage') {
if(data != null && data.length > 0) {
// Writing the already-decoded string back out with 'binary' cannot recover
// the original bytes lost during the += string concatenation above.
fs.writeFile('/var/lib/openshift/555dd1415973ca1660000085/app-root/data/asset/' + url_parts.query.filename, data, 'binary', function(err) {
if (err) throw err
console.log(date + ' File saved. ' + url_parts.query.filename + ' ' + data.length)
response.writeHead(200)
response.end()
})
}
}
You are initializing data with a string, so adding chunks to it with += will convert the chunks to strings as well (which is subject to character encoding).
Instead, you should collect the chunks as an array of Buffers and use Buffer.concat() to create the final Buffer:
// Collect the raw Buffer chunks and join them once at the end, so the
// binary payload is never run through a string encoding.
var chunks = [];
request.on('data', function (chunk){
chunks.push(chunk);
});
request.on('end', function() {
var data = Buffer.concat(chunks);
...
});
I am trying to get the response of the HTTP request into my variable "temperature", but it doesn't work. I have already tried it with callbacks, but I'm not really familiar with them, so I can't solve my problem right now. Maybe someone has an idea?
thanks and best regards :-)
var temperature = '';
http.get(url, function(res) {
  //console.log("Got response: " + res.statusCode);
  var bodyarr = [];
  res.on('data', function(chunk){
    bodyarr.push(chunk);
  });
  res.on('end', function(){
    //console.log(bodyarr.join('').toString());
    temperature = bodyarr;
  });
}).on('error', function(e) {
  console.log("Got error: " + e.message);
});
// This line runs before the asynchronous 'end' handler above has fired,
// so `temperature` is still the empty string here — which is the bug
// being asked about.
console.log(temperature);
The issue is about asynchronicity here; the temperature object is undefined at that point since the log method is called before the request is done. Use the http.request as it's asynchronous; it takes a callback as parameter which you pass to response.end if you want to have the complete response:
//The url for example is 'www.random.org/temperatures/?min=1&max=100'
var options = {
host: 'www.random.org',
path: '/temperatures/?min=1&max=100'
};
var temperature = '';
var http = require('http');
callback = function(res) {
var bodyarr = [];
res.on('data', function (chunk) {
bodyarr.push(chunk);
});
res.on('end', function () {
console.log(req.data);
console.log(bodyarr);
temperature = bodyarr.join();
});
}
var req = http.request(options, callback).end();
I have the following code:
// NOTE(review): a new 'pull-requests:fetched' listener is registered on
// every incoming request, so the second request re-triggers the first one's
// handler and replies twice — hence "Can't set headers after they are sent".
// NOTE(review): the route callback only declares `request`; `response` must
// come from an enclosing scope — it was presumably meant to be
// (request, response). TODO confirm against the full file.
app.get('/pull-requests', function (request) {
fetchRepos(fetchPullRequests);
app.on('pull-requests:fetched', function (pullRequestsByRepo) {
var html = "";
_.each(pullRequestsByRepo, function (pullRequests) {
html += 'There is <strong>'+ pullRequests.length +'</strong> pending pull request(s) for <strong>'+ pullRequests[0].title +'</strong>:';
html += '<ul>';
_.each(pullRequests, function (pullRequest) {
html += '<li><em>'+ pullRequest.title +'</em> ('+ pullRequest.url +')</li>';
});
html += '</ul>';
});
response.send(html);
});
});
It works fine once. Every second request ends up raising the error Can't set headers after they are sent..
EDIT: More code to explicit the logic
/**
 * Fetch the list of repos for the configured GitHub organisation and hand
 * the parsed JSON array to `callback`.
 *
 * Fixes over the original:
 *  - `data` is declared with `var` (it was an implicit global, so two
 *    overlapping requests would clobber each other's response buffers);
 *  - the error handler logs its `error` parameter (it referenced an
 *    undefined `e`, which would throw a ReferenceError on any error).
 */
function fetchRepos (callback) {
  _options.path = '/orgs/'+ app.get('org') +'/repos?client_id='+ app.get('client_id') +'&client_secret='+ app.get('client_secret');
  // Fetch the list of repos for a given organisation
  var request = https.get(_options, function (res) {
    var data = "";
    res.on('data', function (chunk) {
      data += chunk;
    });
    res.on('end', function () {
      var repos = JSON.parse(data);
      return callback(repos);
    });
  });
  request.on('error', function (error) {
    console.log('Problem with request: '+ error);
  });
}
// Fan out one GitHub request per repo, collect the non-empty pull-request
// lists, and emit 'pull-requests:fetched' once the last repo has answered.
function fetchPullRequests (repos) {
var pullRequests = [];
_.each(repos, function (repo, index) {
// NOTE(review): `_options` is shared and mutated on every iteration; this
// only works because https.get() reads the path synchronously right here.
_options.path = '/repos/'+ app.get('org') +'/'+ repo.name +'/pulls?client_id='+ app.get('client_id') +'&client_secret='+ app.get('client_secret');
var request = https.get(_options, function (res) {
(function () {
var data = "";
res.on('data', function (chunk) {
data += chunk;
});
res.on('end', function () {
data = JSON.parse(data);
// Only keep repos that actually have open pull requests.
if (data.length > 0) {
pullRequests.push(data);
}
// NOTE(review): responses can arrive out of order, so the last *index*
// finishing does not guarantee all requests have completed — a completion
// counter would be safer. TODO confirm intended semantics.
if (index == (repos.length - 1)) {
app.emit('pull-requests:fetched', pullRequests);
}
});
})();
});
});
}
Your problem is that whenever you call app.on('pull-requests:fetched', …), you add a new listener, meaning that when the second request arrives, it will trigger the first one again.
Node then complains because you try to reply twice to the first request.
You could fix your immediate issue by calling app.once, which would ensure that the handler only fires once, but you would still have an issue if 2 requests arrive at the same time.
The correct pattern in this case is to pass a callback to fetchRepos.
I'm relatively new to callbacks and have been unsuccessful in getting the following code to work. I have used the async.map function to return the data from each web call to its respective page. However, my console.log(result) is returning [ , undefined] even though the console.log(data) prior to the callback(data) is returning the web page's html. Here's my code:
// Module-level setup: HTTP client, filesystem, and async control-flow helper.
var http = require("http"),
    fs = require("fs"),
    page, chap, ot,
    async = require("async");
// fs.open() is asynchronous and requires a callback; use the synchronous
// variant to actually get a file descriptor back.
ot = fs.openSync('ot.txt', 'w');
page = "test";
chap = 2;
/**
 * Fetch http://rs.au.com<url> and invoke `callback` Node-style.
 *
 * @param {string} url - request path, e.g. "/test/1.html"
 * @param {function(Error|null, string)} callback - called with (err, body)
 */
function getData(url, callback) {
  var data = "";
  var options = {
    host: "rs.au.com",
    port: 80
  }
  options.path = url;
  console.log("request sent to: http://" + options.host + options.path);
  var req = http.request(options, function(res) {
    console.log("Response received " + res.statusCode);
    res.on('data', function(chunk) {
      data += chunk;
    });
    res.on('end', function() {
      console.log(data);
      // The 'end' event carries no arguments, so the old `callback(e, data)`
      // always passed undefined; pass an explicit null to follow the Node
      // callback(err, data) convention.
      callback(null, data);
    });
  });
  req.on('error', function(err) {
    // Without this, a connection failure would crash the process instead of
    // surfacing through async.map's final callback.
    callback(err);
  });
  req.end();
}
// Build the list of chapter paths and fetch them all in parallel.
function main() {
  var pathArr = [];
  for ( var i = 1; i <= chap; i++ ) {
    // push() keeps the array dense; writing to pathArr[i] with i starting
    // at 1 left pathArr[0] undefined, which async.map then passed through
    // and produced the leading hole in the result.
    pathArr.push("/".concat(page, "/", i, ".html"));
  }
  async.map(pathArr, getData, function(err, result) {
    console.log("The result is :" + result);
  });
}
main();
Could anyone please point out why my code is not working and how I can correct it?
Much appreciated!
EDIT: After Brandon Tilley's response I amended the callback function from callback(data) to callback(e, data), however I'm getting no response now from the last console.log output.
The Async library assumes your callbacks adhere to the standard Node.js callback signature, which is callback(err, others...). Since you are passing data as the first argument, Async assumes it's an error. You should use callback(e, data) instead (since e will be null in the case of no errors).
[Update]
The other problem is that your array is not correct. Since i starts at 1 and goes up to chap, pathArr[0] is undefined. Change:
pathArr[i] = "/".concat(page, "/", i, ".html");
to
pathArr[i-1] = "/".concat(page, "/", i, ".html");
At the moment I concatenate everything into a string as follows
// Accumulating with += coerces each Buffer chunk to a string, so the raw
// bytes are lost to character decoding before they can be used.
var body = '';
res.on('data', function(chunk){
body += chunk;
});
How can I preserve and accumulate the raw stream so I can pass raw bytes to functions that are expecting bytes and not String?
Better use Buffer.concat - much simpler. Available in Node v0.8+.
// Gather every raw chunk as-is, then stitch them into a single Buffer once
// the response stream has finished.
var parts = [];
res.on('data', function(piece) {
parts.push(piece);
});
res.on('end', function() {
var body = Buffer.concat(parts);
// body now holds the complete raw payload.
});
First off, check that these functions actually need the bytes all in one go. They really should accept 'data' events so that you can just pass on the buffers in the order you receive them.
Anyway, here's a bruteforce way to concatenate all data chunk buffers without decoding them:
// Accumulate raw chunks plus a running byte count, then copy them into one
// pre-sized Buffer when the stream ends.
var bodyparts = [];
var bodylength = 0;
res.on('data', function(chunk){
  bodyparts.push(chunk);
  bodylength += chunk.length;
});
res.on('end', function(){
  // Buffer.alloc() replaces the deprecated `new Buffer(size)` constructor
  // (DEP0005), which returned uninitialized — potentially sensitive — memory.
  // Every byte is overwritten by the copies below either way.
  var body = Buffer.alloc(bodylength);
  var bodyPos = 0;
  for (var i = 0; i < bodyparts.length; i++) {
    bodyparts[i].copy(body, bodyPos, 0, bodyparts[i].length);
    bodyPos += bodyparts[i].length;
  }
  doStuffWith(body); // the complete, undecoded response body
});
Alternately, you can also use a node.js library like bl or concat-stream:
'use strict'
// Pipe the response into bl, which buffers the whole stream and hands the
// concatenated result to a single callback.
let http = require('http')
let bl = require('bl')
http.get(url, function (response) {
  let collector = bl(function (err, data) {
    if (err) {
      return console.error(err)
    }
    console.log(data)
  })
  response.pipe(collector)
})