I'm able to get data through a single request, but now I'm trying to send multiple HTTP requests. I'm stuck: I can't get the data, and I don't know how to pass it to the view page, i.e. the EJS template.
var request = require('request'); // needed for the HTTP calls below

router.get('/specials', function(req, res, next) {
  var callbackThree = function(error, resp, body) {
    var data = JSON.parse(body);
    res.render("specials", { data: data });
  };
  var callbackTwo = function(error, resp, body) {
    request("https://siteblabla.com/wsmenu/sub_menu_list/789/", callbackThree);
  };
  var callbackOne = function(error, resp, body) {
    request("https://siteblabla.com/wsspecials/specials_list/123/", callbackTwo);
  };
  // kick off the chain with the first request
  request("api.com/users", callbackOne);
});
You need to use Promises, and there is an npm package called ejs-promise that you can make use of in your case. You can download it at the URL below:
https://www.npmjs.com/package/ejs-promise
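For the request chaining itself, here is a minimal sketch with a hand-rolled Promise wrapper, assuming the same endpoints as in your code (the get() helper is illustrative, not part of any package):

var request = require('request');

// hand-rolled helper: wrap a request() call in a Promise
function get(url) {
  return new Promise(function(resolve, reject) {
    request(url, function(error, resp, body) {
      if (error) return reject(error);
      resolve(body);
    });
  });
}

router.get('/specials', function(req, res, next) {
  get("https://siteblabla.com/wsspecials/specials_list/123/")
    .then(function() {
      // the specials response is ignored here, as in the original chain
      return get("https://siteblabla.com/wsmenu/sub_menu_list/789/");
    })
    .then(function(body) {
      res.render("specials", { data: JSON.parse(body) });
    })
    .catch(next); // hand any request/parse error to Express
});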
Hope this helps!
I want to access the HTTP request URL and parameters in the callback function. When I print id with console.log, I get an error that id is undefined. How can I access id and the request URL?
const Request = require('request');
var id = 5;
Request.get('https://example.com/' + id, function (error, response, body) {
  console.log("id", id);
});
Your code works. It may be a syntax error issue; this is the updated code. I tested it in my console and it works fine.
const Request = require('request');
var id = 5;
Request.get('https://example.com/' + id, function (error, response, body) {
  if (error) {
    return console.dir(error);
  }
  console.log("id", id);
});
You might need to have a look at the official documentation of the request package, which explains its usage in detail.
I have a simple Node Express app that has a service that makes a call to a Node server. The Node server makes a call to an AWS web service. The AWS call simply lists any S3 buckets it finds and is asynchronous. The problem is that I don't seem to be able to get the server code to "wait" for the AWS call to return the JSON data, and the function returns undefined.
I've read many, many articles on the web about this, including promises, wait-fors, etc., but I think I'm not understanding fully how these work!
This is my first exposure to Node, and I would be grateful if somebody could point me in the right direction.
Here are some snippets of my code... apologies if it's a bit rough, but I've chopped and changed things many times over!
Node Express:
var Httpreq = new XMLHttpRequest(); // a new request
Httpreq.open("GET","http://localhost:3000/listbuckets",false);
Httpreq.send(null);
console.log(Httpreq.responseText);
return Httpreq.responseText;
Node Server:
app.get('/listbuckets', function (req, res) {
  var bucketData = MyFunction(res, req);
  console.log("bucketData: " + bucketData);
});

function MyFunction(res, req) {
  var mydata;
  var params = {};
  res.send('Here are some more buckets!');
  var request = s3.listBuckets();
  // register a callback event handler
  request.on('success', function (response) {
    // log the successful data response
    console.log(response.data);
    mydata = response.data;
  });
  // send the request
  request
    .on('success', function (response) {
      console.log("Success!");
    })
    .on('error', function (response) {
      console.log("Error!");
    })
    .on('complete', function () {
      console.log("Always!");
    })
    .send();
  return mydata;
}
Use the Fetch API (https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) to make HTTP calls. It has built-in Promise support.
fetch('http://localhost:3000/listbuckets').then(response => {
  // do something with the response here
}).catch(error => {
  // Error :(
})
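For example, to read a JSON body from that endpoint (a small sketch; it assumes /listbuckets responds with JSON):

fetch('http://localhost:3000/listbuckets')
  .then(response => response.json()) // parse the JSON body
  .then(data => {
    console.log(data); // the data is only available here, inside the chain
  })
  .catch(error => {
    console.error(error);
  });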
I eventually got this working with:
const request = require('request');
const parseString = require('xml2js').parseString; // assuming parseString comes from xml2js

request(url, function (error, response, body) {
  if (!error && response.statusCode == 200) {
    parseString(body, function (err, result) {
      console.log(JSON.stringify(result));
    });
    // from within the callback, write data to the response, essentially returning it
    res.send(body);
  } else {
    // console.log(JSON.stringify(response));
  }
})
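Applied to the original S3 example, the same idea (responding from inside the callback rather than returning a value) would look roughly like this; a sketch using the aws-sdk callback form, with app and s3 set up as in the question:

app.get('/listbuckets', function (req, res) {
  // let the SDK invoke the callback when the data is ready,
  // and only write the HTTP response at that point
  s3.listBuckets(function (err, data) {
    if (err) {
      return res.status(500).send(err.message);
    }
    res.send(data.Buckets); // the bucket list, sent from inside the callback
  });
});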
I know how to make a GET request to a URL using the request module. Eventually, the code just prints the GET response in the command shell from which it was spawned.
How do I store this GET response in a local variable so that I can use it elsewhere in the program?
This is the code I use:
var request = require("request");
request("http://www.stackoverflow.com", function(error, response, body) {
  console.log(body);
});
The easiest way (but it has pitfalls; see below) is to move body into the scope of the module.
var request = require("request");
var body;
request("http://www.stackoverflow.com", function(error, response, data) {
  body = data;
});
However, this may encourage errors. For example, you might be inclined to put console.log(body) right after the call to request().
var request = require("request");
var body;
request("http://www.stackoverflow.com", function(error, response, data) {
  body = data;
});
console.log(body); // THIS WILL NOT WORK!
This will not work because request() is asynchronous, so it returns control before body is set in the callback.
You might be better served by creating body as an event emitter and subscribing to events.
var request = require("request");
var EventEmitter = require("events").EventEmitter;
var body = new EventEmitter();
request("http://www.stackoverflow.com", function(error, response, data) {
  body.data = data;
  body.emit('update');
});
body.on('update', function () {
  console.log(body.data); // HOORAY! THIS WORKS!
});
Another option is to switch to using promises.
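For example, a minimal promise-based sketch (the wrapper is hand-rolled here; a package such as request-promise achieves the same thing):

var request = require("request");

// hand-rolled Promise wrapper around request()
function getBody(url) {
  return new Promise(function (resolve, reject) {
    request(url, function (error, response, data) {
      if (error) return reject(error);
      resolve(data);
    });
  });
}

getBody("http://www.stackoverflow.com").then(function (body) {
  console.log(body); // body is available here, once the request has completed
}).catch(function (error) {
  console.error(error);
});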
I'm playing around with using Node.js as a custom front end for Drupal, and I'm trying to come up with a way to match the back-end menu system, blocks, and views with the routing in Express.
Example route:
module.exports = {
  '/work': function(req, res) {
    // get view json for this page
    request('http://site.api/casestudies', function(err, response, body) {
      var views_body = JSON.parse(body);
      // get node id from alias
      request('http://site.api/alias-to-nid' + req.url, function(err, response, body) {
        body = JSON.parse(body);
        var reqUrl = 'http://site.api/rest/api/' + body.path;
        request(reqUrl, function(err, response, body) {
          body = JSON.parse(body);
          // get the data we need
          var node_title = body.title,
              node_body = body.body.und[0].safe_value,
              pageclass = 'not-front section-work';
          res.render('work', {title: node_title, class: pageclass, node_title: node_title, node_body: node_body, views_body: views_body});
        });
      });
    });
  }
}
So, I hit /work and grab the JSON for the casestudies view that should exist on that page, then I look up the node id from the /work alias using another request, and finally use the node id in yet another nested request call to grab the rest of the JSON for the page, before finally sending it on to the template.
Now, I have a feeling that this is a terrible way to go about this. What should I be doing instead!?
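One common way to flatten the nesting is to wrap request in a Promise and chain the calls. A sketch, assuming the same endpoints as above (the getJson helper is illustrative):

var request = require('request');

// illustrative helper: fetch a URL and resolve with the parsed JSON body
function getJson(url) {
  return new Promise(function(resolve, reject) {
    request(url, function(err, response, body) {
      if (err) return reject(err);
      resolve(JSON.parse(body));
    });
  });
}

module.exports = {
  '/work': function(req, res) {
    var views_body;
    getJson('http://site.api/casestudies')
      .then(function(views) {
        views_body = views;
        return getJson('http://site.api/alias-to-nid' + req.url);
      })
      .then(function(alias) {
        return getJson('http://site.api/rest/api/' + alias.path);
      })
      .then(function(node) {
        res.render('work', {
          title: node.title,
          class: 'not-front section-work',
          node_title: node.title,
          node_body: node.body.und[0].safe_value,
          views_body: views_body
        });
      })
      .catch(function(err) {
        res.status(500).send(err.message);
      });
  }
}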
I'm apparently a little newer to JavaScript than I'd care to admit. I'm trying to pull a webpage using Node.js and save the contents as a variable, so I can parse it however I feel like.
In Python, I would do this:
from bs4 import BeautifulSoup # for parsing
import urllib
text = urllib.urlopen("http://www.myawesomepage.com/").read()
parse_my_awesome_html(text)
How would I do this in Node?
I've gotten as far as:
var request = require("request");
request("http://www.myawesomepage.com/", function (error, response, body) {
  /*
    Something here that lets me access the text
    outside of the closure

    This doesn't work:
    this.text = body;
  */
})
var request = require("request");
var parseMyAwesomeHtml = function(html) {
  // Have at it
};
request("http://www.myawesomepage.com/", function (error, response, body) {
  if (!error) {
    parseMyAwesomeHtml(body);
  } else {
    console.log(error);
  }
});
Edit: As Kishore noted, there are nice options available for parsing. Also see cheerio if you have Python/gyp issues with jsdom on Windows: Cheerio on github
That request() call is asynchronous, so the response is only available inside the callback. You have to call your parse function from it:
function parse_my_awesome_html(text) {
  ...
}

request("http://www.myawesomepage.com/", function (error, response, body) {
  parse_my_awesome_html(body)
})
Get used to chaining callbacks; that's essentially how any I/O happens in JavaScript :)
jsdom is pretty good for achieving things like this if you want to parse the response.
var request = require('request'),
    jsdom = require('jsdom');

request({ uri: 'http://www.myawesomepage.com/' }, function (error, response, body) {
  if (error || response.statusCode !== 200) {
    return console.log('Error when contacting myawesomepage.com');
  }
  jsdom.env({
    html: body,
    scripts: [
      'http://code.jquery.com/jquery-1.5.min.js'
    ]
  }, function (err, window) {
    var $ = window.jQuery;
    // jQuery is now loaded on the jsdom window created from 'body'
    console.log($('body').html());
  });
});
Also, if your page has a lot of JavaScript/AJAX content being loaded, you might want to consider using PhantomJS.
Source: http://blog.nodejitsu.com/jsdom-jquery-in-5-lines-on-nodejs/