Hi, I'm trying to use a callback so I can use a variable outside the function, but something is wrong. I don't think my callback is as correct as I thought:
function latitude(callback){
    var mylat;
    const https = require('https');
    https.get('https://url_of_my_json', (resp) => {
        let data = '';
        // A chunk of data has been received.
        resp.on('data', (chunk) => {
            data += chunk;
        });
        // The whole response has been received. Print out the result.
        resp.on('end', () => {
            mylat = JSON.parse(data).results[0].geometry.location.lat;
            callback(mylat);
        });
    }).on("error", (err) => {
        console.log("Error: " + err.message);
    });
}
var mylat = latitude(); // variable I want to use for the rest of the code
Thanks
The way you are using the callback is incorrect: latitude() doesn't return anything, so var mylat = latitude() will always be undefined. You have to pass a callback function into latitude() and use the value inside that callback.
Here is an example to better understand callbacks, kept as simple as possible: just save it as newfile.js and run node newfile to try it.
function myNew(next){
    console.log("I'm the one who initiates the callback");
    next("nope", "success");
}
myNew(function(err, res){
    console.log("I got back from callback", err, res);
});
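The same idea applied to your latitude function, as a minimal sketch; the rest of your code has to move inside the callback, because the value only exists once the response has ended:
latitude(function(mylat) {
    // mylat is only available here, after the HTTPS response has ended
    console.log('latitude is ' + mylat);
    // ...put the rest of the code that needs mylat here...
});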
happy coding :)
The code pasted below finishes too fast, because the call to:
global.h2=jsonifiedver(l.number)
is too slow. How can I make it wait for an answer from the jsonifiedver() function call so that I get a correct result? I tried using global variables; they work, but only on every second call, which is how I know the call itself works. The program just ends too fast, and by the second call it already has the data I want. I'm new to Node.js, so any help is appreciated. Thanks!
const server = http.createServer((req, res) => {
    if (req.method == 'POST') {
        var body = ''
        req.on('data', function(data) {
            body += data
            global.l = JSON.parse(body)
            l = JSON.parse(body)
            global.h2 = jsonifiedver(l.number) // This call is slow and doesn't
                                               // finish in time
            global.h3 = JSON.stringify(h2)
            console.log('Partial body: ' + body, global.l, global.l.number)
            console.log("POST")
            res.end("Not The real end");
        })
    } else {
        console.log("GET")
    }
    res.statusCode = 200;
    res.setHeader('Content-Type', 'application/json'); // 'text/plain');
    console.log(global.l)
    res.end(global.h3); //"End");
});
So res.end(global.h3) runs before my call to global.h2 = jsonifiedver(l.number) finishes, and I don't get the answer I need. Does that make sense?
The problem is that when the request comes in, req.on('data', function(){}) only registers a handler for the data event, but you are also sending the response with res.end() after the else statement. You should not send the response until the req.on('end') event fires, which signals that all of the request data has arrived. In the data handler you should ideally just append the chunk to body, and in the end handler you should process body and send the response, like this:
const server = http.createServer((req, res) => {
    const methodType = req.method;
    if (methodType === "GET") {
        console.log("GET");
        res.statusCode = 200;
        res.setHeader('Content-Type', 'application/json');
        console.log(global.l);
        res.end(global.h3);
    } else if (methodType === 'POST') {
        console.log("POST");
        var body = '';
        req.on('data', function(chunk) {
            body += chunk;
        });
        req.on('end', function() {
            global.l = JSON.parse(body);
            global.h2 = jsonifiedver(global.l.number);
            global.h3 = JSON.stringify(global.h2);
            res.statusCode = 200;
            res.setHeader('Content-Type', 'application/json');
            console.log(global.l);
            res.end(global.h3);
        });
    }
});
And if you want to wait until the jsonifiedver() call is done, make sure it is defined as a Promise-returning / async function; then you can call it with await. Also make sure the wrapper function where you invoke jsonifiedver() is defined as async too.
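For example, a minimal sketch of that end handler; the real body of jsonifiedver isn't shown in the question, so this only assumes it can be rewritten (or wrapped) to return a Promise:
// Sketch only: assumes jsonifiedver can be made to return a Promise.
async function jsonifiedver(number) {
    // ...do the slow work here and return the result...
}

req.on('end', async function() {                      // the handler itself is async
    global.l = JSON.parse(body);
    global.h2 = await jsonifiedver(global.l.number);  // waits for the slow call
    global.h3 = JSON.stringify(global.h2);
    res.statusCode = 200;
    res.setHeader('Content-Type', 'application/json');
    res.end(global.h3);
});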
I'm trying to learn Node.js, and currently I'm just trying to print stuff out to the response. I am getting a team roster from an NHL API; I can console.log it and I get the data. But when I try to call the function in response.write, that very variable is empty.
Here is tutorial.js. It's gt.getTeam(team) that returns an empty string (from the variable 'players' in my_hockey_team.js, as seen below).
var http = require("http");
var gt = require("./my_hockey_team");
let team = process.argv.slice(2);
console.log(team[0]);
http.createServer(function (request, response){
    response.writeHead(200, {'Content-Type': 'text/html'});
    response.write("Players are" + gt.getTeam(team[0]));
    console.log(gt.getTeam(team[0]));
    response.end("Hello World");
}).listen(3000);
console.log("Server running at 127.0.0.1");
process.on('uncaughtException', function (err) {
    console.log(err);
});
And here is my_hockey_team.js. Inside the getTeam function, the 'players' variable never gets populated. I don't know if it's an asynchronous thing or a scope thing or what.
const https = require('https');
var http = require("http");

function getTeam(teamNr){
    let players = "";
    const request = https.get(`https://statsapi.web.nhl.com/api/v1/teams/${teamNr}/roster`, response => {
        let body = "";
        if (response.statusCode !== 200){
            request.abort();
        }
        response.on('data', data => {
            body += data;
        });
        response.on('end', () => {
            if (response.statusCode === 200){
                try {
                    const team = JSON.parse(body);
                    for (var i in team.roster){
                        players += team.roster[i].person.fullName;
                    }
                } catch(error) {
                    console.log(error.message);
                }
            }
        });
    });
    request.on('error', error => {
        console.error(error.message + " Something went wrong with the teams?");
    });
    return players;
}
module.exports.getTeam = getTeam;
If I uncomment the console.log inside response.on('end', ...), I get all the data that I want, but the 'players' variable is just an empty string when I call the getTeam function. Can someone please tell me what I'm doing wrong?
Thanks!
getTeam needs to be an async function, return a promise, or (if you're using a really old version of Node) take a callback, and then your calling code needs to handle that appropriately. Right now the method looks like it's setting up all the asynchronous response handling correctly and then returning players as initialized before any of it can actually happen.
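For example, a minimal sketch of the Promise-based approach, reusing the same request and parsing code from the question (not the only way to structure it):
// my_hockey_team.js — sketch: getTeam resolves with the players string
const https = require('https');

function getTeam(teamNr) {
    return new Promise((resolve, reject) => {
        https.get(`https://statsapi.web.nhl.com/api/v1/teams/${teamNr}/roster`, response => {
            let body = "";
            response.on('data', data => { body += data; });
            response.on('end', () => {
                try {
                    const team = JSON.parse(body);
                    let players = "";
                    for (const entry of team.roster) {
                        players += entry.person.fullName;
                    }
                    resolve(players);
                } catch (error) {
                    reject(error);
                }
            });
        }).on('error', reject);
    });
}
module.exports.getTeam = getTeam;

// tutorial.js — the handler now waits for the Promise before writing
http.createServer(async function (request, response) {
    response.writeHead(200, {'Content-Type': 'text/html'});
    const players = await gt.getTeam(team[0]);
    response.write("Players are " + players);
    response.end("Hello World");
}).listen(3000);
An async callback works here because http.createServer only cares that you eventually call response.end(); you would still want your own error handling for the case where the Promise rejects.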
If I query the Box REST API and get back a readable stream, what is the best way to handle it? How do I send it to the browser? (DISCLAIMER: I'm new to streams and buffers, so some of this code is pretty theoretical.)
Can you pass the read stream in the response and let the browser handle it? Or do you have to stream the chunks into a buffer and then send the buffer?
export function getFileStream(req, res) {
    const fileId = req.params.fileId;
    console.log('fileId', fileId);
    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            console.log('error', err);
            return res.status(500).send(err);
        }
        res.type('application/octet-stream');
        console.log('stream', stream);
        return res.status(200).send(stream);
    });
}
Will ^^ work, or do you need to do something like:
export function downloadFile(req, res) {
    const fileId = req.params.fileId;
    console.log('fileId', fileId);
    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            console.log('error', err);
            return res.status(500).send(err);
        }
        const buffers = [];
        console.log('stream', stream);
        stream.on('data', (chunk) => {
            buffers.push(chunk);
        })
        .on('end', function(){
            const finalBuffer = Buffer.concat(buffers);
            return res.status(200).send(finalBuffer);
        });
    });
}
The first example would work if you changed your theoretical line to:
- return res.status(200).send(stream);
+ res.writeHead(200, {header: here})
+ stream.pipe(res);
That's one of the nicest things about Node streams. The other case would (in essence) work too, but it would accumulate a lot of unnecessary memory.
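Put together, a minimal sketch of the piped version; it keeps the req.sdk.files.getReadStream call from the question and assumes nothing else about the Box SDK, with the Content-Type carried over from your first example:
export function getFileStream(req, res) {
    const fileId = req.params.fileId;
    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            console.log('error', err);
            return res.status(500).send(err);
        }
        // Send headers first, then let Node move the bytes chunk by chunk.
        res.writeHead(200, { 'Content-Type': 'application/octet-stream' });
        stream.pipe(res);
    });
}
pipe() also handles backpressure for you, so large files never sit fully in memory the way the Buffer.concat version does.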
If you'd like to check a working example, here's one I wrote based on scramjet, express and browserify:
https://github.com/MichalCz/scramjet/blob/master/samples/browser/browser.js
There the streams go from the server to the browser; with minor mods it'll fit your problem.
I want to spider some links, and after all the tasks are done I want to do something else.
Tracking when all the tasks are done is the hard part for me. I hope someone can help.
Here is my code:
var urlList = [];
// Read file lines
lineReader.eachLine('url.txt', function(line) {
    console.log('url is :' + line);
    urlList.push(line);
}).then(function(){ // After reading, begin to process each line
    console.log('read done!begin collect');
    async.each(urlList, function(line){
        console.log('begin line :' + line);
        // download each url
        download(line, function(data, cb){
            var $ = cheerio.load(data); // load cheerio
            var title = $('head>title').text(); // get title
            console.log('title is ' + title);
        });
    });
    // Here I want to track whether all urls have been downloaded, so I can do something else
    if(/* allproceed */)
    {
        console.log('Task all done!Begin Next');
    }
});

function download(url, callback) {
    http.get(url, function(res) {
        var data = "";
        res.on('data', function (chunk) {
            data += chunk;
        });
        res.on("end", function() {
            callback(data);
        });
    }).on("error", function(e) {
        console.log("Got error: " + e.message);
        callback(null);
    });
}
Hope someone can help me.
Thanks very much.
I've made some fixes to your code, see the result below:
var urlList = [];
// Read file lines
lineReader.eachLine('url.txt', function(line) {
    console.log('url is :' + line);
    urlList.push(line);
}).then(function(){ // After reading, begin to process each line
    console.log('read done!begin collect');
    async.each(urlList, function(line, callback){
        console.log('begin line :' + line);
        // download each url
        download(line, function(err, data){
            if (err) {
                return callback(err);
            }
            var $ = cheerio.load(data); // load cheerio
            var title = $('head>title').text(); // get title
            console.log('title is ' + title);
            callback(null, title);
        });
    }, function continueHere(err) {
        // This runs once every url has been downloaded, so the rest of your code goes here
        console.log('Task all done!Begin Next');
    });
});

function download(url, callback) {
    http.get(url, function(res) {
        var data = "";
        res.on('data', function (chunk) {
            data += chunk;
        });
        res.on("end", function() {
            callback(null, data);
        });
    }).on("error", function(e) {
        console.log("Got error: " + e.message);
        callback(e);
    });
}
Some things to pay special attention to:
You were already very close to your answer. async.each() is the right tool to get the job done, but you were not using it correctly yet. The iterator function you pass it, the one that gets called for each item in urlList, takes a callback that you call when the work for that iteration is done. I added that callback.
async.each() also takes a third argument: The function that gets called when all tasks have completed. In this function you can put the code that continues the rest of your application.
With regards to using callback: a pattern repeated across Node.js is that the first argument passed to a callback is always an error, if one exists. If not, that argument is undefined or null. The actual result is passed as the second argument. It's a good idea to follow this pattern; async, for instance, expects you to obey it. If any of the tasks in async.each() fails (by passing a non-null value as the first argument to its callback), async considers the entire series failed and passes that error on to the series callback (in the code above, the function continueHere).
One last thing. Though the code above should work, it mixes promises (signified by the .then() call) with callbacks. These are two different ways to manage asynchronous code. Though you're free to mix them if you want, for readability it might help to pick one pattern and stick with it ;).
I have the following code:
app.get('/pull-requests', function (request, response) {
    fetchRepos(fetchPullRequests);
    app.on('pull-requests:fetched', function (pullRequestsByRepo) {
        var html = "";
        _.each(pullRequestsByRepo, function (pullRequests) {
            html += 'There is <strong>' + pullRequests.length + '</strong> pending pull request(s) for <strong>' + pullRequests[0].title + '</strong>:';
            html += '<ul>';
            _.each(pullRequests, function (pullRequest) {
                html += '<li><em>' + pullRequest.title + '</em> (' + pullRequest.url + ')</li>';
            });
            html += '</ul>';
        });
        response.send(html);
    });
});
It works fine once. Every second request ends up raising the error Can't set headers after they are sent.
EDIT: More code to make the logic explicit
function fetchRepos (callback) {
    _options.path = '/orgs/' + app.get('org') + '/repos?client_id=' + app.get('client_id') + '&client_secret=' + app.get('client_secret');
    // Fetch the list of repos for a given organisation
    var request = https.get(_options, function (res) {
        var data = "";
        res.on('data', function (chunk) {
            data += chunk;
        });
        res.on('end', function () {
            var repos = JSON.parse(data);
            return callback(repos);
        });
    });
    request.on('error', function (error) {
        console.log('Problem with request: ' + error);
    });
}
function fetchPullRequests (repos) {
    var pullRequests = [];
    _.each(repos, function (repo, index) {
        _options.path = '/repos/' + app.get('org') + '/' + repo.name + '/pulls?client_id=' + app.get('client_id') + '&client_secret=' + app.get('client_secret');
        var request = https.get(_options, function (res) {
            (function () {
                var data = "";
                res.on('data', function (chunk) {
                    data += chunk;
                });
                res.on('end', function () {
                    data = JSON.parse(data);
                    if (data.length > 0) {
                        pullRequests.push(data);
                    }
                    if (index == (repos.length - 1)) {
                        app.emit('pull-requests:fetched', pullRequests);
                    }
                });
            })();
        });
    });
}
Your problem is that whenever you call app.on('pull-requests:fetched', …), you add a new listener, meaning that when the second request arrives, it will trigger the first one again.
Node then complains because you try to reply twice to the first request.
You could fix your immediate issue by calling app.once, which would ensure the listener only fires once, but you would still have an issue if two requests arrive at the same time.
The correct pattern in this case is to pass a callback to fetchRepos.
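For example, a minimal sketch of that pattern, reusing the code from the edit above; fetchRepos stays the same, and the per-request response is captured in a closure instead of an app-wide event:
app.get('/pull-requests', function (request, response) {
    // Thread a callback through instead of using app.on events.
    fetchRepos(function (repos) {
        fetchPullRequests(repos, function (pullRequestsByRepo) {
            var html = "";
            _.each(pullRequestsByRepo, function (pullRequests) {
                html += 'There is <strong>' + pullRequests.length + '</strong> pending pull request(s) for <strong>' + pullRequests[0].title + '</strong>:';
                html += '<ul>';
                _.each(pullRequests, function (pullRequest) {
                    html += '<li><em>' + pullRequest.title + '</em> (' + pullRequest.url + ')</li>';
                });
                html += '</ul>';
            });
            response.send(html);
        });
    });
});

function fetchPullRequests (repos, callback) {
    var pullRequests = [];
    _.each(repos, function (repo, index) {
        _options.path = '/repos/' + app.get('org') + '/' + repo.name + '/pulls?client_id=' + app.get('client_id') + '&client_secret=' + app.get('client_secret');
        https.get(_options, function (res) {
            var data = "";
            res.on('data', function (chunk) {
                data += chunk;
            });
            res.on('end', function () {
                data = JSON.parse(data);
                if (data.length > 0) {
                    pullRequests.push(data);
                }
                if (index == (repos.length - 1)) {
                    callback(pullRequests); // reply to this request only
                }
            });
        });
    });
}
Note that this keeps your original "index == repos.length - 1" completion check, which only works if the last request happens to finish last; a counter or a library like async would be more robust.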