Return object from REST call - node.js

I'm pretty new at Node, so I may be going about this all wrong (if I am, don't be afraid to say so). I'd like to create an object made of data from several different REST servers and return the final object to my calling function. For each REST API I have a function that looks a bit like this:
jobs.js
var http = require('http');
var config = require('./config'); // assuming the host/port live in a local config module

exports.get_users_jobs = function(options, onResult) {
    var user = options.params.user;
    console.log("Get jobs for user: " + user);
    var optionsget = {
        host: config.jobs_rest_host,
        port: config.jobs_rest_port,
        path: "/jobs3/user/" + user,
        method: "GET",
        headers: {
            'Content-Type': 'application/json'
        }
    };
    var reqGet = http.request(optionsget, function(res) {
        var output = '';
        res.setEncoding('utf-8');
        res.on('data', function(chunk) {
            output += chunk;
        });
        res.on('end', function() {
            var obj = JSON.parse(output);
            onResult.send(res.statusCode, obj);
        });
    });
    reqGet.on('error', function(e) {
        console.error('error: ' + e.message);
    });
    reqGet.end();
};
That works fine when I'm calling it directly from the browser, but now I'd like to call get_users_jobs from another function, take the data and plonk that into my uber object. So I've created something a bit like this (I've only put jobs in there for now, but soon there will be other variables):
users.js
var jobs = require('./jobs.js');

function User(jobs) {
    this.jobs = jobs;
}
User.prototype.jobs = null;

/* lots of stuff that I don't think matters */

var jobs_data = jobs.get_users_jobs(req, res);
var u = new User(jobs_data);
/* do lots of stuff with u like prepare reports etc */
But all that happens here is that my jobs data is output in the browser (which makes sense, since I have onResult.send(...)). How can I construct my get_users_jobs function to just return the data from the REST call?
Thanks in advance to anyone who can help!

Instead of passing a response to your get_users_jobs, pass it a callback as a second parameter, something like this:
exports.get_users_jobs = function(options, cb) {
    //...
    //no changes
    //...
    var reqGet = http.request(optionsget, function(res) {
        var output = '';
        res.setEncoding('utf-8');
        res.on('data', function(chunk) {
            output += chunk;
        });
        res.on('end', function() {
            var obj = JSON.parse(output);
            cb(null, {
                status: res.statusCode,
                data: obj
            });
        });
    });
    reqGet.on('error', function(e) {
        cb(e);
    });
    reqGet.end();
};
and then in users.js:
jobs.get_users_jobs(req, function(err, result) {
    if (err) {
        //handle the error
    } else {
        //do whatever you want with the result
    }
});
Notice the calls to the callback inside res.on('end', ...) and reqGet.on('error', ...) - this is the typical node.js callback pattern. You did the same thing, but passed control to the response instead of to your own function.
If you still need to pass the result directly to the response, pass a wrapper function like this as the callback parameter to get_users_jobs:
function(err, response) {
    if (err) {
        console.error('error: ' + err.message);
    } else {
        onResult.send(response.status, response.data);
    }
}
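To connect this back to the original goal, here is a minimal sketch of users.js that builds the User object once the data arrives, assuming the User constructor and the req object from the question:
var jobs = require('./jobs.js');

jobs.get_users_jobs(req, function(err, result) {
    if (err) {
        console.error('could not fetch jobs: ' + err.message);
        return;
    }
    // Everything that depends on the jobs data has to happen
    // inside (or be called from) this callback.
    var u = new User(result.data);
    /* do lots of stuff with u like prepare reports etc */
});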

Related

How I should use promises for db + http calls in node js

I need to implement a system which:
1. Gets data from a parent collection.
2. Checks if a particular key is found in redis.
3. If not, does an HTTP call, gets the JSON data, then sets the cache.
4. If yes, gets the data from the cache.
5. Saves the data into a child collection for the parent id.
I have a working solution using callbacks, something like this:
MongoClient.connect(dsn).then(function(db) {
    parentcollection.findOne({"_id": new ObjectId(pid)}, function(err, data) {
        var redis = require("redis"),
            client = redis.createClient();
        client.on("error", function(err) {
            console.log("Error " + err);
        });
        // If not set
        client.get(cacheKey, function(err, data) {
            // data is null if the key doesn't exist
            if (err || data === null) {
                var options = {
                    host: HOST,
                    port: 80,
                    path: URI
                };
                var req = http.get(options, function(res) {
                    var body = '';
                    res.setEncoding('utf8');
                    res.on('data', function(chunk) {
                        body += chunk;
                        //console.log('CHUNK: ' + chunk);
                    });
                    res.on('end', function() {
                        data = JSON.parse(body);
                        // Get childdata after processing data
                        childcollection.save(childdata, {w:1}, function(cerr, inserted) {
                            db.close();
                        });
                    });
                });
            } else {
                // Get childdata from cache data
                childcollection.save(childdata, {w:1}, function(cerr, inserted) {
                    db.close();
                });
            }
        });
    });
});
I want to use promises (native ones, not external libraries like bluebird / request) instead of callbacks. I checked the manuals and I'm wondering if I need to implement it like this:
var promise1 = new Promise((resolve, reject) => {
    MongoClient.connect(dsn).then(function(db) {
        parentcollection.findOne({"_id": new ObjectId(pid)}, function(err, data) {
        });
    });
}).then(function(data) {
    var promise2 = new Promise((resolve, reject) => {
        var redis = require("redis"),
            client = redis.createClient();
        client.on("error", function(err) {
            console.log("Error " + err);
        });
        // If not set
        client.get(cacheKey, function(err, data) {
            // data is null if the key doesn't exist
            if (err || data === null) {
                var options = {
                    host: HOST,
                    port: 80,
                    path: URI
                };
                var promise3 = new Promise((resolve, reject) => {
                    var req = http.get(options, function(res) {
                        res.setEncoding('utf8');
                        res.on('data', function(chunk) {
                            body += chunk;
                            //console.log('CHUNK: ' + chunk);
                        });
                        res.on('end', function() {
                            data = JSON.parse(body);
                            // Get childdata after processing data
                        });
                    });
                }).then(function(data) {
                    childcollection.save(childdata, {w:1}, function(cerr, inserted) {
                        db.close();
                    });
                });
            } else {
                // Get childdata from cache data
                childcollection.save(childdata, {w:1}, function(cerr, inserted) {
                    db.close();
                });
            }
        });
    }).then(function(data) {
    });
});
This looks as dirty as callback hell. Is there a better approach than using promises like the above?
One issue is that you never call the resolve functions provided to the promise constructor callbacks. Without calling them, those promises never resolve.
I would suggest creating those new promises in separate, reusable functions. On the other hand, some MongoDb methods already return promises when you don't provide the callback argument.
You could do it like below.
// Two promisifying functions:
function promiseClientData(client, key) {
    return new Promise(function (resolve, reject) {
        return client.get(key, function (err, data) {
            return err ? reject(err) : resolve(data); // fulfill the promise
        });
    });
}

function promiseHttpData(options) {
    return new Promise(function (resolve, reject) {
        return http.get(options, function (res) {
            var body = ''; // You need to initialise this...
            res.setEncoding('utf8');
            res.on('data', function (chunk) {
                body += chunk;
                //console.log('CHUNK: ' + chunk);
            });
            res.on('end', function () {
                var data = JSON.parse(body);
                resolve(data); // fulfill the promise
            });
        });
    });
}
// Declare the db variable outside of the promise chain to avoid
// having to pass it through
var db;

// The actual promise chain:
MongoClient.connect(dsn).then(function (dbArg) {
    db = dbArg;
    return parentcollection.findOne({"_id": new ObjectId(pid)}); // returns a promise
}).then(function (data) {
    var redis = require("redis"),
        client = redis.createClient();
    client.on("error", function (err) {
        console.log("Error " + err);
    });
    // Get somehow cacheKey...
    // ...
    return promiseClientData(client, cacheKey);
}).then(function (data) {
    // If not set: data is null if the key doesn't exist
    // Throwing an error will trigger the next `catch` callback
    if (data === null) throw "key does not exist";
    return data;
}).catch(function (err) {
    var options = {
        host: HOST,
        port: 80,
        path: URI
    };
    return promiseHttpData(options);
}).then(function (data) {
    // Get childdata by processing data (in either case)
    // ....
    // ....
    return childcollection.save(childdata, {w:1}); // returns a promise
}).then(function () {
    db.close();
});
I assume that the promises returned by MongoDb are fully compliant. If in doubt, you can turn them into native JavaScript promises by calling Promise.resolve() on them, for instance like this:
return Promise.resolve(parentcollection.findOne({"_id" : new ObjectId(pid)}));
or:
return Promise.resolve(childcollection.save(childdata, {w:1}));
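As a side note, the same chain reads more linearly with async/await, which is just syntax over the same native promises. A sketch, reusing the two promisifying functions above (cacheKey, HOST, URI, childdata and the collections are assumed to exist as in the question):
async function syncChildData() {
    var db = await MongoClient.connect(dsn);
    var parent = await parentcollection.findOne({"_id": new ObjectId(pid)});

    var client = require("redis").createClient();
    client.on("error", function (err) {
        console.log("Error " + err);
    });

    // Try the cache first; fall back to the HTTP call on a miss.
    var data = await promiseClientData(client, cacheKey);
    if (data === null) {
        data = await promiseHttpData({ host: HOST, port: 80, path: URI });
    }

    // Process data into childdata as in the question, then persist it.
    await childcollection.save(childdata, {w: 1});
    db.close();
}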

Limit 60 requests by minute in for loop with node JS

I'm working with the Zoho CRM API. I make two API calls: one to get the ID and the other to update a record. The problem is that I have a limit of 60 calls per minute.
I have an array with lots of info that I use to update the records.
Is there any way I can keep to 60 calls per minute to avoid getting blocked, or any other workaround?
var censados = [...]; // <-- big array
for (var i = 0; i < censados.length; i++) {
    var elcorreo = censados[i].email;
    var elpais = censados[i].country;
    var criteria = "(Account Name:" + elcorreo + ")";
    criteria = encodeURI(criteria);
    var options = {
        hostname: 'crm.zoho.com',
        port: 443,
        path: '/crm/private/json/Potentials/searchRecords?authtoken=apikey&scope=crmapi&newFormat=1&selectColumns=Potentials(POTENTIALID)&criteria=' + criteria,
        method: 'GET'
    };
    https.request(options, function(response) {
        var responseData = '';
        var jsonObject;
        response.setEncoding('utf8');
        response.on('data', function(chunk) {
            responseData += chunk;
            jsonObject = JSON.parse(chunk);
        });
        response.once('error', function(err) {
            // Some error handling here, e.g.:
            res.serverError(err);
        });
        response.on('end', function() {
            try {
                //console.log(responseData.response.result.Potentials.row.FL.content)
                console.log(jsonObject.response.result);
                if (jsonObject.response.result.Potentials.row.FL.content) {
                    var IdPotential = jsonObject.response.result.Potentials.row.FL.content;
                    // START UPDATE
                    var xmlData = '\
                        <Potentials>\
                        <row no="1">\
                        <FL val="País de Recidencia">' + elpais + '</FL>\
                        </row>\
                        </Potentials>';
                    xmlData = encodeURI(xmlData);
                    var options = {
                        hostname: 'crm.zoho.com',
                        port: 443,
                        path: '/crm/private/xml/Potentials/updateRecords?authtoken=apikey&scope=crmapi&id=' + IdPotential + '&xmlData=' + xmlData,
                        method: 'POST'
                    };
                    https.request(options, function(response) {
                        response.setEncoding('utf8');
                        response.on('data', function(chunk) {
                            responseData += chunk;
                            //jsonObject = JSON.parse(chunk);
                        });
                        response.once('error', function(err) {
                            // Some error handling here, e.g.:
                            res.serverError(err);
                        });
                        response.on('end', function() {
                            try {
                                console.log(responseData);
                            } catch (e) {
                                console.log(e);
                            }
                        });
                    }).end();
                }
                // END UPDATE
            } catch (e) {
                console.log(e);
            }
        });
    }).end();
}
It seems that your code is not valid: you have to provide the auth token for your account (see Generate Auth Token).
API calls are limited on your account (usually 250-500 requests per user, depending on your subscription; see API Limit), so I suggest you use the mass update call (version=4).
You can play with the API using the Zoho CRM Console.
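To address the rate-limit question directly: one simple approach is to space the iterations out with setTimeout so the loop never exceeds the quota. A minimal sketch; the processRecord helper standing in for the search-then-update logic above is hypothetical:
var censados = [/* ... big array ... */];

// Each record costs two API calls (search + update), so pacing one
// record every two seconds stays under 60 calls per minute.
var INTERVAL_MS = 2000;

// Hypothetical helper wrapping the search-then-update logic above.
function processRecord(record) {
    // ...
}

var i = 0;
function tick() {
    if (i >= censados.length) return;
    processRecord(censados[i]);
    i++;
    setTimeout(tick, INTERVAL_MS);
}
tick();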

Using Q library for HTTP api response testing in nodejs

How do I use Q to make a step wait until the previous response has come back from the server?
What I am trying to do here is compare the responses from a test server and a production server for the same request.
I get the responses back from both servers, but I'm unable to compare them, since the assert statement is executed before the responses come back.
Does anyone know what I am doing wrong? Here's the code.
var q = require('q');
var path = '';
var prodResponse = '';
var tstResponse = '';

Q.fcall(readFile())
    .then(secondFunction())
    .then(thirdFunction())
    .then(function() {
        console.log("prodResponse: " + prodResponse);
        console.log("tstResponse: " + tstResponse);
        assert.strictEqual(prodResponse, tstResponse);
    })
    .catch(function() {
        console.log('error occurred');
    })
    .done();

function readFile() {
    fs.readFile('hostname.json', function (err, data) {
        if (err) return console.error(err);
        path = JSON.parse(data);
        return JSON.parse(data);
    });
}
function secondFunction(){
var prodOptions = {
hostname: 'somehostname.com',
port: 80,
path: "/path?"+path.path,
method: 'POST',
headers: {
'Content-Type': 'application/json;charset=UTF-8'
},
auth : ''
};
return http.request(prodOptions, function(res) {
console.log('Prod');
res.setEncoding('utf8');
res.on('data', function (chunk) {
prodResponse = chunk;
return chunk;
});
res.on('end', function() {
console.log('No more data in response.');
})
}).on('error', function(e) {
console.log('problem with request: ' + e.message);
}).end();
}
function thirdFunction(){
// same a second, only difference is the response http.
}
There are multiple errors in your code.
Q.fcall(readFile())
Your q variable is q and not Q, so this line will crash because Q is undefined (JavaScript is case sensitive).
Then, readFile doesn't return a promise (in fact, it returns nothing), so the q library has nothing it can use to wait for the asynchronous work to finish; the then callbacks will be fired immediately. The same goes for passing secondFunction() and thirdFunction() to then: that calls them right away, so pass the function references (.then(secondFunction)) instead.
You can use Q.ninvoke to make your readFile function return a promise, and you can use Q.defer to create and return a promise from your secondFunction:
var Q = require('q');
var path = '';
var prodResponse = '';
var tstResponse = '';

readFile()
    .then(secondFunction)
    .then(thirdFunction)
    .then(function() {
        console.log("prodResponse: " + prodResponse);
        console.log("tstResponse: " + tstResponse);
        assert.strictEqual(prodResponse, tstResponse);
    })
    .catch(function() {
        console.log('error occurred');
    })
    .done();

function readFile() {
    return Q.ninvoke(fs, 'readFile', 'hostname.json').then(function (data) {
        path = JSON.parse(data);
        return path;
    }, function (err) {
        console.error(err);
    });
}

function secondFunction() {
    var prodOptions = {
        hostname: 'somehostname.com',
        port: 80,
        path: "/path?" + path.path,
        method: 'POST',
        headers: {
            'Content-Type': 'application/json;charset=UTF-8'
        },
        auth: ''
    };
    var defer = Q.defer();
    var chunks = [];
    http.request(prodOptions, function(res) {
        console.log('Prod');
        res.setEncoding('utf8');
        res.on('data', function (chunk) {
            chunks.push(chunk);
        });
        res.on('end', function() {
            console.log('No more data in response.');
            prodResponse = chunks.join('');
            defer.resolve(prodResponse);
        });
    }).on('error', function(e) {
        console.log('problem with request: ' + e.message);
        defer.reject(e);
    }).end();
    return defer.promise;
}

function thirdFunction() {
    // same as second, the only difference is the response http.
}
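Since secondFunction and thirdFunction differ only in their request options, the promise-returning logic could also be factored into one helper. A sketch, assuming prodOptions and a mirror tstOptions object for the test server are in scope:
function promiseRequest(options) {
    var defer = Q.defer();
    var chunks = [];
    http.request(options, function (res) {
        res.setEncoding('utf8');
        res.on('data', function (chunk) {
            chunks.push(chunk);
        });
        res.on('end', function () {
            defer.resolve(chunks.join(''));
        });
    }).on('error', function (e) {
        defer.reject(e);
    }).end();
    return defer.promise;
}

// Run both requests, then compare the bodies:
readFile().then(function () {
    return Q.all([promiseRequest(prodOptions), promiseRequest(tstOptions)]);
}).spread(function (prodResponse, tstResponse) {
    assert.strictEqual(prodResponse, tstResponse);
}).done();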

Node.js http.request in loop

I am using a function to save data, using http.request. It works fine, but if I call the same function in a loop, some of the data doesn't get saved in the database, and I also get a parse error for some responses.
How can I call the http.request function in a loop?
for (var i = 1; i <= 23; i++) {
    turn_timer(i);
}

function turn_timer(nos) {
    try {
        var str = "num=" + nos;
        var len = str.length;
        win_settings.headers = {
            'Content-length': len,
            'Content-Type': 'application/x-www-form-urlencoded'
        };
        var request = http.request(win_settings, function(response) {
            response.on('data', function(data) {
            });
            response.on('error', function(err) {
            });
            response.on('end', function() {
            });
        });
        request.on('error', function(err) {
        });
        request.write(str + "\0");
        request.end();
    } catch (err) {
        console.log(err);
    }
}
Check the scope of the variable that stores your message: an async function call may overwrite it before the previous request has finished.
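A minimal sketch of that capture problem, unrelated to any particular API: every callback closes over the same var binding, so by the time the callbacks run they all see the final value.
// With var, all three timeouts log 4, because they share one binding:
for (var i = 1; i <= 3; i++) {
    setTimeout(function () { console.log(i); }, 10); // logs 4, 4, 4
}

// Giving each iteration its own binding fixes it:
for (var j = 1; j <= 3; j++) {
    (function (n) {
        setTimeout(function () { console.log(n); }, 10); // logs 1, 2, 3
    })(j);
}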
I believe your problem is caused by using a plain for loop instead of a more asynchronous approach. Here is a quick attempt to solve it. I have omitted some of your code, as it seemed to be incomplete; I have kept the important parts and added a few things based on an answer to a similar question.
var http = require('http'),
    async = require('async');

function turn_timer(n, callback) {
    var str = "num=" + n,
        len = str.length,
        req;
    win_settings.headers = {
        'Content-length': len,
        'Content-Type': 'application/x-www-form-urlencoded'
    };
    req = http.request(win_settings, function(response) {
        // ...
        callback(null);
    });
    req.on('error', function(err) {
        // ...
        callback(err);
    });
    req.end();
}

async.timesSeries(23, function (n, next) {
    turn_timer(n, function(err) {
        next(err);
    });
});
You can read more about async.timesSeries here: https://github.com/caolan/async#timesseriesn-callback
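For completeness, async.timesSeries also accepts a final callback that fires once all iterations have finished (or on the first error), which is a natural place to log completion or close connections. A small sketch:
async.timesSeries(23, function (n, next) {
    turn_timer(n, next);
}, function (err) {
    if (err) console.error('a request failed:', err);
    else console.log('all 23 requests finished');
});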

How can you synchronize this process using nodejs?

I need to iterate over an array, and for each item apply an operation by making an HTTP call.
The difficulty is that I need to synchronize this process in order to call a callback after the loop (a callback that receives the array once all the operations executed by the HTTP calls are done).
Let's consider this short example:
function customName(name, callback) {
    var option = {
        host: 'changename.com',
        path: '/' + name,
        headers: { 'Content-Type': 'application/json' },
        port: 80,
        method: 'POST'
    };
    var req = http.request(option, function(res) {
        var output = "";
        res.on('data', function (chunk) {
            output += chunk;
        });
        res.on('end', function() {
            var obj = JSON.parse(output);
            callback(obj.res);
        });
    });
    req.on('error', function(e) {
        console.error(e.message);
    });
    req.end();
}
function changeNames(OldNames, callback) {
    var Res = [];
    for (name in OldNames) {
        customName(OldNames[name], function(new_name) { Res.push(new_name); });
    }
    callback(Res);
}

var Names = ['toto', 'tata', 'titi'];
changeNames(Names, function(Names) {
    //...
});
Here the loop finishes before the first HTTP call completes, so the Res array is empty.
How can we synchronize this execution?
I know it's not great to synchronize processing in Node.js. Do you think it would be better to send the names one by one to the client rather than building an array?
You can use async.map for that. You pass it your list of names, and it will run the getOriginalName function (which you mistakenly called customName, I think) for each name and gather the results; in the end it will call a function with an array of all the results:
var http = require('http');
var async = require('async');

function getOriginalName(name, callback) {
    var option = {
        host: 'changename.com',
        path: '/' + name,
        headers: { 'Content-Type': 'application/json' },
        port: 80,
        method: 'POST'
    };
    var req = http.request(option, function(res) {
        var output = "";
        res.on('data', function (chunk) {
            output += chunk;
        });
        res.on('end', function() {
            var obj = JSON.parse(output);
            callback(null, obj.res);
        });
    });
    req.on('error', function(e) {
        callback(e);
    });
    req.end();
}

function changeNames(OldNames, callback) {
    async.map(OldNames, getOriginalName, callback);
}

var Names = ['toto', 'tata', 'titi'];
changeNames(Names, function(err, NewNames) {
    console.log('N', NewNames);
});
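Without the async dependency, the same fan-out-and-gather pattern can be expressed with native promises. A sketch, wrapping the existing getOriginalName in a promise; Promise.all preserves the input order, like async.map does:
function getOriginalNamePromise(name) {
    return new Promise(function (resolve, reject) {
        getOriginalName(name, function (err, newName) {
            if (err) reject(err);
            else resolve(newName);
        });
    });
}

Promise.all(Names.map(getOriginalNamePromise))
    .then(function (NewNames) {
        console.log('N', NewNames);
    })
    .catch(function (err) {
        console.error(err);
    });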
