I'm working with the Zoho CRM API, making two API calls per record: one to get the ID and another to update the record. The problem is that I have a limit of 60 calls per minute.
I have an array with lots of info that I use to update the records.
Is there any way I can stay within 60 calls per minute to avoid getting blocked, or is there any other workaround?
var https = require('https');

var censados = [/* ... */]; // <-- big array

for (let i = 0; i < censados.length; i++) {
  // let/const keep each iteration's values for the async callbacks below
  const elcorreo = censados[i].email;
  const elpais = censados[i].country;

  const criteria = encodeURI("(Account Name:" + elcorreo + ")");

  const options = {
    hostname: 'crm.zoho.com',
    port: 443,
    path: '/crm/private/json/Potentials/searchRecords?authtoken=apikey&scope=crmapi&newFormat=1&selectColumns=Potentials(POTENTIALID)&criteria=' + criteria,
    method: 'GET'
  };

  https.request(options, function(response) {
    var responseData = '';
    response.setEncoding('utf8');
    response.on('data', function(chunk) {
      responseData += chunk;
    });
    response.once('error', function(err) {
      // Some error handling here, e.g.:
      res.serverError(err);
    });
    response.on('end', function() {
      try {
        // parse once the whole body has arrived, not per chunk
        var jsonObject = JSON.parse(responseData);
        console.log(jsonObject.response.result);
        if (jsonObject.response.result.Potentials.row.FL.content) {
          var IdPotential = jsonObject.response.result.Potentials.row.FL.content;
          // START UPDATE
          var xmlData = encodeURI('<Potentials><row no="1"><FL val="País de Residencia">' + elpais + '</FL></row></Potentials>');
          var updateOptions = {
            hostname: 'crm.zoho.com',
            port: 443,
            path: '/crm/private/xml/Potentials/updateRecords?authtoken=apikey&scope=crmapi&id=' + IdPotential + '&xmlData=' + xmlData,
            method: 'POST'
          };
          https.request(updateOptions, function(updateResponse) {
            var updateData = '';
            updateResponse.setEncoding('utf8');
            updateResponse.on('data', function(chunk) {
              updateData += chunk;
            });
            updateResponse.once('error', function(err) {
              // Some error handling here, e.g.:
              res.serverError(err);
            });
            updateResponse.on('end', function() {
              console.log(updateData);
            });
          }).end();
          // END UPDATE
        }
      } catch (e) {
        console.log(e);
      }
    });
  }).end();
}
Seems that your code is not valid. You have to provide an APIKEY for your account:
Generate Auth Token
API calls are limited on your account, usually to 250-500 requests per user depending on your subscription (see the API Limit), so I suggest you use the mass update call (version=4).
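For the question's loop, that would mean collecting the IDs first and then sending the country updates in batches instead of one call per record. A minimal sketch, assuming the version=4 mass-update format with an Id field per row; `updates` is a hypothetical array of { id, country } pairs built from the search results, and the field name and the batch-size cap should be verified against your account's documentation:

var https = require('https');

// Hypothetical input, built after the search calls:
// var updates = [{ id: '1234', country: 'Spain' }, ...];
function massUpdate(batch) {
  var rows = batch.map(function (r, i) {
    return '<row no="' + (i + 1) + '">' +
             '<FL val="Id">' + r.id + '</FL>' +
             '<FL val="País de Residencia">' + r.country + '</FL>' +
           '</row>';
  }).join('');
  var xmlData = encodeURIComponent('<Potentials>' + rows + '</Potentials>');

  var options = {
    hostname: 'crm.zoho.com',
    port: 443,
    path: '/crm/private/xml/Potentials/updateRecords?authtoken=apikey&scope=crmapi&version=4&xmlData=' + xmlData,
    method: 'POST'
  };
  https.request(options, function (response) {
    var body = '';
    response.setEncoding('utf8');
    response.on('data', function (chunk) { body += chunk; });
    response.on('end', function () { console.log(body); });
  }).on('error', console.error).end();
}

// One call per 100 records instead of one call per record.
for (var i = 0; i < updates.length; i += 100) {
  massUpdate(updates.slice(i, i + 100));
}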
You can play with the API using the Zoho CRM Console.
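If you prefer to keep the per-record calls from the question, the other option is simply to pace them. A minimal sketch, not from the answer above; `processRecord` is a hypothetical wrapper around the search-then-update pair for one entry:

var queue = censados.slice(); // records still to process

// Each record costs two API calls (search + update), so draining one
// record every 2100 ms stays just under 60 calls per minute.
var timer = setInterval(function () {
  var record = queue.shift();
  if (!record) { return clearInterval(timer); }
  processRecord(record); // hypothetical: the search + update from the question
}, 2100);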
I use Node.js and the request lib to make some requests to an API.
I understand now that all requests are async, so the loop doesn't "wait" for the result of the GET call, and by the time the callbacks run the index of my loop is always the same.
I was wondering if there is any simple way (without any lib) to wait for the response of the request call?
For now, my code is this:
for (var i in entries) {
  var entryId = entries[i]['id'];
  var options = {
    url: 'https://api.com/' + entryId + '/get/status',
    method: 'GET',
    headers: {
      'Authorization': auth
    }
  };
  console.log(' ENTRY ID > ' + entryId);
  request(options, function(error, response, body) {
    var parsed = JSON.parse(body);
    if (parsed.status.code == 200) {
      var id = parsed.status.id;
      var data = JSON.stringify({ id: id }); // an object, not an array
      // shows the same entryId every time (the problem)
      console.log(' > MY ID : ' + id + ' - ' + entryId);
      options = {
        host: hostname,
        port: 80,
        path: '/path/function2',
        method: 'PUT'
      };
      var post = http.request(options, function(res) {
        var putBody = '';
        res.on('data', function(d) {
          putBody += d;
        });
        res.on('end', function() {
          console.log('> DONE');
        });
      }).on('error', function(e) {
        console.log(e);
      });
      post.write(data);
      post.end();
    }
  });
}
You are looking for async/await.
Wrap your logic inside an async function; then you can await each promise as it resolves.
const request = require('request-promise')

async function foo (items) {
  for (const item of items) {
    try {
      let res = await request('http://localhost:8080/')
      // res contains your response data.
    } catch (e) {
      console.error(e)
    }
  }
}

foo([/* data */])
Just use the promisified version of the request module.
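Applied to the loop from the question, both calls can then run strictly in sequence. A sketch assuming request-promise for both requests; `entries`, `auth`, `hostname`, and the URLs are placeholders carried over from the question:

const request = require('request-promise');

async function processEntries(entries) {
  for (const entry of entries) {
    try {
      const body = await request({
        url: 'https://api.com/' + entry.id + '/get/status',
        method: 'GET',
        headers: { 'Authorization': auth }
      });
      const parsed = JSON.parse(body);
      if (parsed.status.code == 200) {
        // The PUT only fires after its GET has resolved, so entry.id
        // is always the id the GET was made with.
        await request({
          url: 'http://' + hostname + '/path/function2',
          method: 'PUT',
          body: JSON.stringify({ id: parsed.status.id })
        });
      }
    } catch (e) {
      console.error(e);
    }
  }
}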
You can also use Promises to wait for your async code to finish.
function asyncCode(msg, cb) {
  setTimeout(function() { cb(msg); }, 1000);
}

var p1 = new Promise(function(resolve) {
  asyncCode("my asyncCode is running", resolve);
});

p1.then(function(msg) {
  console.log(msg);
}).then(function() {
  console.log("Hey I'm next");
});

console.log("SyncCode, Async code are waiting until I'm finished");
I'm trying to execute the following code inside AWS Lambda; it only makes a POST HTTP request to an ElasticSearch server.
The problem I'm facing is that the Node.js request seems to have a read timeout, and the response is almost always cut off and an error thrown. I've checked that the problem is not related to the AWS Lambda timeout, which is set to 10 seconds; the code throws the error in less than a second.
As you can see, I've tried to set a 5-second timeout, but I think that's a connection timeout and not a read timeout.
What am I doing wrong?
var http = require('http');

exports.handler = (event, context, callback) => {
  var options = {
    hostname: '172.31.40.10',
    port: 9200,
    path: '/articles/es/_search?_source=reference',
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    }
  };
  var req = http.request(options, function(res) {
    res.setEncoding('utf8');
    res.on('data', function (body) {
      var parsed = JSON.parse(body);
      var b = [];
      for (var i = 0; i < parsed.hits.hits.length; i++) {
        b.push(parsed.hits.hits[i]._source.reference);
      }
      var response = {
        statusCode: '200',
        body: JSON.stringify(b),
        headers: {
          'Content-Type': 'application/json',
        }
      };
      callback(null, response);
    });
  });
  req.on('error', function(e) {
    callback(new Error('failure'));
  });
  req.setTimeout(5000, function() { req.abort(); });
  req.on('socket', function (socket) {
    socket.setTimeout(5000);
    socket.on('timeout', function() {
      req.abort();
    });
  });
  req.write(MY_QUERY_HERE);
  req.end();
};
I think you should let the incoming stream of data finish before performing any data manipulation.
Example:
var http = require('http');
//var _ = require('underscore');

function MyPostRequest(callback) {
  var options = {
    hostname: '172.31.40.10',
    port: 9200,
    path: '/articles/es/_search?_source=reference',
    method: 'POST',
    headers: { 'Content-Type': 'application/json' }
  };
  var req = http.request(options, function(res) {
    var tmpstore = ''; // temp. data storage
    //:Store the continuous incoming data stream
    res.on('data', function(d) { tmpstore += d; });
    //:Data reception is done, use it now...
    res.on('end', function() {
      var parsed = JSON.parse(tmpstore);
      var b = [];
      for (var i = 0; i < parsed.hits.hits.length; i++) {
        b.push(parsed.hits.hits[i]._source.reference);
      }
      /* //I suggest using the underscore module instead:
      _.each(parsed.hits.hits, function(element, index, list) {
        b.push(element._source.reference);
      });
      */
      var response = {
        statusCode: '200',
        body: JSON.stringify(b),
        headers: { 'Content-Type': 'application/json' }
      };
      callback(null, response);
    });
  });
  //:Request failed
  req.on('error', function(e) { callback(e, null); });
  req.write(MY_QUERY_HERE); // send the POST body, as in the question
  req.end(); // without end() the request is never actually sent
}
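Wired into the Lambda handler from the question, usage would look something like this (a sketch; error handling is up to you):

exports.handler = (event, context, callback) => {
  MyPostRequest(function (err, response) {
    if (err) {
      return callback(err);
    }
    callback(null, response);
  });
};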
I am running a Drill query using Node.js. It is taking more than 500 ms to retrieve only 10 records. I am using the http request method of Node.js.
function executeService(params) {
  return new Promise((resolve, reject) => {
    try {
      var http = require("http");
      var serverOptions = {
        hostname: "127.0.0.1",
        port: 8047,
        path: "/query.json",
        method: "POST",
        headers: {
          'Content-Type': 'application/json',
        }
      };
      var req = http.request(serverOptions, function (res) {
        if (params && params.response) {
          res.setEncoding('binary');
        } else {
          res.setEncoding('utf8');
        }
        var body = '';
        res.on('data', function (chunk) {
          body += chunk;
        });
        res.on('end', function () {
          resolve(body);
        });
      });
      req.on('error', function (err) {
        reject(err);
      });
      req.write(params);
      req.end();
    } catch (err) {
      reject(err);
    }
  });
}

var params = '{"query": "select * from mongo.school.student limit 10", "queryType": "SQL"}';
executeService(params).then(function (res) {
  console.log("res>>>>>>>>>>>" + res);
});
Is this the right way to run a Drill query from Node.js? If yes, how can I decrease the query time?
There is probably not much you can do about this on your end. If this is still an issue for you, get in touch with the Drill team on the mailing lists (https://drill.apache.org/mailinglists/) so that they can create a ticket and work on improving it.
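Before filing anything, it may be worth confirming that the time is spent in Drill itself rather than on the Node side. A quick, hypothetical check with console.time around the existing call; compare the result against the query profile in Drill's web UI on port 8047:

console.time('drill-query');
executeService(params).then(function (res) {
  console.timeEnd('drill-query'); // total HTTP round-trip time
  console.log("res>>>>>>>>>>>" + res);
});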
I'm pretty new at Node, so I may be going about this all wrong (if I am, don't be afraid to say). I'd like to be able to create an object made of data from several different REST servers and return the final object to my calling function. For each REST API I have a function that looks a bit like this:
jobs.js
exports.get_users_jobs = function(options, onResult) {
  var user = options.params.user;
  console.log("Get jobs for user: " + user);

  var optionsget = {
    host: config.jobs_rest_host,
    port: config.jobs_rest_port,
    path: "/jobs3/user/" + user,
    method: "GET",
    headers: {
      'Content-Type': 'application/json'
    }
  };

  var reqGet = http.request(optionsget, function(res) {
    var output = '';
    res.setEncoding('utf-8');
    res.on('data', function(chunk) {
      output += chunk;
    });
    res.on('end', function() {
      var obj = JSON.parse(output);
      onResult.send(res.statusCode, obj);
    });
  });

  reqGet.on('error', function(e) {
    console.error('error: ' + e.message);
  });
  reqGet.end();
};
That works fine when I'm calling it directly from the browser, but now I'd like to call get_users_jobs from another function, take the data and plonk that into my uber object. So I've created something a bit like this (I've only put jobs in there for now, but soon there will be other variables):
users.js
var jobs = require('./jobs.js');

function User(jobs) {
  this.jobs = jobs;
}
User.prototype.jobs = null;

/* lots of stuff that I don't think matters */

jobs_data = jobs.get_users_jobs(req, res);
var u = new User(jobs_data);
/* do lots of stuff with u, like prepare reports etc. */
But all that happens here is that my jobs data is output in the browser (which makes sense, since I have onResult.send(blah)). How can I construct my get_users_jobs function to just return the data from the REST call?
Thanks in advance to anyone that can help!
Instead of passing a response to your get_users_jobs, pass it a callback as a second parameter, something like this:
exports.get_users_jobs = function(options, cb) {
  //...
  //no changes
  //...
  var reqGet = http.request(optionsget, function(res) {
    var output = '';
    res.setEncoding('utf-8');
    res.on('data', function(chunk) {
      output += chunk;
    });
    res.on('end', function() {
      var obj = JSON.parse(output);
      cb(null, {
        status: res.statusCode,
        data: obj
      });
    });
  });
  reqGet.on('error', function(e) {
    cb(e);
  });
  reqGet.end();
};
and then in users.js:
jobs.get_users_jobs(req, function(err, result) {
  if (err) {
    //handle the error
  } else {
    //do whatever you want with the result
  }
});
Notice the call to the callback inside res.on('end', ...) and reqGet.on('error', ...): this is a typical Node.js callback pattern. You did the same thing, but passed control to the response instead of your own function.
If you still need to pass the result directly to the response, add a wrapper function that passes
function(err, result) {
  if (err) {
    console.error('error: ' + err.message);
  } else {
    onResult.send(result.status, result.data);
  }
}
as the callback parameter to get_users_jobs.
I need to iterate over an array, and for each item apply an operation by making an HTTP call.
The difficulty is that I need to synchronize this process in order to call a callback after the loop (a callback that receives the array once all the operations from the HTTP calls have finished).
Let's consider this short example:
function customName(name, callback) {
  var option = {
    host: 'changename.com',
    path: '/' + name,
    headers: { 'Content-Type': 'application/json' },
    port: 80,
    method: 'POST'
  };
  var req = http.request(option, function(res) {
    var output = "";
    res.on('data', function (chunk) {
      output += chunk;
    });
    res.on('end', function() {
      var obj = JSON.parse(output);
      callback(obj.res);
    });
  });
  req.on('error', function(e) {
    console.error(e.message);
  });
  req.end();
}

function changeNames(OldNames, callback) {
  var Res = [];
  for (name in OldNames) {
    customName(OldNames[name], function(new_name) { Res.push(new_name); });
  }
  callback(Res);
}

var Names = ['toto', 'tata', 'titi'];
changeNames(Names, function(Names) {
  //...
});
Here the loop finishes before the first HTTP call completes, so the Res array is empty.
How can we synchronize this execution?
I know it's not very good to synchronize processing in Node.js. Do you think it would be better to send the names one by one to the client rather than building an array?
You can use async.map for that. You pass it your list of names; it will run the getOriginalName function (which you mistakenly called customName, I think) for each name and gather the results, and in the end it will call a function with the array of results:
var http = require('http');
var async = require('async');

function getOriginalName(name, callback) {
  var option = {
    host: 'changename.com',
    path: '/' + name,
    headers: { 'Content-Type': 'application/json' },
    port: 80,
    method: 'POST'
  };
  var req = http.request(option, function(res) {
    var output = "";
    res.on('data', function (chunk) {
      output += chunk;
    });
    res.on('end', function() {
      var obj = JSON.parse(output);
      callback(null, obj.res);
    });
  });
  req.on('error', function(e) {
    callback(e);
  });
  req.end();
}

function changeNames(OldNames, callback) {
  async.map(OldNames, getOriginalName, callback);
}

var Names = ['toto', 'tata', 'titi'];
changeNames(Names, function(err, NewNames) {
  console.log('N', NewNames);
});
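If the target server can't take all the requests in parallel, async.mapLimit is a drop-in variation (not part of the original answer) that takes the same arguments plus a concurrency cap:

function changeNames(OldNames, callback) {
  // at most 5 requests in flight at a time
  async.mapLimit(OldNames, 5, getOriginalName, callback);
}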