This is an extended question from Throttling requests to 3rd party APIs with Node. You can see my full code in that post if you want to see the sub-functions and a little backstory of what I'm trying to do.
What I need to do is delay requests to an API. I am hitting the server's maximum requests per second. I am trying to use Promise.delay, but it doesn't seem to hold anything back. This is how I'm using it:
function getAllRegions(req, res) {
  getAllRegionsHrefs().then(function (hrefs) {
    var chunks = _.chunk(hrefs, 25);
    return Promise.map(chunks, function (chunk) {
      return Promise.map(chunk, getRegion).then(function (getRegionResults) {
        for (var item in getRegionResults) {
          Promise.map(getRegionResults[item].constellations, getConstellationInfo).then(function (constellationInfo) {
            var chunks = _.chunk(constellationInfo, 150);
            return Promise.map(chunks, function (chunk) {
              return Promise.map(chunk, getSystem).delay(20000);
            })
          }).delay(20000);
        }
      }).delay(200000);
    });
  });
}
This will make just about 9k API requests and builds a universe out of regions, constellations and systems. Is there a better way of throttling the nested requests? At each level of nesting, the number of calls goes up exponentially.
Update
It still doesn't seem to want to wait. Did I mess up the nesting again? I added one more function that was needed for the flow. Also, the chunking seemed to wrap everything in a double array, so I took it out.
function getAllRegions(req, res) {
  getAllRegionsHrefs().then(function (hrefs) {
    return Promise.mapSeries(hrefs, function (href) {
      getRegion(href).then(function (regions) {
        return Promise.mapSeries(regions.constellations, function (constellation) {
          getConstellationInfo(constellation).then(function (constellationInfo) {
            return Promise.map(constellationInfo, getSystem).delay(15000);
          });
        });
      });
    });
  });
}
Error (per request):
Unhandled rejection RequestError: Error: connect ETIMEDOUT
Promise.map starts all requests in parallel, so your code runs like:
requests -> delay
requests -> delay
requests -> delay
....
But the desired behavior should be sequential:
requests -> delay -> requests -> delay -> requests -> delay ...
each chunk of requests should wait for delay of previous chunk.
You could achieve this by chaining promises:
startRequests(chunks[0])
  .then(function() {
    return startRequests(chunks[1])
  }).then(function() {
    return startRequests(chunks[2])
  }) // ...
or use mapSeries
function getAllRegions(req, res) {
  getAllRegionsHrefs().then(function (hrefs) {
    return Promise.mapSeries(hrefs, function (href) {
      return getRegion(href).then(function (region) {
        var chunks = _.chunk(region.constellations, 150);
        return Promise.mapSeries(chunks, function (chunk) {
          return Promise.map(chunk, getSystem).delay(10000);
        });
      });
    });
  });
}
Only the innermost requests are chunked; the others are started one by one.
Edit: as Bergi said, returning the resulting promise is required for mapSeries (and other promise utilities) to work. The outermost promise can also be returned for handling errors or chaining more promises:
function getAllRegions(req, res) {
  return getAllRegionsHrefs().then(function (hrefs) {
    //...
  });
}

getAllRegions().then(systems => {
  //next step
}).catch(err => {
  //handle errors
})
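For reference, here is a minimal, self-contained sketch of that chunk-then-delay pattern with Bluebird and lodash. The fetchItem function, the chunk size and the 200 ms delay are placeholders rather than anything from the original code; the point is that each chunk's requests run in parallel, but the next chunk only starts after the previous chunk's delay has elapsed.

const Promise = require('bluebird');
const _ = require('lodash');

// Hypothetical request function; stands in for getRegion / getSystem etc.
function fetchItem(href) {
  return Promise.resolve('result for ' + href);
}

function fetchAllThrottled(hrefs) {
  const chunks = _.chunk(hrefs, 25); // 25 requests per burst (placeholder size)
  // mapSeries handles one chunk at a time: each chunk's requests run in parallel,
  // then we wait before the next chunk starts.
  return Promise.mapSeries(chunks, function (chunk) {
    return Promise.map(chunk, fetchItem).delay(200); // delay between bursts; tune to the API's limit
  }).then(_.flatten); // flatten the per-chunk arrays back into a single result array
}

// Usage sketch:
// fetchAllThrottled(['href1', 'href2', 'href3']).then(results => console.log(results));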
Related
I'm trying to make several function calls which will aggregate information and then act upon that info. Some calls make HTTP requests, which are slow. Others are much faster.
All my function calls work and build the necessary data, but I need to wait on the HTTP request before moving forward.
I've tried promises, async/await etc.
const http = require('http');

async function operation() {
  return new Promise(function(resolve, reject) {
    const url = 'http://www.google.com';
    http.get(url, (resp) => {
      let data = '';
      resp.on('data', (chunk) => {
        data += chunk;
      });
      resp.on('end', () => {
        resolve(resp.statusCode);
      });
    }).on("error", (err) => {
      reject(err);
    });
  })
}

async function app() {
  var a = await operation()
  console.log('a: ', a) // this returns 200
}

function test() {
  console.log('test: ', 'THIS SHOULD COME AFTER')
}

app()
test()
I need the result of the test function to come after app. I'm seeing "THIS SHOULD COME AFTER" printed before the 200.
As I mentioned in the comments, app is an asynchronous function whereas test is synchronous. This means if you call app(); test(); test will always complete before app resolves. However, keep in mind that Promises will eventually resolve or reject.
This means, to call the synchronous function after the asynchronous, you either need to call test within app, like so:
async function app() {
  //add try-catch to handle rejection of promise
  try {
    var a = await operation()
    console.log('a: ', a) // this returns 200
    // now you can call test after app
    test();
  } catch (err) {
    //handle error case to trigger rejection of promise
    throw new Error(err)
  }
}
or, remember that Promises are thenable:
app()
.then(someReturnedValue => test())
.catch(err => /*handle errors*/)
You mention in the comments that you have several app-like functions that will be aggregated before test. You could consider using Promise.all, which takes in an array of Promises and will return an array of data corresponding to each resolved Promise, or catch an error if any of the Promises reject.
Promise.all([app(), app1(), app2()])
  .then(arrayOfReturnedValues => test())
  .catch(err => /*handle errors*/)
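If you prefer await over then chaining, here is a sketch of the same aggregation, assuming app1 and app2 are functions like app that each return a promise:

async function main() {
  try {
    // Wait for all of the slow, promise-returning functions to finish...
    const values = await Promise.all([app(), app1(), app2()]);
    // ...then run the synchronous step with the aggregated results.
    test();
    console.log('aggregated results:', values);
  } catch (err) {
    console.error('one of the calls failed:', err);
  }
}

main();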
var urlArr = [
  //ex url_for_site0 = 'https://www.google.com'
  url_for_site0,
  url_for_site1,
  url_for_site2,
  url_for_site3,
  ...
  url_for_site50
];

urlArr.forEach(function(url, index) {
  request(url, function(err, res, body) {
    if (err) console.log(index + err);
    else console.log(index + " success");
  });
});
I get different, unordered results and errors every time I execute my app.
Example:
1 error : socket hang out
21 error : socket hang out
17 error : socket hang out
1 error : socket hang out
19 error : socket hang out
...(omission)
5 success
15 success
45 success
50 success
11 success
37 success
Every time I get the results, they are in a different order.
Is this because I made too many requests simultaneously?
When I request one by one, there's no error.
Example:
request(url_for_site0)
and restart program
request(url_for_site1)
and restart program
request(url_for_site2)
...
Node.js events are all handled in a single event loop and are non-blocking in nature.
This happened to me once when I tried to run multiple SQL queries. When I did it using C#, there was no problem at all. However, Node.js gave me behaviour similar to yours.
I am not sure if this is the best solution to the problem, but here is how I fixed it for my SQL calls: I used async.waterfall so that the whole process runs sequentially. Each function is run one by one, with its return value piped to the next function, so you can do further work at each step. The usage of this library is not very straightforward; you can refer to this link to better understand how async.waterfall works, then adapt it to fit your solution.
https://gist.github.com/dineshsprabu/e6c1cf8f2ca100a8f5ae
Here is roughly what I visualize your solution looking like:
var async = require('async');

async.waterfall(
  [
    function (callback) {
      urlArr(url, index, function (returnVal) {
        //Do something with the returnVal
        callback(null, returnVal);
      });
    },
    function (returnVal, callback) {
      //the returnVal from the first function gets passed here once it finishes
      urlArr(url2, index2, function (returnVal) {
        //Do something with the returnVal
        callback(null, returnVal);
      });
    },
    function (returnVal, callback) {
      //and so on ...
    }
  ],
  function (err) {
    //console.log(err);
  });

//define your function and enable callback
//you will need to include an extra third argument to receive the callback
function urlArr(url, index, callback) {
  //your code
  return callback(returnValue);
}
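A hypothetical body for urlArr, just to make the sketch concrete; it reuses the request module from the question and reports per-index success or failure through the single-argument callback used above:

const request = require('request');

function urlArr(url, index, callback) {
  request(url, function (err, res, body) {
    if (err) {
      // Pass an error marker instead of throwing, so the waterfall keeps its shape.
      return callback(index + ' error: ' + err.message);
    }
    return callback(index + ' success');
  });
}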
This is happening because of the non-blocking nature of JavaScript.
If you want the requests to happen one by one, in order, you can use async functions.
The socket hang up error may be because the URL you hit didn't send any response after accepting the request.
You might have an issue with the non-blocking nature of the forEach loop.
You can combine Promises and async/await to make the loop wait for each request to finish. Here is one way of handling it:
const request = require('request');

let urlArr = [
  'https://localhost:9090',
  'https://www.google.com',
  'https://www.ebay.com',
  'https://www.amazon.com',
];

//Creating a promise for each request.
let fetchPromise = function(url) {
  return new Promise((resolve, reject) => {
    request(url, (err, res, body) => {
      if (err)
        reject(Error(url + ' cannot be fetched'));
      else
        resolve(body);
    });
  });
};

//Creating a function that awaits each request in turn.
let fetchAllData = async function(urls) {
  for (const url of urls) { //using a modern for..of loop instead of forEach
    try {
      const data = await fetchPromise(url); // waiting until the promise is resolved.
      console.log('Received ' + data.length + ' bytes from ' + url);
    } catch (e) {
      console.log('Error: ' + e); // catching the error in case the promise is rejected
    }
  }
};

//calling the function
fetchAllData(urlArr);

/*
// In case you want to wait until all promises are resolved,
// use Promise.all; however, it will fail if any of the promises is rejected.
// One way to handle that is to modify fetchPromise so that it
// always resolves.
Promise
  .all(urlArr.map(url => fetchPromise(url)))
  .then(data => console.log(data))
  .catch(err => console.log(err));
*/
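As a rough illustration of that last comment, here is a variant of fetchPromise that never rejects, so Promise.all always resolves with a mix of bodies and error markers (the shape of the returned objects is just an assumption for this sketch):

// Never rejects: failures are converted into { url, error } objects.
let fetchPromiseSettled = function (url) {
  return new Promise((resolve) => {
    request(url, (err, res, body) => {
      if (err) resolve({ url: url, error: err.message });
      else resolve({ url: url, body: body });
    });
  });
};

Promise
  .all(urlArr.map(url => fetchPromiseSettled(url)))
  .then(results => {
    results.forEach(r => {
      if (r.error) console.log(r.url + ' failed: ' + r.error);
      else console.log(r.url + ' returned ' + r.body.length + ' bytes');
    });
  });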
I hope it helps.
I'm trying to update a tool that was created a while ago, which uses Node.js (I am not a JS developer, so I'm trying to piece the code together), and am getting stuck at the last hurdle.
The new functionality takes in a Swagger .json definition, compares the endpoints against the matching API Gateway on AWS using the 'aws-sdk' SDK for JS, and then updates the Gateway accordingly.
The code runs fine on a small definition file (about 15 endpoints) but as soon as I give it a bigger one, I start getting tons of TooManyRequestsException errors.
I understand that this is due to my calls to the API Gateway service being made too quickly, and that a delay/pause is needed. This is where I am stuck.
I have tried adding:
a delay() to each promise being returned
running a setTimeout() in each promise
adding a delay to the Promise.all and Promise.mapSeries
Currently my code loops through each endpoint within the definition and pushes the resulting promise into a promise array:
promises.push(getMethodResponse(resourceMethod, value, apiName, resourcePath));
Once the loop is finished I run this:
return Promise.all(promises)
  .catch((err) => {
    winston.error(err);
  })
I have tried the same with mapSeries (no luck).
It looks like the functions within getMethodResponse are run immediately and hence, no matter what type of delay I add, they all still just execute. My suspicion is that I need to make getMethodResponse return a function and then use mapSeries, but I can't get this to work either.
Code I tried:
Wrapped the getMethodResponse in this:
return function(value){}
Then added this after the loop (and within the loop - no difference):
Promise.mapSeries(function (promises) {
  return 'a'();
}).then(function (results) {
  console.log('result', results);
});
Also tried many other suggestions:
Here
Here
Any suggestions please?
EDIT
As requested, some additional code to try to pinpoint the issue.
The code currently working with a small set of endpoints (within the Swagger file):
module.exports = (apiName, externalUrl) => {
  return getSwaggerFromHttp(externalUrl)
    .then((swagger) => {
      let paths = swagger.paths;
      let resourcePath = '';
      let resourceMethod = '';
      let promises = [];
      _.each(paths, function (value, key) {
        resourcePath = key;
        _.each(value, function (value, key) {
          resourceMethod = key;
          let statusList = [];
          _.each(value.responses, function (value, key) {
            if (key >= 200 && key <= 204) {
              statusList.push(key)
            }
          });
          _.each(statusList, function (value, key) { //Only for 200-201 range
            //Working with small set
            promises.push(getMethodResponse(resourceMethod, value, apiName, resourcePath))
          });
        });
      });
      //Working with small set
      return Promise.all(promises)
        .catch((err) => {
          winston.error(err);
        })
    })
    .catch((err) => {
      winston.error(err);
    });
};
I have since tried adding this in place of the return Promise.all():
Promise.map(promises, function() {
  // Promise.map awaits for returned promises as well.
  console.log('X');
}, {concurrency: 5})
  .then(function() {
    return console.log("y");
  });
Running this spits out something like the following (it's the same for each endpoint, and there are many):
Error: TooManyRequestsException: Too Many Requests
X
Error: TooManyRequestsException: Too Many Requests
X
Error: TooManyRequestsException: Too Many Requests
The AWS SDK is called three times within each promise; the calls (initiated from the getMethodResponse() function) are:
apigateway.getRestApisAsync()
return apigateway.getResourcesAsync(resourceParams)
apigateway.getMethodAsync(params, function (err, data) {}
The AWS SDK documentation states that this is typical behaviour when too many consecutive calls are made (too fast). I've had a similar issue in the past which was resolved by simply adding a .delay(500) into the code being called;
Something like:
return apigateway.updateModelAsync(updateModelParams)
  .tap(() => logger.verbose(`Updated model ${updatedModel.name}`))
  .tap(() => bar.tick())
  .delay(500)
EDIT #2
In the name of thoroughness, I thought I'd include my entire .js file.
'use strict';

const AWS = require('aws-sdk');
let apigateway, lambda;
const Promise = require('bluebird');
const R = require('ramda');
const logger = require('../logger');
const config = require('../config/default');
const helpers = require('../library/helpers');
const winston = require('winston');
const request = require('request');
const _ = require('lodash');
const region = 'ap-southeast-2';
const methodLib = require('../aws/methods');
const emitter = require('../library/emitter');

emitter.on('updateRegion', (region) => {
  region = region;
  AWS.config.update({ region: region });
  apigateway = new AWS.APIGateway({ apiVersion: '2015-07-09' });
  Promise.promisifyAll(apigateway);
});
function getSwaggerFromHttp(externalUrl) {
  return new Promise((resolve, reject) => {
    request.get({
      url: externalUrl,
      header: {
        "content-type": "application/json"
      }
    }, (err, res, body) => {
      if (err) {
        winston.error(err);
        reject(err);
      }
      let result = JSON.parse(body);
      resolve(result);
    })
  });
}
/*
  Deletes a method response
*/
function deleteMethodResponse(httpMethod, resourceId, restApiId, statusCode, resourcePath) {
  let methodResponseParams = {
    httpMethod: httpMethod,
    resourceId: resourceId,
    restApiId: restApiId,
    statusCode: statusCode
  };
  return apigateway.deleteMethodResponseAsync(methodResponseParams)
    .delay(1200)
    .tap(() => logger.verbose(`Method response ${statusCode} deleted for path: ${resourcePath}`))
    .error((e) => {
      return console.log(`Error deleting Method Response ${httpMethod} not found on resource path: ${resourcePath} (resourceId: ${resourceId})`); // an error occurred
      logger.error('Error: ' + e.stack)
    });
}

/*
  Deletes an integration response
*/
function deleteIntegrationResponse(httpMethod, resourceId, restApiId, statusCode, resourcePath) {
  let methodResponseParams = {
    httpMethod: httpMethod,
    resourceId: resourceId,
    restApiId: restApiId,
    statusCode: statusCode
  };
  return apigateway.deleteIntegrationResponseAsync(methodResponseParams)
    .delay(1200)
    .tap(() => logger.verbose(`Integration response ${statusCode} deleted for path ${resourcePath}`))
    .error((e) => {
      return console.log(`Error deleting Integration Response ${httpMethod} not found on resource path: ${resourcePath} (resourceId: ${resourceId})`); // an error occurred
      logger.error('Error: ' + e.stack)
    });
}
/*
  Get Resource
*/
function getMethodResponse(httpMethod, statusCode, apiName, resourcePath) {
  let params = {
    httpMethod: httpMethod.toUpperCase(),
    resourceId: '',
    restApiId: ''
  }
  return getResourceDetails(apiName, resourcePath)
    .error((e) => {
      logger.unimportant('Error: ' + e.stack)
    })
    .then((result) => {
      //Only run the comparison of models if the resourceId (from the url passed in) is found within the AWS Gateway
      if (result) {
        params.resourceId = result.resourceId
        params.restApiId = result.apiId
        var awsMethodResponses = [];
        try {
          apigateway.getMethodAsync(params, function (err, data) {
            if (err) {
              if (err.statusCode == 404) {
                return console.log(`Method ${params.httpMethod} not found on resource path: ${resourcePath} (resourceId: ${params.resourceId})`); // an error occurred
              }
              console.log(err, err.stack); // an error occurred
            }
            else {
              if (data) {
                _.each(data.methodResponses, function (value, key) {
                  if (key >= 200 && key <= 204) {
                    awsMethodResponses.push(key)
                  }
                });
                awsMethodResponses = _.pull(awsMethodResponses, statusCode); //List of items not found within the Gateway - to be removed.
                _.each(awsMethodResponses, function (value, key) {
                  if (data.methodResponses[value].responseModels) {
                    var existingModel = data.methodResponses[value].responseModels['application/json']; //Check if there is currently a model attached to the resource / method about to be deleted
                    methodLib.updateResponseAssociation(params.httpMethod, params.resourceId, params.restApiId, statusCode, existingModel); //Associate this model to the same resource / method, under the new response status
                  }
                  deleteMethodResponse(params.httpMethod, params.resourceId, params.restApiId, value, resourcePath)
                    .delay(1200)
                    .done();
                  deleteIntegrationResponse(params.httpMethod, params.resourceId, params.restApiId, value, resourcePath)
                    .delay(1200)
                    .done();
                })
              }
            }
          })
          .catch(err => {
            console.log(`Error: ${err}`);
          });
        }
        catch (e) {
          console.log(`getMethodAsync failed, Error: ${e}`);
        }
      }
    })
};
function getResourceDetails(apiName, resourcePath) {
  let resourceExpr = new RegExp(resourcePath + '$', 'i');
  let result = {
    apiId: '',
    resourceId: '',
    path: ''
  }
  return helpers.apiByName(apiName, AWS.config.region)
    .delay(1200)
    .then(apiId => {
      result.apiId = apiId;
      let resourceParams = {
        restApiId: apiId,
        limit: config.awsGetResourceLimit,
      };
      return apigateway.getResourcesAsync(resourceParams)
    })
    .then(R.prop('items'))
    .filter(R.pipe(R.prop('path'), R.test(resourceExpr)))
    .tap(helpers.handleNotFound('resource'))
    .then(R.head)
    .then([R.prop('path'), R.prop('id')])
    .then(returnedObj => {
      if (returnedObj.id) {
        result.path = returnedObj.path;
        result.resourceId = returnedObj.id;
        logger.unimportant(`ApiId: ${result.apiId} | ResourceId: ${result.resourceId} | Path: ${result.path}`);
        return result;
      }
    })
    .catch(err => {
      console.log(`Error: ${err} on API: ${apiName} Resource: ${resourcePath}`);
    });
};

function delay(t) {
  return new Promise(function(resolve) {
    setTimeout(resolve, t)
  });
}
module.exports = (apiName, externalUrl) => {
  return getSwaggerFromHttp(externalUrl)
    .then((swagger) => {
      let paths = swagger.paths;
      let resourcePath = '';
      let resourceMethod = '';
      let promises = [];
      _.each(paths, function (value, key) {
        resourcePath = key;
        _.each(value, function (value, key) {
          resourceMethod = key;
          let statusList = [];
          _.each(value.responses, function (value, key) {
            if (key >= 200 && key <= 204) {
              statusList.push(key)
            }
          });
          _.each(statusList, function (value, key) { //Only for 200-201 range
            promises.push(getMethodResponse(resourceMethod, value, apiName, resourcePath))
          });
        });
      });
      //Working with small set
      return Promise.all(promises)
        .catch((err) => {
          winston.error(err);
        })
    })
    .catch((err) => {
      winston.error(err);
    });
};
You apparently have a misunderstanding about what Promise.all() and Promise.map() do.
All Promise.all() does is keep track of a whole array of promises to tell you when the async operations they represent are all done (or one returns an error). When you pass it an array of promises (as you are doing), ALL those async operations have already been started in parallel. So, if you're trying to limit how many async operations are in flight at the same time, it's already too late at that point. So, Promise.all() by itself won't help you control how many are running at once in any way.
I've also noticed since that this line promises.push(getMethodResponse(resourceMethod, value, apiName, resourcePath)) seems to actually execute the promises rather than simply adding them to the array. It seems like the last Promise.all() doesn't actually do much.
Yep, when you execute promises.push(getMethodResponse()), you are calling getMethodResponse() immediately right then. That starts the async operation immediately. That function then returns a promise and Promise.all() will monitor that promise (along with all the other ones you put in the array) to tell you when they are all done. That's all Promise.all() does. It monitors operations you've already started. To keep the max number of requests in flight at the same time below some threshold, you have to NOT START the async operations all at once like you are doing. Promise.all() does not do that for you.
For Bluebird's Promise.map() to help you at all, you have to pass it an array of DATA, not promises. When you pass it an array of promises that represent async operations that you've already started, it can do no more than Promise.all() can do. But, if you pass it an array of data and a callback function that can then initiate an async operation for each element of data in the array, THEN it can help you when you use the concurrency option.
Your code is pretty complex so I will illustrate with a simple web scraper that wants to read a large list of URLs, but for memory considerations, only process 20 at a time.
const rp = require('request-promise');

let urls = [...]; // large array of URLs to process

Promise.map(urls, function(url) {
  return rp(url).then(function(data) {
    // process scraped data here
    return someValue;
  });
}, {concurrency: 20}).then(function(results) {
  // process array of results here
}).catch(function(err) {
  // error here
});
In this example, hopefully you can see that an array of data items are being passed into Promise.map() (not an array of promises). This, then allows Promise.map() to manage how/when the array is processed and, in this case, it will use the concurrency: 20 setting to make sure that no more than 20 requests are in flight at the same time.
Your effort to use Promise.map() was passing an array of promises, which does not help you since the promises represent async operations that have already been started:
Promise.map(promises, function() {
...
});
Then, in addition, you really need to figure out what exactly causes the TooManyRequestsException error, either by reading the documentation for the target API or by doing a whole bunch of testing. There can be a variety of things that might cause it, and without knowing exactly what you need to control, it just takes a lot of wild guesses to figure out what might work. The most common things that an API might detect are:
Simultaneous requests from the same account or source.
Requests per unit of time from the same account or source (such as request per second).
The concurrency option in Promise.map() will easily help you with the first option, but will not necessarily help you with the second, since you can limit yourself to a low number of simultaneous requests and still exceed a requests-per-second limit. The second needs some actual time control. Inserting delay() statements will sometimes work, but even that is not a very direct way of managing it and will lead either to inconsistent control (something that works sometimes, but not other times) or to sub-optimal control (limiting yourself to something far below what you can actually use).
To manage to a request per second limit, you need some actual time control with a rate limiting library or actual rate limiting logic in your own code.
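As a rough sketch of what such rate-limiting logic could look like (this is not the OP's AWS code; the 900 ms spacing and the doRequest worker are assumptions for illustration):

const Promise = require('bluebird');

// Run one task at a time, guaranteeing a minimum gap between the starts of consecutive tasks.
function rateLimitedSeries(items, minIntervalMs, worker) {
  let lastStart = 0;
  return Promise.mapSeries(items, function (item) {
    const now = Date.now();
    const wait = Math.max(0, lastStart + minIntervalMs - now);
    return Promise.delay(wait).then(function () {
      lastStart = Date.now();
      return worker(item);
    });
  });
}

// Usage sketch: space hypothetical doRequest() calls ~900 ms apart.
// rateLimitedSeries(requestParamsArray, 900, doRequest).then(results => console.log(results));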
Here's an example of a scheme for limiting the number of requests per second you are making: How to Manage Requests to Stay Below Rate Limiting.
Currently I have a dashboard that lists a bunch of records in a table. Users can select one record and hit execute, and I send an AJAX POST request to my route middleware, which executes three functions inside async.waterfall and returns a 200 response back to my client if everything works correctly. This async waterfall usually takes about 40-55 seconds to finish executing (fn_1, fn_2 and fn_3) and works perfectly fine.
router.post('/url', function(req, res, next) {
  try {
    async.waterfall([
      fn_1,
      fn_2,
      fn_3
    ], function (err, body) {
      res.writeHead(200, {'Content-Type': 'application/json'});
      res.end(JSON.stringify({"error": err, "result": body}));
    });

    function fn_1(callback) {
      callback(null, response);
    }

    function fn_2(result, callback) {
      callback(err, result);
    }

    function fn_3(result, callback) {
      callback(null, result);
    }
  }
  catch (err) {
    console.log(err)
  }
});
But if I were to give users the option to select MULTIPLE records and send that as an array back to my route middleware, how can I execute multiple async.waterfall runs, one for each item in the array, in parallel?
I can run a loop and execute the waterfall inside the loop, but it will again wait for each item to complete and only then start the next iteration; this is not what I want.
Is this doable in Node/Express? What's the easiest way to achieve this? Or are there modules/plugins that can help solve this case?
Here is an abbreviated version of your code and how it could be changed to suit your needs. If none of your calls need data from any of the other calls, you can just run them in parallel with promises and use Promise.all to capture the result.
function fn_1(callback) {
  // See function fn_3 for structure
}

function fn_2(result, callback) {
  // See function fn_3 for structure
}

function fn_3(result, callback) {
  return new Promise((resolve, reject) => {
    resolve(result)
  })
  .then(d => {
    // Instead of callbacks, use a "then"
    // block/statement.
    //
    // Do something with d here.
  })
}

Promise.all([fn_1(), fn_2(), fn_3()])
  .then(v => {
    // Do something with v;
  })
  .catch(e => {
    // Do something with e
  })
I tend to advocate the use of native Promises over libraries like async; however, since you're already using async...
You can use parallel and map each item in the array to a waterfall handler, e.g.

async.parallel(
  myArray.map(val => cb => async.waterfall([fn_1, fn_2, fn_3], cb)),
  (err, results) => {
    // return consolidated response
  }
)
You would need to rework your waterfall handlers so they don't send a response but instead just propagate any errors (see the sketch below).
It should also be noted that parallel is only useful if you are in fact running I/O-bound code; if the code is anything like your example, you won't really gain anything from using parallel over something like async.each.
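A minimal sketch of what that rework might look like; processRecord, the waterfall bodies and req.body.records are placeholders, and the point is that each handler only passes errors and results down the chain while the route sends a single consolidated response at the end:

const async = require('async');

// One waterfall per record; handlers propagate errors via the callback instead of writing to res.
function processRecord(recordId) {
  return cb => async.waterfall([
    callback => callback(null, { recordId: recordId }), // fn_1: fetch/prepare data
    (data, callback) => callback(null, data),           // fn_2: transform data
    (data, callback) => callback(null, data)            // fn_3: finalize
  ], cb);
}

router.post('/url', function (req, res) {
  const ids = req.body.records || [];
  async.parallel(ids.map(processRecord), function (err, results) {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: err || null, result: results }));
  });
});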
So I'm new to Node.js and I'm just wondering if the way I have my code set up makes sense. I'm coming from a Java background, so the nested callback structure is new. I have a Node program that runs a bunch of code that I broke down into different methods. The thing is that the methods need to be called in order. My code has this structure right now:
functionOne(data, function(err) {
  functionTwo(data, function(err) {
    functionThree(data, function(err) {
      functionFour(data, function(err) {
        //Code
      });
    });
  });
});
This is very minimalistic, but is this structure OK? With Java, I'd take the return values of all the methods and just pass them to the next function. From my understanding so far, the Java approach I just mentioned is one of the main things that Node.js was trying to eliminate. But anyway... does that structure look OK, and is that how it's intended to look? Just want to be sure that I'm not making any major errors with Node in general. Thanks!
Your code structure looks fine if you work with the callback pattern.
But if you're interested in making your code cleaner and more readable, you might want to use Promises in your asynchronous functions. So, instead of passing a callback to your functions, you could do something like this:
function asyncFunction(data) {
  return new Promise(function(resolve, reject) {
    // Do something with data
    // Here you can call reject(error) to throw an error
    resolve();
  });
}
And instead of nested function callbacks, you can call the then method of the Promise:
asyncFunction(data)
  .then(function() {
    // Promise resolved
    // Something has been done with data
  });
With Promises you can also execute async functions in parallel:
Promise.all([asyncFunctionA(data), asyncFunctionB(data), asyncFunctionC(data)])
.then(function(){...});
EDIT
If you need to pass the values from one function to another, your code should look like this:
asyncFunctionA(data)
  .then(function(dataA) {
    return asyncFunctionB(dataA);
  })
  .then(function(dataB) {
    return asyncFunctionC(dataB);
  })
  .then(function(dataC) {
    // ...
  });
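If your Node version supports async/await, the same sequential chain can be written in a style closer to the Java approach the question mentions; a sketch, assuming each asyncFunction returns a Promise:

async function run(data) {
  const dataA = await asyncFunctionA(data); // waits for A before starting B
  const dataB = await asyncFunctionB(dataA);
  const dataC = await asyncFunctionC(dataB);
  return dataC;
}

run(data).then(result => {
  // ...
});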
You should try to use promises to avoid your callback hell, so it could be something like this...
const Q = require('q'); // you can do some research on this module.

var myModule = {};

myModule.functionOne = (params) => {
  const deferred = Q.defer(); // wait for this to complete
  // body function
  deferred.resolve(data); // this would be the result of this function
  return deferred.promise; // data is the output of your function
}

myModule.functionTwo = (params) => {
  const deferred = Q.defer(); // wait for this to complete
  // body function
  deferred.resolve(data); // this would be the result of this function
  return deferred.promise; // data is the output of your function
}

myModule.doAll = (params) => {
  const deferred = Q.defer(); // overall promise for the whole chain
  myModule.functionOne(params)
    .then((outputFunctionOne) => {
      // this is called after functionOne ends
      return myModule.functionTwo(outputFunctionOne);
    })
    .then((outputFunctionTwo) => {
      // this is called after functionTwo ends
      if (outputFunctionTwo.success) {
        // if everything is ok, resolve the promise with the final output
        deferred.resolve(outputFunctionTwo);
      } else {
        // reject the promise with an error message
        deferred.reject('error');
      }
    })
    .fail((err) => {
      // this is called if the promise is rejected or an exception is thrown
      console.log(err); // TODO: Error handling
    })
    .done();
  return deferred.promise;
}

module.exports = myModule;
You can chain as many promises as you want really easily; that way you get rid of the callback hell. Best part: you can use promises in plain JavaScript or in Node.js.
Reference link: https://github.com/kriskowal/q
Hope this helps
Most of the other answers give Promises as the answer to your callback woes. This is correct, and will work for you. However, I'd like to give you another option, if you are willing to drop JavaScript as your working language.
Introducing Iced Coffee, a branch of the CoffeeScript project.
With Iced Coffee you would write:
await functionOne data, defer err
await functionTwo data, defer err2
await functionThree data, defer err3
//etc
This then compiles to the CoffeeScript:
functionOne data, (err) ->
functionTwo data, (err2) ->
functionThree data, (err3) ->
//etc
Which then compiles to your JavaScript:
functionOne(data, function(err) {
  functionTwo(data, function(err2) {
    functionThree(data, function(err3) {
      //etc
    });
  });
});