For loop in promise.then()? - node.js

I need to iterate between two values and create/touch files (I/O) on each iteration.
I'm using the fs-promise module to do so asynchronously:
const path = require('path');
const fsp = require('fs-promise');

function addPages(startAt, pages, mode) {
  let htmlExt = mode.HTML;
  let cssExt = mode.CSS;
  fsp.readFile(path.join('.', 'templates', 'body.html'), { encoding: 'utf-8' })
    .then((content) => {
      // return Promise.all(() => {}).then().catch(); // Do this.
      for (let i = startAt, endAt = startAt + pages; i < endAt; i++) {
        console.log(i);
        fsp.writeFile(path.join('.', 'manuscript', `page-${i}`, `style.${cssExt}`), '')
          .then(() => { console.log('Yay!') })
          .catch(console.log.bind(console));
        // fsp.writeFile(path.join('.', 'manuscript', `page-${i}`, `style.${cssExt}`), '')
        //   .then((i, templateHTML) => {
        //     fsp.writeFile(path.join('.', 'manuscript', `page-${i}`, `body.${htmlExt}`), content);
        //   })
        //   .catch((err) => {
        //     console.log.bind(console);
        //   });
      }
    })
    .catch((err) => {
      if (err) return error('Couldn\'t create pages', err);
    });
}
Now, I did read that Promise.all([array of promises]) is the way to go for looping inside the then() scope, but the question is why/how?
I can't wrap my head around why the for loop doesn't finish its work before execution moves out of the then() scope, and I'm not sure how to get to the expected outcome.

const path = require('path');
const fsp = require('fs-promise');

function addPages(startAt, pages, mode) {
  let htmlExt = mode.HTML;
  let cssExt = mode.CSS;
  return fsp.readFile(path.join('.', 'templates', 'body.html'), { encoding: 'utf-8' })
    .then((content) => {
      var pendingWrites = [];
      for (let i = startAt, endAt = startAt + pages; i < endAt; i++) {
        let filename = path.join('.', 'manuscript', `page-${i}`, `style.${cssExt}`);
        let thisWrite = fsp.writeFile(filename, '');
        pendingWrites.push(thisWrite);
      }
      return Promise.all(pendingWrites);
    })
    .catch((err) => {
      // either fully recover from the error or rethrow
      console.log("Could not add pages: ", err);
      throw err;
    });
}
As elaborated in the comments, resist the temptation to introduce non-functional .catch() handlers into your promise chain.
Non-functional means in this case: It does not recover from the error and does not rethrow the error. A catch handler that does not throw marks an error as handled, i.e. it returns a resolved promise, not a rejected one. This makes proper error handling later in the promise chain impossible. It's bad practice and unhelpful.
If you want to log the error, log it and rethrow it. If you have fully recovered from the error and subsequent code is unimpeded, don't rethrow.
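To make that distinction concrete, here is a minimal sketch of both legitimate patterns, reusing fsp.readFile from above (useTemplate is a hypothetical next step, not part of the original code):

// log and rethrow: later handlers in the chain still see the failure
fsp.readFile(path.join('.', 'templates', 'body.html'), { encoding: 'utf-8' })
  .catch((err) => {
    console.log('could not read template:', err);
    throw err; // keep the chain rejected
  })
  .then(useTemplate); // hypothetical; skipped if the read failed

// fully recover: swallow the error and continue with a fallback value
fsp.readFile(path.join('.', 'templates', 'body.html'), { encoding: 'utf-8' })
  .catch((err) => {
    console.log('falling back to an empty template:', err);
    return ''; // chain continues, resolved with ''
  })
  .then(useTemplate); // hypothetical; runs with the fallback value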

Related

Mongoose promise never gets to .then()

I am using q, and I have multiple mongoose .exec() promises that never get to the .then() part of the code, so they never allow the q deferred to resolve. I can't figure out why it never comes back.
var defer = q.defer();
var promises = [];
console.log('Exams:', exams.length);
for (var e = 0; e < exams.length; e++) {
  console.log('Exams:', exams[e]._id);
  var newPromise = Pupilexam.find({ _exam: exams[e]._id }).populate('_user').exec()
    .then((pupils) => {
      console.log("Adding pupils", exams[e]._id);
      exams[e].pupils = pupils;
      resolve(exams[e]);
    })
    .catch((err) => {
      reject(err);
    });
  console.log(typeof newPromise);
  promises.push(newPromise);
  console.log("Promised pushed");
}
q.all(promises).then(function(data){
  console.log("q'd all");
  defer.resolve(res.status(200).json(exams));
});
return defer;
The Pupilexam.find().exec() never reaches the .then() so the promises never resolve and the defer never resolves. Why would the mongoose find not get to the .then()? What have I missed?
*** UPDATE ***
Even using the built-in promises, we get the same issue: the Pupilexam.find() call never comes back.
var promises = [];
for (var e = 0; e < exams.length; e++) {
  console.log('e:', e);
  console.log('Exam', exams[e]._id);
  var newPromise = Pupilexam.find({ _exam: exams[e]._id }).populate('_user').exec()
    .then((pupils) => {
      console.log("Adding pupils", exams[e]._id);
      exams[e].pupils = pupils;
    })
    .catch(handleError(res));
  promises.push(newPromise);
}
Promise.all(promises).then((exams) => {
  console.log(values);
  res.status(200).json(exams)
});
With this method I also get a headers error on the call: UnhandledPromiseRejectionWarning: Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
** ADDITIONAL CODE REQUESTED **
function handleError(res, statusCode) {
  statusCode = statusCode || 500;
  return function(err) {
    console.log(err.message);
    res.status(statusCode).send(err);
  };
}
To answer the updated question regarding the Cannot set headers after they are sent to the client error: it looks like you send a response to the client inside your handleError function. If more than one Pupilexam.find call fails, handleError is invoked twice, resulting in the mentioned error.
You should move the catch-handler down to the Promise.all call:
const promises = [];
for (const exam of exams) {
  const newPromise = Pupilexam
    .find({ _exam: exam._id }).populate('_user').exec()
    .then((pupils) => {
      exam.pupils = pupils;
    });
  promises.push(newPromise);
}
Promise.all(promises)
  .then((exams) => {
    res.status(200).json(exams);
  })
  .catch(handleError(res));
I guess that you are indeed returning your promise, but you are returning an empty JSON.
There are two problems with your approach:
You are not returning anything from your .then(): it should return pupils, but it returns undefined.
You are logging values, which is not defined anywhere.
.then((pupils) => {
  console.log("Adding pupils", exams[e]._id);
  exams[e].pupils = pupils;
  // you should return something
  // return pupils;
})
promises.push(newPromise);

Promise.all(promises).then((exams) => {
  // ['undefined', 'undefined', ...]
  console.log(values);
  res.status(200).json(exams)
});
Looks like the answer was that on these two lines exams[e] is no longer valid: because the loop index is declared with var, by the time each promise comes back the loop has already moved on, so e is too high and the lookup was erroring.
console.log("Adding pupils", exams[e]._id);
exams[e].pupils = pupils;
I only discovered that when I read eol's point about the catch and decided to catch the error properly and log it.
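For what it's worth, here is a minimal, self-contained sketch of why a var-declared loop index is stale by the time an asynchronous callback runs, and why let (or the for...of used in the answers above) avoids it:

const items = ['a', 'b', 'c'];

// With var there is a single shared e; by the time the callbacks run,
// the loop has finished and e === items.length, so items[e] is undefined.
for (var e = 0; e < items.length; e++) {
  Promise.resolve().then(() => console.log('var:', items[e])); // undefined, three times
}

// With let, each iteration gets its own binding of i.
for (let i = 0; i < items.length; i++) {
  Promise.resolve().then(() => console.log('let:', items[i])); // 'a', 'b', 'c'
}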
This is how it would look with async/await, based on your code:
// inside an async function
for (const e of exams) {
  try {
    const pupils = await Pupilexam.find({ _exam: e._id })
      .populate('_user')
      .lean()
      .exec();
    e.pupils = pupils;
  } catch (err) {
    // handleError
  }
}
res.status(200).json({ data: exams });
Maybe that will show you where your original code goes wrong.

In Node.js, what's the correct way to handle an error using async / await?

Currently, I have a function as follows:
export async function getFiles (param: any) {
  try {
    const results = await ... // code to get results from database;
    const files = [];
    for (let i = 0; i < results.length; i++) {
      files.push(results[i]);
    }
    return files;
  }
  catch (error) {
    console.log(error);
  }
}
Separately, I use the library function as follows:
const files = await lib.getFilesAsync(param);
for (const file of files) {
  // process file here
}
How do I properly add error handling to this code?
I've tried returning an error in catch:
catch (error) {
  console.log(error);
  return new Error("getFiles error");
}
but then I get the following error at compile time:
Type 'Error' must have a '[Symbol.iterator]()' method that returns an iterator. (2488)
An async function returns a promise. So, inside that async function you have to make a design decision about whether you want an error to return a rejected promise or some sort of specific error that you can test for. In either case, the caller of the async function needs to do error checking. Here are two possible ways to implement it. It's really your design choice which way to go.
It's somewhat analogous to a synchronous function that can either return a sentinel value like null that must be tested for by the caller when it has an error or it can throw an exception that would be caught by the caller with try/catch.
Caller uses .catch() or try/catch
export async function getFiles (param: any) {
  // if any await operation here rejects, the promise returned
  // from getFiles() will reject and the caller can .catch() the error
  const results = await ... // code to get results from database;
  const files = [];
  for (let i = 0; i < results.length; i++) {
    files.push(results[i]);
  }
  return files;
}

// usage
getFiles(...).then(files => {
  console.log(files);
}).catch(err => {
  console.log(err);
})

// or, you could use it this way (inside an async function)
try {
  let files = await getFiles(...);
  console.log(files);
} catch(e) {
  console.log(e);
}
Caller checks for error on resolved promise
export async function getFiles (param: any) {
  try {
    const results = await ... // code to get results from database;
    const files = [];
    for (let i = 0; i < results.length; i++) {
      files.push(results[i]);
    }
    return files;
  }
  catch (error) {
    console.log(error);
    return new Error("getFiles error");
  }
}

// usage:
getFiles(...).then(files => {
  console.log(files);
  if (files instanceof Error) {
    // error
  } else {
    // process files array here
  }
});

// or, you could use it this way (inside an async function)
let files = await getFiles(...);
console.log(files);
if (files instanceof Error) {
  // error
} else {
  // process files array here
}
Personally, I prefer to use the rejected promise because it's a lot easier to sequence or chain async operations if you let the promises manage the error propagation for you.
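As a loose illustration of that preference (the step functions below are hypothetical), letting rejections propagate means a whole sequence of awaits needs only one error handler:

async function run() {
  // if any step rejects, control jumps straight to the single catch below
  const a = await stepOne();   // hypothetical
  const b = await stepTwo(a);  // hypothetical
  return stepThree(b);         // hypothetical
}

run().catch(err => {
  // one place to handle a failure from any step
  console.log(err);
});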

Insert a document into multiple instances of CouchDB in Node.js and collect all success and failure results, in any way possible

I have an array of DB URLs like:
const dbArr = ["http://localhost:5984", "http://xyz_couchdb.com:5984"]
The data to insert:
let data = {
  _id: 324567,
  name: 'Harry',
  gender: 'male'
}
Here is the logic; I am using the nano module:
return new Promise((resolve, reject) => {
  let res = [];
  let rej = [];
  let counter = 0;
  for (let i = 0; i < dbArr.length; i++) {
    dbArr[i].insert(data, (err, body) => {
      err ? rej.push(err) : res.push(body)
      if (counter === obj.dbArray.length - 1) {
        rej.length ? reject(rej) : resolve(res)
      }
      counter++;
    })
  }
})
What would be the best possible way to achieve this, using promises, the async module, or anything else?
In the following example, we use Array.map to create one promise for each element of dbArr, then wait for all of them with Promise.all. The catch is there to handle the errors.
function getAll(dbArr) {
  return Promise.all(dbArr.map(x => x.insert(data)));
}

getAll(dbArr)
  .then((rets) => {
    // Handle the returns
    // They are in an array
  })
  .catch((err) => {
    // Handle the error
  });
EDIT:
After checking out the documentation of node-couchdb (the one I suppose you use), I saw that the .insert() method does not return a Promise but only takes a callback.
So we have to transform the method so that it returns a Promise, using util.promisify():
const {
  promisify,
} = require('util');

function getAll(dbArr) {
  // bind so that insert keeps its `this` (the db object) when promisified
  return Promise.all(dbArr.map(x => promisify(x.insert.bind(x))(data)));
}

getAll(dbArr)
  .then((rets) => {
    // Handle the returns
    // They are in an array
  })
  .catch((err) => {
    // Handle the error
  });

Axios.all, how to configure axios wait time to mitigate hung up?

My application uses an internal web service for fetching data. I have a job which creates approximately 500 requests that get fired asynchronously to complete the fetch operation.
I make use of Axios, creating an array of axios promises and then resolving them using Axios.all().
It works fine up to about 200 requests, but beyond that I get socket hang up errors, even though on the server side I can see the requests are being processed.
How do I configure axios to set a custom timeout? Or is it a better idea to splice my promises array and run the requests in multiple batches?
Source code
let getAxiosPromiseArray = (urlList) => {
  var axiosArrayofPromise = [];
  return new Promise((resolve, reject) => {
    try {
      urlList.forEach((URL) => {
        axiosArrayofPromise.push(axios.get(URL));
      });
      resolve(axiosArrayofPromise);
    }
    catch (err) {
      reject("There is a problem getting Axios array of promises " + err);
    }
  })
}

async function processAxiosPromises (PromiseArray) {
  try {
    var results = []
    results = await axios.all(PromiseArray);
    return results;
  }
  catch(err) {
    throw("There was a problem resolving promises array (Axios) " + err);
  }
}

getallID().then((urlList) => {
  return getAxiosPromiseArray(urlList);
}).then((AxiosPromises) => {
  return processAxiosPromises(AxiosPromises);
}).then((resultData) => {
  console.log(resultData);
});
Error
There was a problem resolving promises array (Axios) Error: socket hang up
First, that pair of functions getAxiosPromiseArray() and processAxiosPromises() needs fixing.
Your new Promise() construction is unnecessary. You can simply return Promise.all(arrayofPromise) (or axios.all(...) if you must) and do away with the other function.
Renaming the remaining function to something meaningful, you would end up with, e.g.:
let getData = (urlList) => {
  return Promise.all(urlList.map(URL => axios.get(URL)))
    .catch(error => {
      error.message = "There is a problem getting Axios array of promises " + error.message; // augment the error message ...
      throw error; // ... and re-throw the error.
    });
};
And call it as follows:
getallID().then(getData)
  .then(resultData => {
    console.log(resultData);
  }).catch(error => {
    console.error(error);
  });
That will put you on solid ground but, on its own, is unlikely to fix a concurrency problem (if that's what it is), for which the simplest approach is to use Bluebird's Promise.map with the concurrency option.
The caller code can remain the same, just change getData(), as follows:
let getData = (urlList) => {
  let concurrency = 10; // play with this value to find a reliable concurrency limit
  return Promise.map(urlList, URL => axios.get(URL), {'concurrency': concurrency})
    .catch(error => {
      error.message = "There is a problem getting Axios array of promises " + error.message;
      throw error;
    });
};
// where `Promise` is Bluebird.
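Regarding the custom timeout part of the question: axios supports a timeout option (in milliseconds), either per request or on an axios instance, so a hung socket surfaces as an error instead of blocking the batch indefinitely. A rough sketch of the instance variant, reusing the getData() shape from above:

const axios = require('axios');

// an instance whose requests all fail after 5 seconds instead of hanging;
// the option can also be passed per call: axios.get(URL, { timeout: 5000 })
const api = axios.create({ timeout: 5000 });

// drop-in variant of getData() using that instance
const getData = (urlList) =>
  Promise.all(urlList.map(URL => api.get(URL)));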
const axios = require('axios');
const axiosThrottle = require('axios-throttle');

// pass axios object and value of the delay between requests in ms
axiosThrottle.init(axios, 200);

const options = {
  method: 'GET',
};
const urlList = [
  'https://jsonplaceholder.typicode.com/todos/1',
  'https://jsonplaceholder.typicode.com/todos/2',
  'https://jsonplaceholder.typicode.com/todos/3',
  'https://jsonplaceholder.typicode.com/todos/4',
  'https://jsonplaceholder.typicode.com/todos/5',
  'https://jsonplaceholder.typicode.com/todos/6',
  'https://jsonplaceholder.typicode.com/todos/7',
  'https://jsonplaceholder.typicode.com/todos/8',
  'https://jsonplaceholder.typicode.com/todos/9',
  'https://jsonplaceholder.typicode.com/todos/10'
];
const promises = [];
const responseInterceptor = response => {
  console.log(response.data);
  return response;
};

// add interceptor to work with each response separately when it is resolved
axios.interceptors.response.use(responseInterceptor, error => {
  return Promise.reject(error);
});

for (let index = 0; index < urlList.length; index++) {
  options.url = urlList[index];
  promises.push(axiosThrottle.getRequestPromise(options, index));
}

// run when all promises are resolved
axios.all(promises).then(responses => {
  console.log(responses.length);
});
https://github.com/arekgotfryd/axios-throttle
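If adding a library is not an option, the batching idea from the question can also be sketched with plain promises: slice the URL list into chunks and await each chunk before starting the next. The chunk size of 50 below is an arbitrary guess that would need tuning:

// assumes: const axios = require('axios');
async function fetchInBatches(urlList, batchSize = 50) {
  const results = [];
  for (let i = 0; i < urlList.length; i += batchSize) {
    const batch = urlList.slice(i, i + batchSize);
    // requests within a batch run concurrently; batches run one after another
    const responses = await Promise.all(batch.map(URL => axios.get(URL)));
    results.push(...responses);
  }
  return results;
}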

Async/Await not waiting

I'm running into an issue which I don't fully understand. I feel like there are likely concepts which I haven't grasped, code that could be optimized, and possibly a bug thrown in for good measure.
To greatly simplify the overall flow:
1. A request is made to an external API
2. The returned JSON object is parsed and scanned for link references
3. If any link references are found, additional requests are made to populate/replace link references with real JSON data
4. Once all link references have been replaced, the original request is returned and used to build content
Here is the original request (#1):
await Store.get(Constants.Contentful.ENTRY, Contentful[page.file])
Store.get is represented by:
async get(type, id) {
  return await this._get(type, id);
}
Which calls:
_get(type, id) {
  return new Promise(async (resolve, reject) => {
    var data = _json[id] = _json[id] || await this._api(type, id);
    console.log(data)
    if(isAsset(data)) {
      resolve(data);
    } else if(isEntry(data)) {
      await this._scan(data);
      resolve(data);
    } else {
      const error = 'Response is not entry/asset.';
      console.log(error);
      reject(error);
    }
  });
}
The API call is:
_api(type, id) {
  return new Promise((resolve, reject) => {
    Request('http://cdn.contentful.com/spaces/' + Constants.Contentful.SPACE + '/' + (!type || type === Constants.Contentful.ENTRY ? 'entries' : 'assets') + '/' + id + '?access_token=' + Constants.Contentful.PRODUCTION_TOKEN, (error, response, data) => {
      if(error) {
        console.log(error);
        reject(error);
      } else {
        data = JSON.parse(data);
        if(data.sys.type === Constants.Contentful.ERROR) {
          console.log(data);
          reject(data);
        } else {
          resolve(data);
        }
      }
    });
  });
}
When an entry is returned, it is scanned:
_scan(data) {
  return new Promise((resolve, reject) => {
    if(data && data.fields) {
      const keys = Object.keys(data.fields);
      keys.forEach(async (key, i) => {
        var val = data.fields[key];
        if(isLink(val)) {
          var child = await this._get(val.sys.linkType.toUpperCase(), val.sys.id);
          this._inject(data.fields, key, undefined, child);
        } else if(isLinkArray(val)) {
          var children = await* val.map(async (link) => await this._get(link.sys.linkType.toUpperCase(), link.sys.id));
          children.forEach((child, index) => {
            this._inject(data.fields, key, index, child);
          });
        } else {
          await new Promise((resolve) => setTimeout(resolve, 0));
        }
        if(i === keys.length - 1) {
          resolve();
        }
      });
    } else {
      const error = 'Required data is unavailable.';
      console.log(error);
      reject(error);
    }
  });
}
If link references are found, additional requests are made and then the resulting JSON is injected into the original JSON in place of the reference:
_inject(fields, key, index, data) {
  if(isNaN(index)) {
    fields[key] = data;
  } else {
    fields[key][index] = data;
  }
}
Notice that I'm using async, await, and Promises in what I believe is their intended manner. What ends up happening: the calls for referenced data (the _gets issued by _scan) end up occurring after the original request is returned. This ends up providing incomplete data to the content template.
Additional information concerning my build setup:
npm#2.14.2
node#4.0.0
webpack#1.12.2
babel#5.8.34
babel-loader#5.4.0
I believe the issue is in your forEach call in _scan. For reference, see this passage in Taming the asynchronous beast with ES7:
However, if you try to use an async function, then you will get a more subtle bug:
let docs = [{}, {}, {}];

// WARNING: this won't work
docs.forEach(async function (doc, i) {
  await db.post(doc);
  console.log(i);
});

console.log('main loop done');
This will compile, but the problem is that this will print out:
main loop done
0
1
2
What's happening is that the main function is exiting early, because the await is actually in the sub-function. Furthermore, this will execute each promise concurrently, which is not what we intended.
The lesson is: be careful when you have any function inside your async function. The await will only pause its parent function, so check that it's doing what you actually think it's doing.
So each iteration of the forEach call is running concurrently; they're not executing one at a time. As soon as the one that matches the criteria i === keys.length - 1 finishes, the promise is resolved and _scan returns, even though other async functions called via forEach are still executing.
You would need to either change the forEach to a map to return an array of promises, which you can then await* from _scan (if you want to execute them all concurrently and then call something when they're all done), or execute them one-at-a-time if you want them to execute in sequence.
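For the one-at-a-time option mentioned above, a rough sketch is to drop forEach entirely and await inside a plain for...of loop (doSomethingAsync is a placeholder for the per-item work, not part of the original code):

async function processInSequence(items) {
  const results = [];
  for (const item of items) {
    // each await completes before the next iteration starts
    results.push(await doSomethingAsync(item)); // hypothetical per-item operation
  }
  return results;
}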
As a side note, if I'm reading them right, some of your async functions can be simplified a bit; remember that, while awaiting an async function call returns a value, simply calling it returns another promise, and returning a value from an async function is the same as returning a promise that resolves to that value in a non-async function. So, for example, _get can be:
async _get(type, id) {
  var data = _json[id] = _json[id] || await this._api(type, id);
  console.log(data)
  if (isAsset(data)) {
    return data;
  } else if (isEntry(data)) {
    await this._scan(data);
    return data;
  } else {
    const error = 'Response is not entry/asset.';
    console.log(error);
    throw error;
  }
}
Similarly, _scan could be (assuming you want the forEach bodies to execute concurrently):
async _scan(data) {
  if (data && data.fields) {
    const keys = Object.keys(data.fields);
    const promises = keys.map(async (key, i) => {
      var val = data.fields[key];
      if (isLink(val)) {
        var child = await this._get(val.sys.linkType.toUpperCase(), val.sys.id);
        this._inject(data.fields, key, undefined, child);
      } else if (isLinkArray(val)) {
        var children = await* val.map(async (link) => await this._get(link.sys.linkType.toUpperCase(), link.sys.id));
        children.forEach((child, index) => {
          this._inject(data.fields, key, index, child);
        });
      } else {
        await new Promise((resolve) => setTimeout(resolve, 0));
      }
    });
    await* promises;
  } else {
    const error = 'Required data is unavailable.';
    console.log(error);
    throw error;
  }
}
