In Node, I am using bluebird's Promise.all to execute Promises asynchronously. I don't want to wait for all Promises to resolve before I send results back to the client; rather, I'd like to stream the result of each Promise as soon as it resolves. Is this possible? My research indicates that it is not, but I thought it worth reaching out to the community.
Here is the code (with no attempt to implement streaming):
async bulkExecution(req, res) {
  try {
    const { assets } = req.body;
    let thePromises = _.map(assets, (asset) => {
      return onePromise(asset);
    });
    // I want to stream each Promise result to client as it resolves
    let results = await Promise.all(thePromises);
    return res.status(200).send(results);
  } catch (err) {
    return res.status(500).send(err);
  }
}
I think you'd have to iterate over all the promises, call .then() on each one, write a partial response, and then close the response once all the promises have completed. Something like this should work. Note that the order of the responses may differ from the order of the promise array; for instance, my example below will write the second promise's result first.
let res = {
  write: (data) => { console.log(data) },
  end: () => {}
};

let promise1 = new Promise(function(resolve, reject) {
  setTimeout(function() {
    resolve('foo');
  }, 500);
});

let promise2 = new Promise(function(resolve, reject) {
  setTimeout(function() {
    resolve('foo');
  }, 300);
});

let promises = [promise1, promise2];
let count = 0;

promises.forEach((promise) => {
  promise.then((data) => {
    res.write(data);
    count++;
    if (count >= promises.length) {
      res.end();
    }
  })
})
Modified to write in JSON format.
let res = {
  write: (data) => {
    console.log(data)
  },
  end: () => {}
};

let promise1 = new Promise(function(resolve, reject) {
  setTimeout(function() {
    resolve({foo: "bar"});
  }, 500);
});

let promise2 = new Promise(function(resolve, reject) {
  setTimeout(function() {
    resolve({bar: "foo"});
  }, 300);
});

let promises = [promise1, promise2];
let count = 0;

res.write("[");
promises.forEach((promise) => {
  promise.then((data) => {
    res.write(JSON.stringify(data));
    count++;
    if (count >= promises.length) {
      res.write("]");
      res.end();
    } else {
      res.write(",");
    }
  })
})
Streaming out JSON like this isn't common, but it can be used to reduce the amount of memory the server uses, because you don't need to build the full response in memory before sending it. The client, however, will still need enough memory to hold the entire object once it has read the whole response.
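Applied back to the original bulkExecution handler, a minimal sketch of this approach could look like the following (assumptions: onePromise resolves with JSON-serializable values, assets is a non-empty array, and the order of results doesn't matter):

async bulkExecution(req, res) {
  const { assets } = req.body;
  // assumption: assets is non-empty; with an empty array the response would never end
  const promises = _.map(assets, (asset) => onePromise(asset));

  res.status(200).set('Content-Type', 'application/json');
  res.write('[');

  let count = 0;
  promises.forEach((promise) => {
    promise
      .then((data) => res.write(JSON.stringify(data)))
      .catch((err) => {
        // the status line has already been sent, so log and keep the JSON array valid
        console.error(err);
        res.write('null');
      })
      .then(() => {
        count++;
        if (count === promises.length) {
          res.write(']');
          res.end();
        } else {
          res.write(',');
        }
      });
  });
}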
Try doing .then(data => data.json) on each promise, like so:
let thePromises = _.map(assets, (asset) => {
  return onePromise(asset).then(data => data.json);
});
Related
Here I am trying to retrieve objects and push them into an array. For some reason only one record is being pushed into the file when it should contain more objects. Can you help me out with this, or let me know where I am going wrong? Here is my code:
exports.createjson = (req, res, next) => {
  try {
    var myPromise = () => {
      // ...
    };
    var callMyPromise = async () => {
      const responsearray = [];
      var result = await myPromise();
      return new Promise((resolve, reject) => {
        result.forEach(element => {
          NewsModel.findOne({ _id: element.newsId }).exec(
            async (err, result) => {
              if (err) {
                throw err;
              }
              reportsModel
                .findOne({
                  $and: [
                    { userId: req.query.userId },
                    { newsId: element.newsId }
                  ]
                })
                .exec((err, newsResult) => {
                  if (err) {
                    throw err;
                  }
                  // console.log(newsResult);
                  var response = {
                    newsId: element.newsId,
                    title: result.title,
                    collection: result.group,
                    belivibalityIndex: element.belivibalityIndex,
                    priorknowledge: element.priorknowledge,
                    readingTime: element.readingTime,
                    userId: element.userId,
                    comment: element.comment,
                    report: newsResult !== null ? newsResult.feedback : null
                  };
                  // #all object pushed and displayed in console
                  responsearray.push(response);
                  console.log(response);
                  console.log(responsearray.length);
                  // let data = JSON.stringify(responsearray);
                  // #here is the issue // fs.writeFileSync("abc.json", data, null, null, flag = 'a');
                  return responsearray;
                });
            }
          );
        });
      });
    };
    callMyPromise().then(function(responsearray) {
      res.json(responsearray);
    });
  } catch (error) {
    next(error);
  }
};
You're not quite using Promises properly. For example, you create a Promise object but never call its resolve/reject functions. Inside the forEach loop you are calling functions that use callbacks, and only when that work is done can you resolve the promise you're wrapping them in.
Also, you're calling res.json and writing the file (though it's commented out) while you're still inside the forEach loop. That means res.json will get called multiple times, which is not allowed; you can only send one response per HTTP request.
I restructured the code so that it collects each promise in an array of promises and then waits for all of them to resolve. Only after all of the work is done do we write the file and call res.json to complete the HTTP request.
exports.createjson = async (req, res, next) => {
  const responsearray = [];
  var elements = await myPromise();
  var promises = []; // collect a bunch of promises to wait on

  elements.forEach(element => {
    // one promise per element that resolves when response is on the array
    var promise = new Promise(function(resolve, reject) {
      NewsModel.findOne({ _id: element.newsId }).exec((err, result) => {
        if (err) { return reject(err); }
        reportsModel
          .findOne({
            $and: [{ userId: req.query.userId }, { newsId: element.newsId }]
          })
          .exec((err, newsResult) => {
            if (err) { return reject(err); }
            var response = { /* response body */ };
            responsearray.push(response);
            console.log(response);
            console.log(responsearray.length);
            // complete the promise now that the response is on the array
            return resolve();
          });
      });
    });
    // collect each promise in an array so we can wait for them all
    promises.push(promise);
  });

  // wait for all the work to complete
  await Promise.all(promises).catch(err => next(err));
  // write the responsearray to a file as json
  let data = JSON.stringify(responsearray);
  fs.writeFileSync("abc.json", data);
  return res.json(responsearray);
};
I also removed the try/catch block since the Promise allows you to use .catch in a cleaner way. It simplifies the nesting which makes it easier to read.
The key takeaway here is the general structure:
// get your array to work with
var array = await someFunction()
var manyPromises = []
var manyResults = []

// for each thing in the array create a promise
array.forEach(thing => {
  manyPromises.push(new Promise((resolve, reject) => {
    doSomething(thing, (err, result) => {
      if (err) return reject(err);
      // store the result in the array and resolve the promise
      manyResults.push(result)
      return resolve();
    });
  }));
});

// wait for all promises in manyPromises to complete
await Promise.all(manyPromises).catch(err => next(err));

// now the many promises are done and manyResults is ready
saveResponsesToFile(JSON.stringify(manyResults))
return res.json(manyResults)
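If doSomething follows Node's standard (err, result) callback convention, the same structure can be written more compactly. Here is a hedged sketch using util.promisify from Node core (assuming doSomething, someFunction, and saveResponsesToFile behave as in the sketch above):

const util = require('util');

// assumes doSomething(thing, callback) uses the standard (err, result) callback signature
const doSomethingP = util.promisify(doSomething);

const array = await someFunction();
// map each thing to a promise and wait for all of them
const manyResults = await Promise.all(array.map(thing => doSomethingP(thing)));

saveResponsesToFile(JSON.stringify(manyResults));
return res.json(manyResults);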
How can I know when all the promises inside the for loop have finished executing? Is there a better way to do it?
for (let index = 0; index < array.length; index++) {
  request(array[index], function (error, response, body) {
    promise1().then(result1 => {
      if (result1 !== 0) {
        promise2().then(result2 => {
          promise3(result2);
        });
      } else {
        promise3(result1);
      }
    });
  });
}
console.log('all promises finished');
This is made easier by transforming the code to use async/await to map the items into promises, then using the standard Promise.all() function to wait for all of the promises to resolve:
// promisified request()
const requestP = item =>
  new Promise((resolve, reject) => {
    request(item, (error, response, body) => {
      if (error) return reject(error);
      resolve({ response, body });
    });
  });

const processItem = async item => {
  const { response, body } = await requestP(item);
  const result1 = await promise1(/* this probably uses response or body? */);
  if (result1 !== 0) {
    const result2 = await promise2();
    return await promise3(result2);
  } else {
    return await promise3(result1);
  }
};

const promises = array.map(processItem);
Promise.all(promises).then(() => {
  console.log("all promises finished");
});
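If you also want "all promises finished" to fire even when some items fail, one sketch is to catch per item so Promise.all doesn't reject on the first error (the null placeholder is just an assumption for illustration):

const promises = array.map(item =>
  processItem(item).catch(err => {
    console.error("item failed:", err);
    return null; // keep Promise.all from rejecting early; null marks a failed item
  })
);

Promise.all(promises).then(() => {
  console.log("all promises finished");
});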
You can keep adding the promises you create to a list inside the for loop.
Once you are done with the loop, you can call Promise.all on that list.
let list_of_promises = [];
for (...) {
  list_of_promises.push(
    new Promise(function(resolve, reject) {
      ...
    })
  );
}

Promise.all(list_of_promises).then(function(data) {
  ...
}).catch(function(err) {
  ...
});
If all your promises resolve, the .then method will be called.
Note: if even one of your promises fails, execution will go into the .catch method instead.
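A small runnable sketch of that fail-fast behavior (the delays are arbitrary):

const ok = new Promise(resolve => setTimeout(() => resolve('ok'), 100));
const bad = new Promise((resolve, reject) =>
  setTimeout(() => reject(new Error('failed')), 50)
);

Promise.all([ok, bad])
  .then(data => console.log('all resolved:', data))     // never reached
  .catch(err => console.error('caught:', err.message)); // logs "caught: failed"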
My code is below.
I have a then-catch block. My responseArray is a global variable. I get a response from the functionName function, but I can't use the result outside of the .then block. How can I use the .then response outside of the block?
module.exports = {
  foo1: function(param) {
    return new Promise((resolve, reject) => {
      var result = //some code here
      resolve(result);
    });
  },
  foo2: function(param) {
    return new Promise((resolve, reject) => {
      this.foo1('abc').then(function(res) {
        let response = {
          'item': 'ok',
          'result': res.some_field
        };
        console.log(response); // its ok here.
        responseArray.push(response); //its ok here too
      }).catch(err => {
        console.log(err);
        reject(err);
      });
      console.log(responseArray); //nothing in array here
      resolve(responseArray);
    });
  }
};
The first thing to remember is that promises are asynchronous. Promises do exactly what they say: you are essentially signing a contract (a promise) that you will get your data (or an error), not synchronously, but at some time in the future when the computation has finished.
In order to access your responseArray, you need to resolve your foo2 promise inside of .then and continue the promise chain when calling it, i.e.
module.exports = {
  foo1: function(param) {
    return new Promise((resolve, reject) => {
      var result = //some code here
      resolve(result);
    });
  },
  foo2: function(param) {
    return new Promise((resolve, reject) => {
      this.foo1('abc').then(function(res) {
        let response = {
          'item': 'ok',
          'result': res.some_field
        };
        console.log(response); // its ok here.
        responseArray.push(response); //its ok here too
        resolve(responseArray) // resolve the promise inside of .then
      }).catch(err => {
        console.log(err);
        reject(err);
      });
    });
  }
};

foo2('someValue').then(response => {
  console.log(response) // this will be your array
})
Also, as a side note, ensure you are not falling into the trap of the promise constructor anti-pattern, where you unnecessarily turn synchronous code into asynchronous code just for the sake of using promises.
For example, a valid use of a promise would be to convert a callback, like so:
const getFile = filename => {
  return new Promise((resolve, reject) => {
    fs.readFile(filename, 'utf8', (err, data) => {
      if (err) reject(err)
      resolve(data)
    })
  })
}
whereas this is unnecessary:
const printData = data => {
  return new Promise((resolve, reject) => {
    resolve(console.log(data))
  })
}
vs
const printData = data => {
  console.log(data)
}
Read more here: What is the explicit promise construction antipattern and how do I avoid it?
I am trying to return an array of tokens stored in Firebase, and I am using a Promise.
function getUsersTokens() {
  let dbRef = db.ref('/system/users');
  let result = new Promise((resolve, reject) => {
    dbRef.once('value', (snap) => {
      let tokens = [];
      snap.forEach(child => {
        if (child.Status != "occupied") {
          helper.getToken(child.key, db).then(function(token) {
            tokens.push(token);
          });
        }
      });
      resolve(tokens);
    }, (err) => {
      reject(err);
    });
  });
  return result;
}
And this is the getToken method from the "helper" module:
exports.getToken = function(uid, db) {
  return db.ref(`/Tokens/${uid}`).once('value').then(function(result) {
    return result.val();
  });
};
The problem is that pushing each token into the array seems to work fine, but when getUsersTokens() returns, the array is empty.
Thanks for the help.
The issue is that your result promise is resolving too early because the helper.getToken() is non-blocking, so your forEach will finish running before all of the getToken() calls have finished pushing their token into tokens.
To make things a little easier, you can split your result promise into two promises. The first promise will be in charge of getting snap. The second promise will be in charge of iterating through snap to produce an array of tokens:
function getUsersTokens() {
  let dbRef = db.ref('/system/users');
  let result = new Promise((resolve, reject) => {
    dbRef.once('value', (snap) => {
      resolve(snap);
    }, (err) => {
      reject(err);
    });
  });
  return result.then(snap => {
    let promiseArr = [];
    snap.forEach(child => {
      if (child.Status != "occupied") {
        let p = helper.getToken(child.key, db);
        promiseArr.push(p);
      }
    });
    return Promise.all(promiseArr); // resolves to array of tokens
  });
}
Promise.all takes in an array of promises and resolves when all of those promises have resolved. The promise returned by getUsersTokens will ultimately contain an array of tokens, because each promise in promiseArr resolves to a token.
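A short usage sketch of the returned promise:

getUsersTokens()
  .then(tokens => {
    console.log(tokens); // array of token values, one per non-occupied user
  })
  .catch(err => {
    console.error(err);
  });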
It happens because the promise is resolved with the token array before getToken() resolves itself. You see an empty array because your handler runs before the tokens arrive.
You need to wait on that before resolving. Like this:
function getUsersTokens() {
  let dbRef = db.ref('/system/users');
  return new Promise((resolve, reject) => {
    dbRef.once('value', (snap) => {
      const tokensPromise = snap
        .filter(child => child.Status !== "occupied")
        .map(child => helper.getToken(child.key, db));
      resolve(Promise.all(tokensPromise));
    });
  });
}
Promise.all, as pointed out by @André Werlang and @Christian Santos, is perfect here. Here is an example using the reduce approach:
function getUsersTokens() {
  let dbRef = db.ref('/system/users');
  let result = new Promise((resolve, reject) => {
    dbRef.once('value', (snap) => {
      snap.reduce((chain, child) => {
        return chain.then(array => {
          return helper.getToken(child.key, db).then(function(token) {
            array.push(token);
            return array; // pass the accumulated array down the chain
          });
        });
      }, Promise.resolve([])).then(tokens => {
        resolve(tokens);
      });
    }, (err) => {
      reject(err);
    });
  });
  return result;
}
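Note that chaining through reduce like this fetches the tokens one after another rather than in parallel. A hedged async/await sketch of the same sequential behavior (assuming the snapshot's children have first been collected into an array):

async function collectTokensSequentially(children) {
  const tokens = [];
  for (const child of children) {
    if (child.Status != "occupied") {
      // each getToken call waits for the previous one to finish
      tokens.push(await helper.getToken(child.key, db));
    }
  }
  return tokens;
}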
I am fairly new to using Bluebird promises. I was trying to use them over an emitter, but I am stuck on how to handle errors.
I have a stream object, which is the emitter. The code is below:
return new Promise((resolve, reject) => {
  var onDocFunc = doc => {
    //JSON.parse('*');
    // some logic goes in here to construct setStmt
    bulk.find(query).upsert().update({$set: setStmt});
    count++;
    if (count % bulkSize == 0) {
      stream.pause();
      var execute = Promise.promisify(bulk.execute);
      execute().catch(() => {}).then(() => {
        stream.resume();
      });
    }
  };

  stream.on('data', onDocFunc);
  stream.on('end', () => {
    JSON.parse('*'); // how to catch errors that happen here??
    var boundResolve = resolve.bind(this, {count: count});
    if (count % bulkSize != 0) {
      Promise.promisify(bulk.execute)().then(boundResolve).catch(boundResolve);
    } else {
      boundResolve();
    }
  });
  stream.on('error', err => {
    reject(err);
  });
})
I want to know the recommended way to catch an error that occurs inside the callback of the 'end' event handler. Right now, if any error occurs, the Node.js application crashes with uncaughtException: Unexpected token *.
Don't mix application logic into the promisification of the event emitter. Such code (anything that can throw, etc.) should always go in then callbacks. In your case:
var execute = Promise.promisify(bulk.execute);

return new Promise((resolve, reject) => {
  stream.on('data', onDocFunc); // not sure what this does
  stream.on('end', resolve);
  stream.on('error', reject);
}).then(() => {
  JSON.parse('*'); // exceptions that happen here are caught implicitly!
  var result = {count: count};
  if (count % bulkSize != 0) {
    return execute().catch(() => {}).return(result);
  } else {
    return result;
  }
});
Regarding your real code, I'd probably try to factor out the batching into a helper function:
function asyncBatch(stream, size, callback) {
  var batch = [], count = 0;
  stream.on('data', data => {
    batch.push(data);
    count++;
    if (batch.length == size) {
      stream.pause();
      Promise.resolve(batch).then(callback).then(() => {
        batch = [];
        stream.resume();
      }, e => {
        stream.emit('error', e);
      });
    }
  });
  return new Promise((resolve, reject) => {
    stream.on('end', resolve);
    stream.on('error', reject);
  }).then(() => batch.length ? callback(batch) : null).then(() => count);
}
Promise.promisifyAll(Bulk);

return asyncBatch(stream, bulkSize, docs => {
  const bulk = new Bulk()
  for (const doc of docs) {
    // JSON.parse('*');
    // some logic goes in here to construct setStmt
    bulk.find(query).upsert().update({$set: setStmt});
  }
  return bulk.executeAsync().catch(err => {/* ignore */});
})
You'll have to use a try/catch block:
stream.on('end', () => {
  try {
    JSON.parse('*')
    // ...the rest of your code
  } catch (e) {
    reject(e)
  }
})