Knex.js only returns promises - node.js

How do I make this function work? Only a promise comes back to me.
codeProducts.forEach((code, index) => {
  const qt = app.db('products').where('code', code).first().then(result => result.quantity)
  data[index] = {
    code: code,
    quantity: qt
  }
})
return data

There are two or three ways you can solve this.
Approach 1
Make just one call to the database (I love my database :D):
let codeProducts = [55, 68, 96];
knex('products')
  .whereIn('code', codeProducts)
  .then((results) => {
    // response to api call
    console.log(results);
  });
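If the response needs the same {code, quantity} shape (and order) as the original loop, the rows from that single query can be re-keyed by code. A minimal sketch, assuming each product row exposes code and quantity columns:
let codeProducts = [55, 68, 96];
knex('products')
  .whereIn('code', codeProducts)
  .then((results) => {
    // index the returned rows by their code
    const byCode = new Map(results.map((row) => [row.code, row]));
    // rebuild the array in the order the codes were requested
    const data = codeProducts.map((code) => ({
      code: code,
      quantity: byCode.has(code) ? byCode.get(code).quantity : null
    }));
    console.log(data);
  });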
Approach 2 (I don't like this approach; too many calls on the db)
async function getData(codes) {
  try {
    let results = [];
    for (let i = 0; i < codes.length; i++) {
      let dbQuery = await knex('products').where('code', codes[i]).first();
      results.push(dbQuery);
    }
    return results;
  } catch (e) {
    console.log(e);
  }
}
const codeProducts = [54, 95];
getData(codeProducts)
  .then((res) => {
    console.log(res);
  })

let data = codeProducts.map((code, index) => {
  return app.db('products').where('code', code).first()
    .then(result => {
      return {
        code: code,
        quantity: result.quantity
      }
    })
})
// note: data is an array of promises, one per code
return data
This code should fix your problem. You were accessing the quantity outside of the promise; in order to set the quantity on the data array, you need to do it inside the then.
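Because the map produces an array of promises, the caller still has to wait for them before using the values. A minimal sketch of how that could look, with getData as a hypothetical wrapper around the code above:
function getData(codeProducts) {
  const data = codeProducts.map((code) => {
    return app.db('products').where('code', code).first()
      .then(result => ({ code: code, quantity: result.quantity }));
  });
  // resolve once every per-code query has finished
  return Promise.all(data);
}

getData([55, 68, 96]).then((data) => {
  console.log(data); // [{ code: 55, quantity: ... }, ...]
});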

Related

Promise.all returning undefined in Node JS

I have code to fetch directory names from a first API. For every directory, I need to get the file name from a second API. I am using something like this in my Node.js code:
async function main_function(req, res) {
const response = await fetch(...)
.then((response) => {
if (response.ok) {
return response.text();
} else {
return "";
}
})
.then((data) => {
dirs = ...some logic to extract number of directories...
const tempPromises = [];
for (i = 0; i < dirs.length; i++) {
tempPromises.push(getFilename(i));
}
console.log(tempPromises); // Prints [ Promise { <pending> } ]
Promise.all(tempPromises).then((result_new) => {
console.log(result_new); // This prints "undefined"
res.send({ status: "ok" });
});
});
}
async function getFilename(inp_a) {
const response = await fetch(...)
.then((response) => {
if (response.ok) {
return response.text();
} else {
return "";
}
})
.then((data) => {
return new Promise((resolve) => {
resolve("Temp Name");
});
});
}
What am I missing here?
Your getFilename() doesn't seem to be returning anything, i.e. it's returning undefined. Try returning response at the end of the function:
async function getFilename(inp_a) {
const response = ...
return response;
}
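The underlying reason is that an async function always wraps whatever it returns in a promise, so a body with no return yields a promise of undefined. A quick illustration (the demo name is only for this sketch):
async function demo() {
  return "Temp Name"; // implicitly wrapped in a promise
}

console.log(demo());                       // logs a Promise, not the string itself
demo().then((name) => console.log(name));  // logs 'Temp Name'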
Thanks to Mat J for the comment. I was able to simplify my code and also learn when not to use chaining.
Also thanks to Shadab's answer, which helped me learn that an async function always returns a promise, and that it was this implicit promise being returned rather than the actual string. I wasn't aware of that. (I am pretty new to JS.)
Here's my final code/logic, which works:
async function main_function(req, res) {
  try {
    const response = await fetch(...)
    const resp = await response.text();
    let dirs = ...some logic to extract number of directories...
    const tempPromises = [];
    for (let i = 0; i < dirs.length; i++) {
      tempPromises.push(getFilename(i));
    }
    Promise.all(tempPromises).then((result_new) => {
      console.log(result_new);
      res.send({ status: "ok" });
    });
  } catch (err) {
    console.log(err)
    res.send({ "status": "error" })
  }
}
async function getFilename(inp_a) {
  const response = await fetch(...)
  let respText = await response.text();
  return "Temp Name";
}

Trying to use async.parallelLimit to make successive parallel API calls

I have a data array of objects like [{number:1}, {number:2}, {number:3}... {number:100}], and I want to make parallel API calls in successive batches of 10 until the whole array has been processed.
How would I go about that?
Here's my code. It goes over the first 10, but then it stops.
const async = require("async");
const axios = require("axios");
const calls = require("../model/data"); // [{number:1}, {number:2},{number:3},...{number:100}]

let makeAPICall = function (request, callback) {
  axios
    .post("http://www.api.com/", {
      number: `${request.number}`,
      webhookURL: "http://localhost:8000/",
    })
    .then(function (response) {
      // note: callback is never invoked on success, so parallelLimit never
      // sees these tasks complete and stops after the first batch of 10
    })
    .catch(function (err) {
      callback(err);
    });
};

const functionArray = calls.map((request) => {
  return (callback) => makeAPICall(request, callback);
});

exports.startCalls = (req, res, next) => {
  async.parallelLimit(functionArray, 10, (err, results) => {
    if (err) {
      console.error("Error: ", err);
    } else {
      console.log("Results: ", results.length, results);
    }
  });
};
So, your approach is somewhat backwards: you already have something that returns a promise (Axios), which is the modern way to manage asynchronous operations, and you're trying to convert it back to a plain callback so you can use an old-fashioned library (the async library). It's also slower to run 10 and wait for all 10 to finish before starting any more, when that is typically not necessary.
Instead, I would suggest you use the Axios promise you already have. You can either use Bluebird's .map(), which has a concurrency setting, or you can use this bit of code that gives you a function for running promise-producing functions in parallel while controlling the maximum number that are in flight at any given time:
// takes an array of items and a function that returns a promise
function mapConcurrent(items, maxConcurrent, fn) {
  let index = 0;
  let inFlightCntr = 0;
  let doneCntr = 0;
  let results = new Array(items.length);
  let stop = false;

  return new Promise(function(resolve, reject) {
    function runNext() {
      let i = index;
      ++inFlightCntr;
      fn(items[index], index++).then(function(val) {
        ++doneCntr;
        --inFlightCntr;
        results[i] = val;
        run();
      }, function(err) {
        // set flag so we don't launch any more requests
        stop = true;
        reject(err);
      });
    }

    function run() {
      // launch as many as we're allowed to
      while (!stop && inFlightCntr < maxConcurrent && index < items.length) {
        runNext();
      }
      // if all are done, then resolve parent promise with results
      if (doneCntr === items.length) {
        resolve(results);
      }
    }

    run();
  });
}
You would then use it like this:
let makeAPICall = function(request) {
  return axios.post("http://www.api.com/", {
    number: `${request.number}`,
    webhookURL: "http://localhost:8000/",
  });
};

mapConcurrent(calls, 10, makeAPICall).then(results => {
  // all results in order here
  console.log(results);
}).catch(err => {
  console.log(err);
});
See another similar issue here: Promise.all consumes all my RAM
If you really want to run them in fixed batches where the whole batch finishes before you run any more requests, you could do something like this:
const axios = require("axios");
const calls = require("../model/data"); // [{number:1}, {number:2},{number:3},...{number:100}]

function makeAPICall(request) {
  return axios.post("http://www.api.com/", {
    number: `${request.number}`,
    webhookURL: "http://localhost:8000/",
  });
}

async function runBatches(array, batchSize, fn) {
  let index = 0;
  let results = [];
  while (index < array.length) {
    let promises = [];
    for (let num = 0; num < batchSize && index < array.length; ++num) {
      promises.push(fn(array[index++]));
    }
    let batchResults = await Promise.all(promises);
    results.push(...batchResults);
  }
  return results;
}

runBatches(calls, 10, makeAPICall).then(results => {
  // all results in order here
  console.log(results);
}).catch(err => {
  console.log(err);
});

foreach loop in sync function in nodejs

I have this code written:
function getDetails (req, res) {
const dbQuery = `call spGetSLAReportsDetails('${req.body.domainId}', ${req.body.days},'${req.body.type}','${req.body.app}')`
try {
connectDatabase(dbQuery).then((rows) => {
if (!_.isEmpty(rows.dbData) && !_.isEmpty(rows.dbData[0])) {
const resultList = []
rows.dbData.pop()
var bar = new Promise((resolve, reject) => {
rows.dbData[0].forEach((element, index, array) => {
let query = `select * from YCW.YWFWIC ic where ic.witem=${element.witem} and ic.reqno=${element.reqno};`
connectDatabase(query).then((data) =>{
for (var i = 0; i < data.dbData.length; i++) {
element[data.dbData[i]["cfield"]] = data.dbData[i]["cvalue"]
}
resultList.push(element)
// console.log(resultList)
}).catch((err) => {
console.log(err)
})
if (index === array.length -1) resolve();
});
});
bar.then(() => {
console.log(resultList);
});
res.status(msgCodeJson.ERR004.code).send({
result: resultList })
} else {
console.log("empty array")
res.status(msgCodeJson.ERR004.code).send({
message : "No data found"
})
// httpResponseHandlerError(res, msgCodeJson.ERR001.code, msgCodeJson.ERR001.msg)
}
}).catch(() => {
httpResponseHandlerError(res, msgCodeJson.ERR002.code, msgCodeJson.ERR002.msg)
})
} catch (err) {
httpResponseHandlerError(res, msgCodeJson.ERR009.code, msgCodeJson.ERR009.msg)
}
}
module.exports.getDetails = getDetails
I want the data to end up in resultList, but I get an empty list after all the operations, while inside the forEach loop I am getting the proper output.
I tried an async forEach loop but ran into a syntax error.
Kindly help with this issue.
As mentioned in the comment of the code you're using (see Best way to wait for .forEach() to complete):
This is OK if there is no async processing inside the loop.
Yet you have an async operation inside your forEach callback, namely this:
connectDatabase(query).then((data) => {
for (var i = 0; i < data.dbData.length; i++) {
element[data.dbData[i]["cfield"]] = data.dbData[i]["cvalue"]
}
resultList.push(element)
}).catch((err) => {
console.log(err)
})
You'll need to resolve the "outer/parent" promise from inside the "inner/child" promises.
I suggest using a good old regular for loop, or checking the count of resolved promises against rows.dbData[0].length and calling a final function once they match; a sketch of the first option follows below.
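A minimal sketch of that rewrite, assuming connectDatabase returns a promise as in the question (error handling and the response code are left out; buildResultList is a hypothetical helper name):
async function buildResultList(rows) {
  const resultList = [];
  // a plain for...of loop lets us await each query before moving on
  for (const element of rows.dbData[0]) {
    const query = `select * from YCW.YWFWIC ic where ic.witem=${element.witem} and ic.reqno=${element.reqno};`;
    const data = await connectDatabase(query);
    for (let i = 0; i < data.dbData.length; i++) {
      element[data.dbData[i]["cfield"]] = data.dbData[i]["cvalue"];
    }
    resultList.push(element);
  }
  // resolved only after every row has been processed
  return resultList;
}
getDetails would then await buildResultList(rows) (or chain .then on it) before calling res.status(...).send(...).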

How to wait for an asynchronous process to complete inside a for loop before incrementing the loop

I need to iterate through an array. With each iteration, I need to update my database. I need to wait for the first update to be complete and then make the second update.
After searching through several answers, I found the async/await feature of ES2017. However, I have not been able to implement it so far. The updates happen randomly and not in sequence. Please let me know how to implement async/await in this situation.
Here is my code snippet:
function findRecipe(product, qty) {
return new Promise((resolve, reject) => {
Recipe.findOne({
product: product
}, (err, recipe) => {
if (err) {
reject(err)
} else {
for (let i = 0; i < recipe.items.length; i++) {
Item.findOne({
name: recipe.items[i].name
}, (err, item) => {
if (err) {
reject(err)
} else {
var lessAmt = recipe.quantities[i] * qty;
item.stock -= lessAmt;
item.save((err, item) => {
if (err) {
console.log(err)
} else {
resolve(item)
}
})
}
})
}
}
})
});
}
for (let i = 0; i < bill.product.length; i++) {
//Calling function for updates for each item
findRecipe(bill.product[i], bill.qty[i])
}
It looks like you are nearly there. Just wrap the loop in a function and make it async.
async function updateAllRecipe() {
  for (let i = 0; i < bill.product.length; i++) {
    // Calling function for updates for each item
    await findRecipe(bill.product[i], bill.qty[i])
  }
}
But seriously, I think you can leverage parallelism here using Promise.all. Is it really necessary to wait for one findRecipe call to finish before queuing the next one? If not, use Promise.all so it performs faster, as sketched below.
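A minimal sketch of that Promise.all variant, assuming findRecipe returns a promise as in the question (updateAllRecipesParallel is just an illustrative name):
async function updateAllRecipesParallel() {
  // start every update at once and wait for all of them to finish
  const updatedItems = await Promise.all(
    bill.product.map((product, i) => findRecipe(product, bill.qty[i]))
  );
  return updatedItems;
}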
Async/await is easy to implement once you know the basics of the asynchronous nature of Node.js. I have used a for...of loop here, which awaits each iteration before moving on to the next.
// Async function to perform database updates
async function findRecipe(product, qty) {
  try {
    let recipe = await Recipe.findOne({ product: product });
    let i = 0;
    for (const itemObj of recipe.items) {
      let item = await Item.findOne({ name: itemObj.name });
      var lessAmt = recipe.quantities[i] * qty;
      item.stock -= lessAmt;
      let updatedItem = await item.save();
      i++;
    }
    return true;
  }
  catch (err) {
    return err;
  }
}

async function someasyncFunction() {
  for (let i = 0; i < bill.product.length; i++) {
    // Calling function for updates for each item
    await findRecipe(bill.product[i], bill.qty[i])
  }
}
Use Promise.all and run the operations in parallel. It'll improve the performance of the API.
async function findRecipe(product, qty) {
  try {
    let recipe = await Recipe.findOne({
      product: product
    });
    const items = await Promise.all(recipe.items.map(itemObj => Item.findOne({
      name: itemObj.name
    })));
    const updatedItems = await Promise.all(items.map((item, i) => {
      var lessAmt = recipe.quantities[i] * qty;
      item.stock -= lessAmt;
      return item.save();
    }));
    return true;
  } catch (err) {
    return err;
  }
}

async function someasyncFunction() {
  await Promise.all(bill.product.map((product, i) => findRecipe(product, bill.qty[i])));
}

How can I use 'finally' within a 'Promise' clause in NodeJS?

I want something like this:
let promises = [];
for (let i = 0; i < some_length; i++) {
  let output = '';
  promises.push(some_func(i)
    .then((result) => {
      output = result;
    })
    .catch((error) => {
      output = error.message;
    })
    .finally(() => {
      console.log(output);
    })
  );
}
return Promise.all(promises);
But I get a runtime error .then(...).catch(...).finally is not a function.
How can I resolve this?
Node 10 finally added support for it. Tested with node 10.7.0.
Promise.resolve().finally(() => {
console.log("It finally works!")
})
It finally works!
(Chrome and Firefox also support it btw.)
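On Node versions older than 10, a roughly equivalent pattern is to run the same cleanup from both the fulfillment and rejection handlers of a final .then; a minimal sketch based on the loop above:
promises.push(some_func(i)
  .then((result) => {
    output = result;
  })
  .catch((error) => {
    output = error.message;
  })
  .then(
    () => { console.log(output); }, // fulfilled path
    () => { console.log(output); }  // rejected path (rarely reached here, since catch already handled errors)
  )
);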
Actually, I think your some_func function isn't returning a Promise. then, catch and finally only have meaning on a JavaScript Promise, so I think you must declare some_func so that it returns a new instance of the Promise object; see the code below:
let promises = [];
for (let i = 0; i < some_length; i++) {
  let output = '';
  let some_func = (i) => {
    return new Promise(function(resolve, reject) {
      // resolve asynchronously after 100 ms
      setTimeout(() => resolve(`success: ${i}`), 100);
    });
  }
  promises.push(some_func(i)
    .then((result) => {
      output = result;
    })
    .catch((error) => {
      output = error.message;
    })
    .finally(() => {
      console.log(output);
    })
  );
}
return Promise.all(promises);
Maybe this code has some other errors; I don't know, because I have not tested it, but your error is undoubtedly due to what I described above.
