nodejs express nested sequelize async await - node.js

I could not get the nested logic to work. I need to combine data from 2 tables and return it to the request. I do not want a join table, because I need to return an individual record from TableA first and then combine it with the TableB record before it is returned. Below is my simplified code
// Handler meant to merge rows from TableA and TableB into one response.
// NOTE(review): as posted this does not work — see the inline comments.
exports.get_caution_reasons = async (req, res) => {
let return_data = [];
await db.sequelize.query("SELECT TableA xxxxx",{
type: QueryTypes.SELECT
}).then(recordA => {
for (let index = 0; index < recordA.length; index++) {
// NOTE(review): shorthand properties cannot be member expressions — this
// line is a SyntaxError as written; the fields need names, e.g.
// {xxx: recordA[index].xxx, yyy: recordA[index].yyy}.
return_data.push({recordA[index].xxx, recordA[index].yyy})
// BUG: the promise from this inner query is never awaited (nor returned),
// so the loop finishes and res.json below fires before any TableB rows
// arrive — which is why only TableA data is returned.
db.sequelize.query("SELECT TableB xxxxx WHERE zzz=recordA.zzz",{
type: QueryTypes.SELECT
}).then(recordB => {
for (let index = 0; index < recordB.length; index++) {
return_data.push({recordB[index].xxx, recordB[index].yyy})
}
})
}
// Sent while the TableB queries are still pending.
res.status(200).json({data: return_data});
})
};
It only returns the records for TableA. I tried various async and await combinations to get recordB in there, but without success. Any help is appreciated.

Probably something like that should work:
exports.get_caution_reasons = async (req, res, next) => {
try {
let options = { type: QueryTypes.SELECT }
let data = []
let result_a = await db.sequelize.query("SELECT TableA xxxxx", options)
for (let index = 0; index < result_a.length; index++) {
data.push({ /* whatever you need... */ })
let result_b = await db.sequelize.query("SELECT TableB xxxxx WHERE zzz=recordA.zzz", options)
for (let index = 0; index < result_b.length; index++) {
data.push({ /* ... */ })
}
}
res.json({ data })
} catch (err) {
next(err)
}
}

Related

How to use dynamoDB batchWriteItem with nodejs sdk?

I have a use case where I need to perform a batch_write operation on dynamodb. I referred this article which has a good solution for similar use case. I implemented it with few cleanup in my code and it works as expected.
const _ = require('lodash');
// helper methods
// Run `cb(item, array)` for each element, one after another.
// BUG FIX: the original used Promise.all(array.map(...)), which starts every
// callback at once despite the forEach-style name. Callers that mutate shared
// state in the callback (e.g. batchWrite's batch buffer and counters) were
// racing each other. Awaiting each callback in turn restores the sequential
// semantics the callers rely on.
async function asyncForEach(array, cb) {
  for (const item of array) {
    await cb(item, array);
  }
}
// Go-style result tuple: resolves to [null, data] on success, [err] on failure,
// so callers can destructure instead of writing try/catch.
async function to(promise) {
  try {
    const data = await promise;
    return [null, data];
  } catch (err) {
    return [err];
  }
}
// Thin promise adapter around the AWS SDK's callback-flavoured batchWriteItem.
const call = (params) => dynamoDb.batchWriteItem(params).promise();
// Write up to 25 items (DynamoDB's BatchWriteItem cap) to `tableName`.
async function batchWrite25(arrayOf25, tableName) {
  // 25 is as many as you can write in one time
  const params = {
    RequestItems: {
      [tableName]: arrayOf25.map((item) => ({
        PutRequest: {
          Item: item,
        },
      })),
    },
  };
  // BUG FIX: the original did `await to(call(params))` and discarded the
  // [err] tuple, so failed writes vanished silently. Let the rejection
  // propagate; callers decide whether a failed batch aborts the run.
  await call(params);
}
// Write `itemArray` to `tableName` in sequential batches of at most 25 items.
// BUG FIX: the original drove shared mutable state (arrayOf25, subIndex,
// mainIndex) from asyncForEach, whose Promise.all/map implementation runs all
// callbacks concurrently — items pushed while a batch write was in flight were
// wiped by the `arrayOf25 = []` reset and never written. A plain slice-based
// loop has no shared state to race on.
async function batchWrite(itemArray, tableName) {
  // 25 is as many as you can write in one time
  const BATCH_SIZE = 25;
  for (let start = 0; start < itemArray.length; start += BATCH_SIZE) {
    // Await each batch before building the next; `to` preserves the
    // original best-effort behaviour (batchWrite itself never rejects).
    await to(batchWrite25(itemArray.slice(start, start + BATCH_SIZE), tableName));
  }
}
// Public API: only the chunking entry point is exported; the helpers above
// are internal to this module.
module.exports = {
batchWrite,
};
However, the code looks a bit complicated, with so many callbacks involved. Is there a cleaner way of writing the same thing without using the `call`, `asyncForEach`, or `to` helper methods?
Here's one simple way to batch the items:
const BATCH_MAX = 25; // DynamoDB's per-call cap on batch write requests

// Write `items` to `table_name` in sequential batches of at most BATCH_MAX.
const batchWrite = async (items, table_name) => {
  const batchCount = Math.ceil(items.length / BATCH_MAX);
  for (let batch = 0; batch < batchCount; batch++) {
    // Take the next slice and shape each item as a PutRequest.
    const slice = items.slice(batch * BATCH_MAX, (batch + 1) * BATCH_MAX);
    const itemsArray = slice.map((item) => ({
      PutRequest: {
        Item: item,
      },
    }));
    const params = {
      RequestItems: {
        [table_name]: itemsArray,
      },
    };
    console.log("Batch", batch, "write", itemsArray.length, "items");
    // Await each batch so the writes happen one after another.
    await dynamodb.batchWriteItem(params).promise();
  }
};
To make the entire process asynchronous, you can convert this function to return an array of promises and later call Promise.all(promises) on that array. For example:
// Concurrent variant: collects the un-awaited write promises so the caller
// can run them all in parallel with Promise.all(promises).
// NOTE(review): `params` stands for the request object built in the elided
// loop body ("same code as above") — this snippet is illustrative and not
// runnable as written.
const batchWrite = (items, table_name) => {
const promises = [];
const BATCHES = Math.floor((items.length + BATCH_MAX - 1) / BATCH_MAX);
for (let batch = 0; batch < BATCHES; batch++) {
// same code as above here ...
promises.push(dynamodb.batchWriteItem(params).promise());
}
return promises;
};
A much cleaner way using lodash that worked for me is listed below. Hope this helps someone.
// Split the prepared payload into 25-item chunks (DynamoDB's batch limit)
// and issue every BatchWriteItem call concurrently.
batchWrite = async () => {
  // this.dynamoPayload holds the entire payload in DynamoDB's request format.
  const batches = _.chunk(this.dynamoPayload, 25);
  await Promise.all(
    batches.map(async (batch) => {
      const params = {
        RequestItems: {
          [this.tableName]: batch,
        },
      };
      await this.dynamoDb.batchWriteItem(params).promise();
    })
  );
};

Dynamic way to use the same code for employeeId and employeeName

I have this project in order to return "the employee's screenshots", and I set up the api to return the screenshots:
http://localhost:3000/employee/screenshot/14ll0a54kb9kkvh8?page=5
employeeId = "14ll0a54kb9kkvh8"
With this API I pass the employee id, and Postman returns the screenshots for that employee (the code is at the bottom). But I also want to be able to pass the employee's name, not just the employee's id, to the API. How can I add this in a dynamic way — that is, without writing all of the same code again?
I need to have two api with the same code ,the first api with id and the second with name:
http://localhost:3000/employee/screenshot/14ll0a54kb9kkvh8?page=5
http://localhost:3000/employee/screenshot/George?page=5
screenshotServices.js:
// Fetch one page of screenshots for `employeeId`: query the image paths from
// the db, then read the actual image files from disk.
// Returns the directory-read result, or null on any db/file error (the
// controller maps null to an HTTP 500).
// NOTE(review): mixing `await` with `.then/.catch` works but is harder to
// follow than a plain try/catch around awaited calls.
async getAll(employeeId, pageNumber, pageSize) {
// Grab images from db according to pagination data and uid
const dbImages = await ScreenshotModel
.findAndCountAll({
where: {
employeeId: employeeId
},
limit: pageSize,
offset: (pageNumber - 1) * pageSize
})
.then(screenshots => {
// Unwrap the raw row data from each returned model instance.
const imagesData = [];
for (let i = 0; i < screenshots.rows.length; i++) {
imagesData.push(screenshots.rows[i]['dataValues']);
}
return imagesData;
})
.catch(dbError => {
// On db failure: log and signal the caller with null.
console.log(dbError);
return null;
});
if (dbImages === null) return dbImages;
// Database returns images paths
// Now we need to get the actual images from files
// getting images paths from db response
const imagePaths = [];
for (let i = 0; i < dbImages.length; i++) {
imagePaths.push(dbImages[i]['imagePath']);
}
// Screenshot files live in a per-employee directory keyed by employee id.
const directoryPath = rootDirectory + `/images/screenshots/${employeeId}`;
// Grabbing images from files
return await ScreenshotModel
.findAllInDirectoryWithSpecifiedImagePaths(directoryPath, imagePaths)
.then(readFromDirectoryResponse => {
return readFromDirectoryResponse;
})
.catch(readFromDirectoryError => {
// File-read failures are also collapsed to null for the controller.
console.log(readFromDirectoryError);
return null;
});
}
module.exports = ScreenshotService;
screenshotController.js:
const _getAll = async (req, res) => {
// Grabbing data
const employeeId = req.params['id'];
// Pagination data
const pageNumber = +req.query['page'];
const pageSize = 3;
// console.log(uid);
// console.log(pageNumber);
const screenshots = await ScreenshotService.Instance.getAll(employeeId, pageNumber,
pageSize);
if(screenshots === null)
return res.status(500).json({message: 'failed', screenshots: screenshots});
return res.status(200).json({message: 'succeeded', screenshots: screenshots});
};
module.exports = {
getAll: _getAll
};
the best solution should be separate the entry point (the endpoint) and receive different param values, then each endpoint goes to a common function.
other solution is to write a regex then you can identify if the incoming value is an id or a name.
EDIT:
screenshotController.js:
// Matches values containing at least one digit AND at least one letter — used
// to distinguish alphanumeric ids (e.g. "14ll0a54kb9kkvh8") from plain names
// (e.g. "George"). Define this in a shared/global place.
const criteriaRegex = new RegExp('^(?=.*[0-9])(?=.*[a-zA-Z])([a-zA-Z0-9]+)$');

// GET /employee/screenshot/:id?page=N — :id may be an employee id or a name.
const _getAll = async (req, res) => {
  // BUG FIX: the original declared `const filter = {}` and then reassigned it
  // below, which throws "Assignment to constant variable" at runtime.
  let filter;
  // Grabbing data
  const critieria = req.params['id'];
  if (criteriaRegex.test(critieria)) {
    // Mixed letters + digits => assume it is an employee id.
    filter = { employeeId: critieria };
  } else {
    filter = { employeeName: critieria };
  }
  // Pagination data
  const pageNumber = +req.query['page'];
  const pageSize = 3;
  const screenshots = await ScreenshotService.Instance.getAll(filter, pageNumber,
    pageSize);
  if (screenshots === null)
    return res.status(500).json({ message: 'failed', screenshots: screenshots });
  return res.status(200).json({ message: 'succeeded', screenshots: screenshots });
};
// Route handler exported under the same name as before, so the router wiring
// is unchanged.
module.exports = {
getAll: _getAll
};
screenshotServices.js:
async getAll(filter, pageNumber, pageSize) {
// Grab images from db according to pagination data and uid
const dbImages = await ScreenshotModel
.findAndCountAll({
where: {
...filter
},
limit: pageSize,
offset: (pageNumber - 1) * pageSize
})
.then(screenshots => {
// console.log(dbRes);
const imagesData = [];
for (let i = 0; i < screenshots.rows.length; i++) {
imagesData.push(screenshots.rows[i]['dataValues']);
}
return imagesData;
})
.catch(dbError => {
console.log(dbError);
return null;
});
if (dbImages === null) return dbImages;
// Database returns images paths
// Now we need to get the actual images from files
// getting images paths from db response
const imagePaths = [];
for (let i = 0; i < dbImages.length; i++) {
imagePaths.push(dbImages[i]['imagePath']);
}
const directoryPath = rootDirectory + `/images/screenshots/${employeeId}`;
// Grabbing images from files
return await ScreenshotModel
.findAllInDirectoryWithSpecifiedImagePaths(directoryPath, imagePaths)
.then(readFromDirectoryResponse => {
return readFromDirectoryResponse;
})
.catch(readFromDirectoryError => {
console.log(readFromDirectoryError);
return null;
});
}
module.exports = ScreenshotService;

How to return 2 arrays after saving data to mongodb using node js

I need help with the code below. I get an array of items from the client; the goal is to save them to MongoDB and return the list classified into 'saved' and 'failed' items. An example of failed items are those that are duplicates on a unique attribute.
I know the code below will not work because of variable scope. how do i get around it? the code below returns an empty array for both savedItems and failedItems. Thanks!
// POST /addItems — tries to save each item and classify it as saved/failed.
// NOTE(review): as posted this fails in two ways — see inline comments.
router.post('/addItems', async (req, res, next) => {
let items = req.body;
let result = {
savedItems: [],
failedItems: []
};
function saveData() {
// BUG: `i` and `item` are implicit globals (no let/const declaration), so
// every deferred callback below shares the same `item` binding.
for (i = 0; i < items.length; i++) {
item = items[i];
// BUG: Model.create is asynchronous — these callbacks fire after
// saveData has already returned, so the pushes happen too late to be
// included in the response.
Model.create({ ...item }, (err, data) => {
if (err) {
result.failedItems.push(item);
} else {
result.savedItems.push(item);
}
});
}
return result;
}
// BUG: saveData returns a plain object, not a promise, so calling .then on
// it throws a TypeError at runtime.
saveData().then(result => {
res.send({
results: result
});
});
});
// POST /addItems — save each posted item sequentially and report the result.
router.post('/addItems', async (req, res, next) => {
  // try/catch is required around awaited calls in an async handler.
  try {
    const items = req.body;
    const result = {
      savedItems: [],
      failedItems: []
    };
    // Await Model.create's returned promise for each item instead of using
    // callbacks; a rejection jumps straight to the outer catch.
    for (const item of items) {
      await Model.create({ ...item });
      result.savedItems.push(item);
      // To also collect per-item failures in result, wrap the two lines
      // above in their own try/catch and push to result.failedItems on error.
    }
    res.send({
      results: result
    });
  } catch (err) {
    // Unexpected errors and rejected promises all land here.
    res.send({
      error: err
    });
  }
});
Your saveData method didn't return a promise, try this
// Wrap every Model.create callback in a promise and resolve with the combined
// result once all saves have settled.
function saveData() {
  return new Promise(resolve => {
    let items = req.body;
    let result = {
      savedItems: [],
      failedItems: []
    };
    let promises = [];
    // BUG FIX: `i` and `item` were implicit globals in the original; by the
    // time a create callback ran, the shared `item` had advanced to the last
    // element, so every push recorded the same item. Block-scoped
    // declarations give each callback its own binding.
    for (let i = 0; i < items.length; i++) {
      const item = items[i];
      let promise = new Promise(resolve => {
        Model.create({ ...item }, (err, data) => {
          if (err) {
            result.failedItems.push(item);
          } else {
            result.savedItems.push(item);
          }
          // Resolve regardless of outcome; classification is in `result`.
          resolve();
        });
      });
      promises.push(promise);
    }
    Promise.all(promises).then(() => resolve(result));
  });
}

Trouble updating an outer array from inside a mongoose query

I have an array that I'd like to fill with objects, but the console prints out []. I know this is a problem with the async nature of mongoose, but I'm not sure how to fix it... I want itemCart to be filled with all the user's items.
Any guidance/help would be appreciated. Thanks!
Here is the code:
// Attempts to collect all items in the user's cart.
// NOTE(review): itemCart is logged before any Item.findById resolves — see below.
let itemCart = [];
User
.findById(req.params.username)
.then(user => {
for (let i = 0; i < user.cart.length; i++) {
let itemId = user.cart[i];
// BUG: these lookups are fire-and-forget — their promises are neither
// awaited nor collected, so the pushes happen after the log below runs.
Item.findById(itemId).then(item => {
itemCart.push(item);
});
}
console.log(itemCart); // returns [] — the queries above have not resolved yet
})
.catch(err => {
console.error(err)
});
You can apply async/await like this:-
// Await each Item lookup inside the loop so the log cannot run ahead of the
// queries. The .then callback is marked async to allow `await` inside it.
let itemCart = [];
User
.findById(req.params.username)
.then(async user => {
for (let i = 0; i < user.cart.length; i++) {
let itemId = user.cart[i];
// BUG FIX: the original read `Let item = ...` — capital-L `Let` is a
// SyntaxError; the keyword is lowercase `let`.
let item = await Item.findById(itemId);
itemCart.push(item);
}
console.log(itemCart); // now it will contain the fetched items
})
.catch(err => {
console.error(err)
});

Is there a way to avoid setting a timeout?

I'm trying to collect all values from a mysql table with all the values of the referenced_table_name for each index of the table.
How can I avoid setting an arbitrary timeout while waiting for a promise?
To collect the expected information I need to set an arbitrary timeout; otherwise my object is undefined...
module.exports = {
// Builds a JSON description of `tablename`: its rows plus, for each indexed
// column, all values of the referenced table.
// NOTE(review): the setTimeout calls below only *hope* the inner promises
// have resolved by then — nothing awaits them, which is why removing the
// timeouts leaves allValuesRefTable/indexedArr empty.
getTable: async (req, res) => {
const tablename = req.params.table,
dbName = req.params.dbName;
let jsonResult = {};
getTableValues(dbName, tablename)
.then(tableValues => {
getTableIndexedCol(dbName, tablename)
.then(indexedColumns => {
let indexedArr = {};
for (let index = 0; index < indexedColumns.length; index++) {
const element = indexedColumns[index],
column = element.column_name,
referencedTable = element.referenced_table_name;
let allValuesRefTable = new Array();
// BUG: this promise is never awaited or chained, so the timeout below
// races against it instead of waiting for it.
getTableValues(dbName, referencedTable)
.then(referencedTableValues => {
for (let i = 0; i < referencedTableValues.length; i++) {
const el = referencedTableValues[i];
allValuesRefTable.push(el.name);
}
})
.catch(err => console.log(err));
/*IF NO TIMEOUT DOESN'T WORK*/
setTimeout(function(){
indexedArr[column] = allValuesRefTable;
}, 100);
}
// Arbitrary 5 s delay hoping all inner queries have finished by then.
setTimeout(function(){
jsonResult = {
name: tablename,
rows : tableValues,
rowIndexed : indexedArr
}
res.json(jsonResult);
}, 5000);
})
.catch(err => console.log(err));
})
.catch(err => console.log(err));
}
};
Is there a way to avoid using setTimeout? Or how can I wait until the promise is resolved?
Here is my function getTableIndexedCol for example:
// Resolves with the indexed-column metadata for `tablename`.
// NOTE(review): wrapping an already-awaitable query in `new Promise` is the
// promise-constructor antipattern — the awaited result could be returned
// directly from the async function.
async function getTableIndexedCol(dbName, tablename) {
const sqlRefTable = SELECT...;
return new Promise (async function(resolve, reject){
try{
// NOTE(review): refTable/refTableFields are undeclared here — implicit
// globals via destructuring assignment.
[refTable, refTableFields] = await promisePool.query(sqlRefTable)
}
catch(err){
reject(err)
// BUG: execution continues after reject — the setTimeout below still
// fires and calls resolve with an undefined refTable.
}
// The 500 ms delay is unnecessary: the query was already awaited above.
setTimeout(function () {
resolve(refTable);
}, 500);
})
// NOTE(review): the closing "}" of getTableIndexedCol is missing as posted.
If you are already using async/await you can use it all the way and avoid the "Promise Hell" (nested .then calls):
module.exports = {
getTable: async (req, res) => {
try {
const tablename = req.params.table,
dbName = req.params.dbName;
const tableValues = await getTableValues(dbName, tablename);
const indexedColumns = await getTableIndexedCol(dbName, tablename);
let indexedArr = {};
for (let index = 0; index < indexedColumns.length; index++) {
const element = indexedColumns[index],
column = element.column_name,
referencedTable = element.referenced_table_name;
let allValuesRefTable = new Array();
const referencedTableValues = await getTableValues(dbName, referencedTable);
for (let i = 0; i < referencedTableValues.length; i++) {
const el = referencedTableValues[i];
allValuesRefTable.push(el.name);
}
indexedArr[column] = allValuesRefTable;
}
const = jsonResult = {
name: tablename,
rows: tableValues,
rowIndexed: indexedArr
}
res.json(jsonResult);
} catch (err) {
console.log(err);
}
}
};

Resources