Dynamic way to use the same code for employeeId and employeeName - node.js

I have this project in order to return "the employee's screenshots", and I set up the api to return the screenshots:
http://localhost:3000/employee/screenshot/14ll0a54kb9kkvh8?page=5
employeeId="14ll0a54kb9kkvh8".............................................,
and with this api i pass the employee Id and the postman return screenshots for this employee , and the code is at the bottom, but in any case I want to pass the "employee’s name" not the "employee's Id" to the api, so how can I add this thing to the code, "in a dynamic way", meaning without returning to write all this same code ?
I need to have two api with the same code ,the first api with id and the second with name:
http://localhost:3000/employee/screenshot/14ll0a54kb9kkvh8?page=5
http://localhost:3000/employee/screenshot/George?page=5
screenshotServices.js:
async getAll(employeeId, pageNumber, pageSize) {
// Grab images from db according to pagination data and uid
const dbImages = await ScreenshotModel
.findAndCountAll({
where: {
employeeId: employeeId
},
limit: pageSize,
offset: (pageNumber - 1) * pageSize
})
.then(screenshots => {
// console.log(dbRes);
const imagesData = [];
for (let i = 0; i < screenshots.rows.length; i++) {
imagesData.push(screenshots.rows[i]['dataValues']);
}
return imagesData;
})
.catch(dbError => {
console.log(dbError);
return null;
});
if (dbImages === null) return dbImages;
// Database returns images paths
// Now we need to get the actual images from files
// getting images paths from db response
const imagePaths = [];
for (let i = 0; i < dbImages.length; i++) {
imagePaths.push(dbImages[i]['imagePath']);
}
const directoryPath = rootDirectory + `/images/screenshots/${employeeId}`;
// Grabbing images from files
return await ScreenshotModel
.findAllInDirectoryWithSpecifiedImagePaths(directoryPath, imagePaths)
.then(readFromDirectoryResponse => {
return readFromDirectoryResponse;
})
.catch(readFromDirectoryError => {
console.log(readFromDirectoryError);
return null;
});
}
module.exports = ScreenshotService;
screenshotController.js:
// GET /employee/screenshot/:id?page=N — returns one page of screenshots.
const _getAll = async (req, res) => {
  // Grabbing data: employee id from the route parameter.
  const employeeId = req.params['id'];
  // Pagination data.
  // FIX: `+req.query['page']` produced NaN when ?page= was missing or not a
  // number, which propagated into a NaN offset; default to page 1 instead.
  const parsedPage = Number.parseInt(req.query['page'], 10);
  const pageNumber = Number.isNaN(parsedPage) || parsedPage < 1 ? 1 : parsedPage;
  const pageSize = 3;
  const screenshots = await ScreenshotService.Instance.getAll(employeeId, pageNumber,
    pageSize);
  if (screenshots === null)
    return res.status(500).json({message: 'failed', screenshots: screenshots});
  return res.status(200).json({message: 'succeeded', screenshots: screenshots});
};
module.exports = {
getAll: _getAll
};

the best solution should be separate the entry point (the endpoint) and receive different param values, then each endpoint goes to a common function.
other solution is to write a regex then you can identify if the incoming value is an id or a name.
EDIT:
screenshotController.js:
const criteriaRegex = new RegExp('^(?=.*[0-9])(?=.*[a-zA-Z])([a-zA-Z0-9]+)$'); // define this in a global place
// GET /employee/screenshot/:id — `:id` may be an employee id or a plain name.
const _getAll = async (req, res) => {
  // Grabbing data
  const criteria = req.params['id'];
  // FIX: `filter` was declared `const filter = {}` and then reassigned in both
  // branches, which throws "TypeError: Assignment to constant variable".
  // Initialize it in a single step instead. A value containing both letters
  // and digits is assumed to be an employee id; otherwise it is a name.
  const filter = criteriaRegex.test(criteria)
    ? { employeeId: criteria }
    : { employeeName: criteria };
  // Pagination data: default to page 1 when ?page= is missing or invalid.
  const parsedPage = Number.parseInt(req.query['page'], 10);
  const pageNumber = Number.isNaN(parsedPage) || parsedPage < 1 ? 1 : parsedPage;
  const pageSize = 3;
  const screenshots = await ScreenshotService.Instance.getAll(filter, pageNumber,
    pageSize);
  if (screenshots === null)
    return res.status(500).json({message: 'failed', screenshots: screenshots});
  return res.status(200).json({message: 'succeeded', screenshots: screenshots});
};
module.exports = {
getAll: _getAll
};
screenshotServices.js:
async getAll(filter, pageNumber, pageSize) {
// Grab images from db according to pagination data and uid
const dbImages = await ScreenshotModel
.findAndCountAll({
where: {
...filter
},
limit: pageSize,
offset: (pageNumber - 1) * pageSize
})
.then(screenshots => {
// console.log(dbRes);
const imagesData = [];
for (let i = 0; i < screenshots.rows.length; i++) {
imagesData.push(screenshots.rows[i]['dataValues']);
}
return imagesData;
})
.catch(dbError => {
console.log(dbError);
return null;
});
if (dbImages === null) return dbImages;
// Database returns images paths
// Now we need to get the actual images from files
// getting images paths from db response
const imagePaths = [];
for (let i = 0; i < dbImages.length; i++) {
imagePaths.push(dbImages[i]['imagePath']);
}
const directoryPath = rootDirectory + `/images/screenshots/${employeeId}`;
// Grabbing images from files
return await ScreenshotModel
.findAllInDirectoryWithSpecifiedImagePaths(directoryPath, imagePaths)
.then(readFromDirectoryResponse => {
return readFromDirectoryResponse;
})
.catch(readFromDirectoryError => {
console.log(readFromDirectoryError);
return null;
});
}
module.exports = ScreenshotService;

Related

How to use dynamoDB batchWriteItem with nodejs sdk?

I have a use case where I need to perform a batch_write operation on dynamodb. I referred this article which has a good solution for similar use case. I implemented it with few cleanup in my code and it works as expected.
const _ = require('lodash');
// helper methods
// Invoke `cb(item, array)` for every element. Callbacks are started in array
// order but run concurrently; resolves once all of them have completed.
async function asyncForEach(array, cb) {
  const pending = array.map((item) => cb(item, array));
  await Promise.all(pending);
}
// Await-friendly result wrapper: resolves to [null, data] on success and to
// a single-element [err] on rejection, so callers can destructure instead of
// using try/catch.
const to = (promise) =>
  promise.then(
    (data) => [null, data],
    (err) => [err],
  );
// Thin promise adapter around the AWS SDK v2 callback-style batchWriteItem.
const call = (params) => dynamoDb.batchWriteItem(params).promise();
// Issue a single BatchWriteItem request for up to 25 items — 25 is the
// DynamoDB per-call maximum. The [err, data] result of `to` is deliberately
// not inspected (best-effort write), matching this helper's original contract.
async function batchWrite25(arrayOf25, tableName) {
  const itemsArray = arrayOf25.map((item) => ({
    PutRequest: {
      Item: item,
    },
  }));
  const params = {
    RequestItems: {
      [tableName]: itemsArray,
    },
  };
  await to(call(params));
}
// Write itemArray to `tableName` in chunks of 25 (the BatchWriteItem limit).
// NOTE(review): asyncForEach starts every callback via Promise.all, but the
// synchronous prefix of each callback (the push and the two counter
// increments) runs in array order before any callback suspends at its await,
// so the 25-item chunks happen to be formed correctly. The chunking still
// relies on shared mutable counters across concurrently running callbacks —
// fragile; a sequential for...of (or _.chunk) would be simpler and safer.
async function batchWrite(itemArray, tableName) {
  let mainIndex = 0;   // total items consumed so far
  let subIndex = 0;    // items accumulated in the current chunk
  let arrayOf25 = [];  // the chunk being built
  const arrayLength = itemArray.length;
  await asyncForEach(itemArray, async (item) => {
    arrayOf25.push(item);
    subIndex += 1;
    mainIndex += 1;
    // 25 is as many as you can write in one time
    if (subIndex % 25 === 0 || mainIndex === arrayLength) {
      // flush the chunk; the [err] result from `to` is discarded (best-effort)
      await to(batchWrite25(arrayOf25, tableName));
      subIndex = 0; // reset
      arrayOf25 = [];
    }
  });
}
module.exports = {
batchWrite,
};
However, the code looks a bit complicated here with so many callbacks involved. Is there a cleaner way of writing the same thing without using -- call or asyncForEach or to methods ?
Here's one simple way to batch the items:
const BATCH_MAX = 25;

// Write `items` to DynamoDB sequentially in batches of at most BATCH_MAX
// (the BatchWriteItem per-call limit). Resolves after the last batch is sent.
const batchWrite = async (items, table_name) => {
  for (let start = 0; start < items.length; start += BATCH_MAX) {
    // Wrap this slice of items as PutRequests.
    const chunk = items.slice(start, start + BATCH_MAX);
    const itemsArray = chunk.map((item) => ({
      PutRequest: {
        Item: item,
      },
    }));
    const params = {
      RequestItems: {
        [table_name]: itemsArray,
      },
    };
    console.log("Batch", start / BATCH_MAX, "write", itemsArray.length, "items");
    await dynamodb.batchWriteItem(params).promise();
  }
};
To make the entire process asynchronous, you can convert this function to return an array of promises and later call Promise.all(promises) on that array. For example:
// Variant from the answer: same chunking as above, but collects the per-batch
// promises so the caller can run them concurrently via Promise.all(promises).
// NOTE(review): illustrative sketch — `params` comes from the elided chunking
// code ("same code as above here"), so this snippet is not runnable as written.
const batchWrite = (items, table_name) => {
  const promises = [];
  // ceil(items.length / BATCH_MAX) without Math.ceil
  const BATCHES = Math.floor((items.length + BATCH_MAX - 1) / BATCH_MAX);
  for (let batch = 0; batch < BATCHES; batch++) {
    // same code as above here ...
    promises.push(dynamodb.batchWriteItem(params).promise());
  }
  return promises;
};
A much cleaner way using lodash that worked for me is listed below. Hope this helps someone.
// Lodash variant: _.chunk splits the payload into 25-item groups (DynamoDB's
// BatchWriteItem limit) and all batches are written concurrently.
// NOTE(review): class-field style (`this.dynamoPayload`, `this.tableName`,
// `this.dynamoDb`) — presumably lifted from inside a class body; the payload
// must already be in PutRequest/DeleteRequest format. Verify context.
batchWrite=async ()=> {
  const batchSplitArr=_.chunk(this.dynamoPayload,25); //dynamoPayload has the entire payload in the desired format for dynamodb insertion.
  await Promise.all(
    batchSplitArr.map(async (item) => {
      const params = {
        RequestItems: {
          [this.tableName]: item,
        },
      };
      await this.dynamoDb.batchWriteItem(params).promise();
    })
  );
};

mongoose find multiple filters not working

Good day guys, I am trying to set up a store filtering API with node and MongoDB in the format below.
// GET handler: filter, sort, project, and paginate products.
// FIX: price and rating filters were applied unconditionally, so when
// minPrice/maxPrice/minRating were absent the query contained
// { $gt: undefined } / { $lt: undefined }, which matches no documents —
// the reason combining filters returned an empty array.
getProduct = async (req, res) => {
  try {
    const {
      name,
      category,
      brand,
      minPrice,
      maxPrice,
      minRating,
      field,
      sort,
    } = req.query;
    // Build queryObjects only from parameters that were actually supplied.
    const queryObjects = {};
    if (name) {
      queryObjects.name = { $regex: name, $options: 'i' };
    }
    if (category) {
      queryObjects.category = { $regex: category, $options: 'i' };
    }
    if (brand) {
      queryObjects.brand = { $regex: brand, $options: 'i' };
    }
    const price = {};
    if (minPrice !== undefined) price.$gt = Number(minPrice);
    if (maxPrice !== undefined) price.$lt = Number(maxPrice);
    if (Object.keys(price).length > 0) {
      queryObjects.price = price;
    }
    if (minRating !== undefined) {
      queryObjects.rating = { $gt: Number(minRating) };
    }
    var result = Product.find(queryObjects);
    // sorting result: comma-separated sort keys, default by rating
    if (sort) {
      const sortList = sort.split(',').join(' ');
      result = result.sort(sortList);
    } else {
      result = result.sort('rating');
    }
    // show specific fields only
    if (field) {
      const fieldList = field.split(',').join(' ');
      result = result.select(fieldList);
    }
    console.log(queryObjects);
    // Pagination: only apply skip/limit when both are valid numbers
    // (previously Number(undefined) produced NaN skip/limit values).
    const page = Number(req.query.page);
    const limit = Number(req.query.limit);
    if (Number.isFinite(page) && Number.isFinite(limit)) {
      result = result.skip((page - 1) * limit).limit(limit);
    }
    const products = await result;
    res.send({ data: products });
  } catch (error) {
    res.send(error);
  }
};
When I make a GET request I get an empty array as my response (see the Postman response screenshot).
When I log queryObjects in the console I get the output shown in the queryObject screenshot.
everything works when I add the filters one by one.
what am I doing wrong and how can I pass in multiple filters to the find method in mongoose.
Please help, thanks

Why await does not wait till its completion. I can't get posts results with paginated values

ViewController.js File
const Article = require("../models/articleModel");
const catchAsync = require("../utils/catchAsync");
const APIFeatures = require("../utils/apiFeatures");
// Render/serve the home page feed, optionally scoped to one article id.
// NOTE(review): APIFeatures.pagination() is declared async, so the chain below
// evaluates to a pending Promise rather than the APIFeatures instance —
// requestedCategory.query / previousPage / nextPage are all undefined here.
// The chain result must be awaited first, e.g.
//   const requestedCategory = await new APIFeatures(...).filter().pagination();
// This is the cause of the undefined/null output described in the question.
exports.homePage = catchAsync(async (req, res, next) => {
  let filter = {};
  if (req.params.articleId) {
    filter = { article: req.params.articleId };
  }
  const requestedCategory = new APIFeatures(
    Article.find(filter),
    req.query,
    Article
  )
  .filter()
  .pagination();
  const posts = await requestedCategory.query;
  const { previousPage, nextPage } = requestedCategory;
  console.log(previousPage,nextPage);
  res.status(200).json(posts);
ApiFeatures.js
It has many methods one of them is pagination
// Query-builder helper: wraps a chainable query plus the request's query
// string, exposing pagination metadata (previousPage/nextPage).
class APIFeatures {
  constructor(query, queryString, Model) {
    this.query = query;            // chainable query builder
    this.queryString = queryString; // parsed request query params
    this.Model = Model;            // used only for the total document count
    this.previousPage = null;
    this.nextPage = null;
  }

  // Apply skip/limit for the requested page and record neighbouring pages.
  // NOTE: async — callers must await the returned promise before reading
  // previousPage/nextPage or using the updated query.
  async pagination() {
    const limit = 5;
    const page = this.queryString.page ? this.queryString.page * 1 : 1;
    const startIndex = (page - 1) * limit;
    const endIndex = page * limit;
    const total = await this.Model.countDocuments().exec();
    if (endIndex < total) {
      console.log("I got printed next page");
      this.nextPage = {
        page: page + 1,
        limit: limit,
      };
    }
    if (startIndex > 0) {
      console.log("I got printed prev page");
      this.previousPage = {
        page: page - 1,
        limit: limit,
      };
    }
    this.query = this.query.skip(startIndex).limit(limit);
    return this;
  }
}
module.exports = APIFeatures;
My output: it does not wait for the paginated result, and there are no posts results.
undefined
undefined
GET / 200 854.131 ms - -
I got printed next page
if I removed async/await in apifeatures.js then my output is
I can see the posts results, but not the previousPage and nextPage values
it got executed after the query
null
null
GET / 200 1238.407 ms - -

NodeJS Error in if else case - Can't set headers after they are sent

I know this is an old topic. There are many questions regarding it, but none of them guided me or gave me the actual concept.
My case is:
if something
render something
else
render somethingAnother
Why is this logic generates this error enter image description here
After 1st execution, I'm not able to continue this process (I could continue for a while, but after some time error will come), by pressing back button of browser and then returning back to the home page. Everytime I should restart my server using node command. Why headers won't reset if I press back button of browser, how to do some header reset or something which will correct my logic.
// Image-similarity upload service: compares an uploaded photo against order
// images recorded in SQL Server, using opencv4nodejs feature matching.
const cv = require('opencv4nodejs');
var async = require('async');
// NOTE(review): everything below is module-level mutable state shared by all
// requests; concurrent requests clobber each other's values — a root cause of
// the "Can't set headers after they are sent" symptom in the question.
var OrderID;
var OrderNo;
var compare = 0;
var CompanyName;
var notSimilar = 0;           // non-matching db images for the current upload
const download = require('image-downloader')
const distanceThreshold = 30; // max ORB match distance counted as "good"
var url;
var FolderPath;               // image-path rows loaded from the db at startup
var isSimilar = 0;
var j = 0;
var image1;                   // filename of the most recent upload
var dbPath;
var filterCount = 0;          // good-match counter for the current comparison
var image2;
var dbImgCount = 0;           // number of images recorded in the db
var express = require('express');
var request = require('request');
var app = express();
app.set('view engine', 'pug')
// Multer storage: keep uploads in the app directory, named field-timestamp.
var multer = require('multer');
var storage = multer.diskStorage({
  destination: function (req, file, callback) {
    callback(null, './');
  },
  filename: function (req, file, callback) {
    callback(null, file.fieldname + '-' + Date.now());
  }
});
var upload = multer({ storage : storage}).single('userPhoto');
const sql = require("mssql");
var config = {
  user: '***',
  password: '****',
  server: '192.168.5.100\\SQLEXPRESS',
  database: 'Test_MatdesignDB1',
  connectionTimeout: 300000,
  requestTimeout: 300000,
  pool: {
    idleTimeoutMillis: 300000,
    max: 100
  }
};
// Startup: cache the image count and all folder paths from the db.
sql.connect(config).then(pool => {
  return pool.request()
    .query('select count(*) from OrderImageUpload; select FolderPath from OrderImageUpload;')
}).then(result => {
  var a = JSON.stringify(result.recordset[0]);
  // NOTE(review): slicing characters 4..6 of the stringified row only works
  // while the count has exactly two digits — read the count column directly.
  dbImgCount = a.slice(4,6);
  FolderPath = result.recordsets[1];
  sql.close();
}).catch(err => {
  console.log(err);
  sql.close();
})
// Home page: serve the static upload form.
app.get('/', (req, res) => {
  res.sendFile(__dirname + "/index.html");
});
// POST /api/photo — accept one uploaded photo and compare it against every
// db-recorded image; render 'similar' on a match, 'notSimilar' when all fail.
// NOTE(review): this handler mutates module-level globals (filterCount,
// isSimilar, notSimilar, image1, url, dbPath, ...) shared by all requests, and
// matchFeatures() can call res.render() more than once per request — both are
// sources of the "Can't set headers after they are sent" error.
app.post('/api/photo',function(req,res){
  compare = 1;
  upload(req,res,function(err) {
    if(err) {
      console.log(err);
      res.send("File uploading error");
    }
    else{
      // console.log("Success");
      image1 = req.file.filename;
      // Compare img1 vs img2 with the supplied detector; renders a response
      // as a side effect whenever a verdict is reached.
      var matchFeatures = ({ url, img1, img2, detector, matchFunc }) => {
        // detect keypoints
        const keyPoints1 = detector.detect(img1);
        const keyPoints2 = detector.detect(img2);
        // compute feature descriptors
        const descriptors1 = detector.compute(img1, keyPoints1);
        const descriptors2 = detector.compute(img2, keyPoints2);
        // match the feature descriptors
        const matches = matchFunc(descriptors1, descriptors2);
        // only keep good matches
        const bestN = 40;
        const bestMatches = matches.sort(
          (match1, match2) => (match1.distance - match2.distance)
        ).slice(0, bestN);
        //console.log(bestMatches);
        // count matches under the distance threshold
        for(var i=0; i<bestN; i++){
          if((bestMatches[i].distance) <= (distanceThreshold)){
            filterCount++;
          }
        }
        // at least a quarter of the best matches must be "good"
        if(filterCount >= (bestN/4))
          isSimilar = 1;
        if(isSimilar){
          notSimilar = 0;
          filterCount = 0;
          isSimilar = 0;
          console.log("Similar images\n");
          dbPath = url;
          sql.close();
          // Look up the order behind the matching image and render it.
          // NOTE(review): string-concatenated SQL — switch to parameterized
          // queries; also, render may run after a previous render (see above).
          (async function() {
            try {
              let pool = await sql.connect(config)
              let result1 = await pool.request()
                .query("select OrderID from Test_MatdesignDB1.dbo.OrderImageUpload where FolderPath = '"+dbPath+"';")
              OrderID = result1.recordset[0].OrderID;
              let result2 = await pool.request()
                .query('select OrderNo , CompanyName from Test_MatdesignDB1.dbo.[Order] where OrderID = '+OrderID);
              OrderNo = result2.recordset[0].OrderNo;
              CompanyName = result2.recordset[0].CompanyName;
              res.render('similar', { title: 'Similar', CompanyName: CompanyName, OrderID: OrderID, OrderNo: OrderNo, img_path_var : dbPath }) //Render number 1 in 'if' case
            } catch (err) {
              console.log(err);
              sql.close();
            }
            sql.close();
          })()
          sql.on('error', err => {
            console.log(err);
          })
        }
        else{
          isSimilar = 0;
          filterCount = 0;
          notSimilar++;
          // only render "not similar" once every db image has failed to match
          if(notSimilar >= (dbImgCount ))
          {
            notSimilar = 0;
            res.render('notSimilar', { title: 'Not Similar', message: 'No Similar Images' }) //Render number 2 in 'else' case
          }
          console.log("Not similar\n");
        }
        return cv.drawMatches(
          img1,
          img2,
          keyPoints1,
          keyPoints2,
          bestMatches
        );
      };
      // Download every db image and match it against the upload.
      // NOTE(review): all iterations run concurrently and share the globals
      // above, so match counters interleave across images.
      for (j=0; j<dbImgCount; j++) {
        (function(j) {
          async.waterfall([
            async function downloadIMG(done) {
              try {
                var options = {
                  url: FolderPath[j].FolderPath,
                  dest: '/home/ubuntu/imgCompare/DBimages/'
                }
                const { filename, image } = await download.image(options);
                return [filename, options.url];
              } catch (e) {
                console.error(e)
              }
            },
            async function featureMatching([a, MatchURL], done){
              const img1 = cv.imread(image1);
              url = MatchURL;;
              // strip the download directory prefix from the saved filename
              const img = a.slice(33);
              const img2 = cv.imread('./DBimages/'+img);
              const orbMatchesImg = matchFeatures({
                url,
                img1,
                img2,
                detector: new cv.ORBDetector(),
                matchFunc: cv.matchBruteForceHamming
              });
              done(null);
            }
          ],
          function (err) {});
        })(j);
      }
    }
  });
});
// Start the HTTP server.
app.listen(5000, () => {
  console.log("Working on port 5000");
});
You need to add return before rendering a view. It's happening because the view rendering is happening more than 1 time there must be a condition in your code which is letting views to render multiple times. Add this return statement:
return res.render();
You're getting this error because you're calling matchFeatures() multiple times within a for loop.
// Simplified sketch from the answer (not runnable as-is): it shows that
// matchFeatures() ends in res.render(), and is invoked once per loop
// iteration below — so a single request can attempt to send many responses,
// which is exactly what triggers "Can't set headers after they are sent".
app.post('/api/photo', function (req, res) {
  var matchFeatures = ({url, img1, img2, detector, matchFunc}) => {
    if (isSimilar) {
      res.render('similar', {
        title: 'Similar',
        ... // elided in the answer — placeholder, not valid syntax
      }) //Render number 1 in 'if' case
    } else {
      res.render('notSimilar', {
        title: 'Not Similar',
        message: 'No Similar Images'
      }) //Render number 2 in 'else' case
    }
  };
  for (j = 0; j < dbImgCount; j++) {
    async function featureMatching() {
      const orbMatchesImg = matchFeatures({ // since you're calling it multiple times here
        url, // matchFeatures() will attempt to send
        img1, // responses multiple times
        img2,
        detector: new cv.ORBDetector(),
        matchFunc: cv.matchBruteForceHamming
      });
    }
  }
});
To fix this, You need to consolidate all these responses and send to client only once.
I figured out the error. I didn't reset the variable notSimilar at the entry point.
Done resetting of notSimilar as below, no error! Thanks Everyone.
app.post('/api/photo',function(req,res){
notSimilar = 0;

Concurrent request handling in Nodejs

I have a issue of concurrent request, which modifies db.
What am I doing is.
One request fetch data for user-1 , then calculate data for user-1 modified a field-1 in record, and save.
next request fetch data for user-1 , then calculate data for user-1 modified a field-1 in record, and save.
Both request operates simultaneously. so last request update the wrong data.
// Illustrative snippet from the question. NOTE(review): `req` and `user_id`
// are read but never declared here, and `user` becomes an implicit global —
// so overlapping requests share the same `user` binding. The fetch → modify →
// save sequence is not atomic, which is exactly the lost-update race the
// question describes.
function calculate() {
  var needUpdate = false;
  user = new UserLib(user_id); // implicit global — shared across requests
  var old_config = user.config;
  if (old_config[req.id]) {
    // reset the counter for this id before recomputing
    old_config[req.id].value = 0;
    needUpdate = true;
  }
  // runs when req.delete is undefined or explicitly false
  if (req.delete == void(0) || req.delete == false) {
    delete req.delete;
    old_config[req.id].value = old_config[req.id].value + 1;
    needUpdate = true;
  }
  if (needUpdate) {
    return user.save();
  }
  return true;
}
We are getting both requests at the same time.
var express = require('express');
var app = express();

// GET /update — recompute a user's config and save it.
// NOTE(review): `user` below is an implicit global shared by all in-flight
// requests, and the fetch → modify → save sequence is not atomic, so two
// concurrent /update calls for the same user overwrite each other's changes
// (the bug in the question). An atomic DB update, optimistic versioning, or
// per-user serialization is needed.
app.get('/update', function(req, res) {
  res.writeHead(200, {
    'Content-Type': 'text/html'
  });
  calculate(req);
  function calculate(req) {
    var needUpdate = false;
    user = new UserLib(user_id); // implicit global; user_id is undeclared here
    var old_config = user.config;
    if (old_config[req.id]) {
      old_config[req.id].value = 0;
      needUpdate = true;
    }
    if (req.delete == void(0) || req.delete == false) {
      delete req.delete;
      old_config[req.id].value = old_config[req.id].value + 1;
      needUpdate = true;
    }
    if (needUpdate) {
      user.save(); // result ignored — presumably async; response ends below regardless. TODO confirm
    }
  }
  res.end('Done');
});
First request with the following parameters: {
user_id: 1,
id: 1,
value: 5,
delete: false
}
Another request with the following parameters: {
user_id: 1,
id: 1,
delete: true
}
If you want to operate on each request simultaneously, I'd suggest to use Bluebird.map where you can handle each request as you want, with concurrency, and a final result.
For example:
// Illustrative answer snippet (not runnable as-is): `users` holds strings
// here but is then used as objects with .calculate()/.save() — adapt to the
// real user model when applying.
let users = ['foo', 'bar']; //fetching users you want
Bluebird.map(users, (user) => {
  // resolve each user's calculation; save only when an update is needed
  return user.calculate()
    .then((res) => res.shouldUpdate ? user.save() : Promise.resolve())
}, {concurrency: 2}) // at most two users processed at a time
.then((results) => {
  //results is an array with both resolved promises from below
})
You may also be interested in Bluebird.join where you could calculate, and join the resulting data for more than one promise.
Second example where you fetch same user twice in the same promise:
//both are promises
// Illustrative: fetch the same user twice and reconcile the two copies.
// NOTE(review): `user` inside the ternary is undefined in this snippet —
// presumably it should be user1 or user2.
Bluebird.all([fetchUser1, fetchUser2])
.spread(function(user1, user2) {
//check if should update
return user1.delete !== user2.delete ? user.delete() : null
})
.then(() => {})
Bluebird.spread documentation

Resources