How to troubleshoot slow queries using Mongoose - Node.js

I have set up a Node/Mongoose API project for the first time. I am having trouble figuring out why some of the queries are taking so long. I am trying to use the explain() method, but I cannot figure out where to put it.
mongoose.connect(mongo.uri, { useNewUrlParser: true })
  .then(() => console.log('Connected'))
  .catch(error => console.log('Error: ' + error))

mongoose.Promise = Promise

Object.keys(mongo.options).forEach((key) => {
  mongoose.set(key, mongo.options[key])
})

/* istanbul ignore next */
mongoose.Types.ObjectId.prototype.view = function () {
  return { id: this.toString() }
}

/* istanbul ignore next */
mongoose.connection.on('error', (err) => {
  console.error('MongoDB connection error: ' + err)
  process.exit(-1)
})

export default mongoose
export const create = ({ body }, res) => {
  const action = (error, collection) => {
    if (error) failure(res.status(500).json({ status: 'error', error: error }))
    else {
      collection.find({ 'Series.ExcludeFromAudit': false }).toArray((error, data) => {
        error && failure(res.status(500).json({ status: 'error', error: error }))
        !error && success(res.status(200).json({ data }))
      })
    }
  }
  mongoose.connection.db.collection('MetrostudyProjects', action)
}
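explain() belongs on the query itself, not on the connection setup. A minimal sketch of the two places it can go here; the MetrostudyProject model name in the first variant is hypothetical, and the second variant reuses the native-driver cursor from the create handler above:
// 1) On a Mongoose query: chain .explain() before executing it.
MetrostudyProject.find({ 'Series.ExcludeFromAudit': false })
  .explain()
  .then(stats => console.log(JSON.stringify(stats, null, 2)))
// 2) On a native-driver cursor: call .explain() on the cursor returned by find().
mongoose.connection.db.collection('MetrostudyProjects')
  .find({ 'Series.ExcludeFromAudit': false })
  .explain()
  .then(stats => console.log(JSON.stringify(stats, null, 2)))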

Related

ExpressJS res.status(500).json - Cannot read properties of undefined

I'm trying to set up a MeiliSearch index to be populated when my backend server (NodeJS/ExpressJS) starts. Below is my code:
const withDB = async (operations, res) => {
  try {
    const client = await MongoClient.connect('mongodb://localhost:27017', { useNewUrlParser: true });
    const db = client.db('database-name');
    await operations(db);
    client.close();
  } catch (error) {
    res.status(500).json({ message: 'Error connecting to db', error });
  }
}

app.listen(8000, async (res) => {
  withDB(async (db) => {
    const searchReports = await db.collection('reports').find().limit(500).toArray()
    res.status(200).json(searchReports);
    const searchIndex = new MeiliSearch({ host: 'http://127.0.0.1:7700' })
    searchIndex.index('Reports').addDocuments(searchReports)
      .then((res) => console.log(res));
  }, res)
});
Everything works fine apart from when I add either of the lines referencing 'res'. At that point I get the error:
res.status(500).json({
^
TypeError: Cannot read properties of undefined (reading 'status')
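The callback passed to app.listen is not a route handler; Node invokes it with no arguments, so the res parameter there is undefined, and anything down that path that calls res.status(...) (including withDB's catch) throws. Responses can only be sent from inside route handlers. A minimal sketch of populating the index at startup without touching res, reusing the names from the question:
app.listen(8000, async () => {
  try {
    const client = await MongoClient.connect('mongodb://localhost:27017', { useNewUrlParser: true });
    const db = client.db('database-name');
    const searchReports = await db.collection('reports').find().limit(500).toArray();
    const searchIndex = new MeiliSearch({ host: 'http://127.0.0.1:7700' });
    // addDocuments returns a promise; log the enqueued task instead of calling res.json(...)
    const task = await searchIndex.index('Reports').addDocuments(searchReports);
    console.log(task);
    await client.close();
  } catch (error) {
    console.error('Startup indexing failed:', error);
  }
});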

Send KnexJS error message to the frontend

I am having trouble sending an error to the front end when a CSV file is uploaded and the numbers already exist in the database. The backend is logging an error that the primary key value already exists, but the code I have written tells the front end that the file uploaded just fine.
Code snippet:
router.post('/:program/upload', upload.single('testUpload'), (req, res, next) => {
  try {
    CSVtoPSQL(req.params.program, req.file.filename)
    return res.status(201).json({
      message: 'File Uploaded Just fine :)'
    });
  } catch (error) {
    return res.status(500).json({
      message: error
    })
  }
});

const CSVtoPSQL = (program, filePath) => {
  let stream = fs.createReadStream(path.resolve(__dirname, '../files', filePath));
  let csvData = [];
  let csvStream = csv
    .parse({ headers: false })
    .on('error', error => console.error(error))
    .on('data', (data) => {
      csvData.push(data.toString());
    })
    .on('end', () => {
      csvData.forEach(item => {
        queries.upload(program, item)
          .then(() => {
            console.log('QR Code Added: ' + item);
          }).catch((err) => {
            console.log(`oopsie: ${err}`);
          });
      })
    });
  stream.pipe(csvStream);
}
Pretty confident the issue is my poor understanding of promises.
As expected, I wasn't handling my promises correctly. I've updated the code a bit and it now responds with two arrays: successful uploads and errored uploads.
router.post('/:program/upload', upload.single('testUpload'), async (req, res, next) => {
  try {
    const result = await CSVtoPSQL(req.params.program, req.file.filename)
    return res.status(201).json(result);
  } catch (error) {
    return res.status(500).json({
      message: error,
    })
  }
});

const CSVtoPSQL = (program, filePath) => {
  let stream = fs.createReadStream(path.resolve(__dirname, '../files', filePath));
  let csvData = [];
  return new Promise((resolve) => {
    const results = {
      success: [],
      error: [],
    }
    let csvStream = csv
      .parse({ headers: false })
      .on('error', error => console.error(error))
      .on('data', (data) => {
        csvData.push(data.toString());
      })
      .on('end', async () => {
        await Promise.all(
          csvData.map(async (item) => {
            try {
              await queries.upload(program, item);
              results.success.push(item);
              console.log('QR Code Added: ' + item);
            } catch (error) {
              console.log(`oopsie: ${error}`)
              results.error.push(item);
            }
          })
        )
        resolve(results);
      });
    stream.pipe(csvStream);
  })
}
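If the front end should also be told when some rows failed, the route can branch on results.error before choosing a status code. A small sketch built on the handler above; the 207 "partial success" status is an arbitrary choice:
router.post('/:program/upload', upload.single('testUpload'), async (req, res, next) => {
  try {
    const result = await CSVtoPSQL(req.params.program, req.file.filename);
    // 201 when every row uploaded, 207 (arbitrary) when some rows were rejected
    const status = result.error.length === 0 ? 201 : 207;
    return res.status(status).json(result);
  } catch (error) {
    return res.status(500).json({ message: error });
  }
});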

Writing Mocha Chai Test cases for NodeJs Controllers

I am new to unit testing. I am trying to write test cases for controller.js files in Node.js microservices. I am unable to understand where I am going wrong. It always throws "TypeError: Cannot read property 'empId' of undefined" for two of these properties.
This is the controller code:
const crmgDetails = db.crmgResource_details;
const employeeProposal = db.employee_Proposal;
const Op = db.Sequelize.Op;
const raDetails = db.crmgRaSheet_entity;
let results = [];
Sequelize = require('sequelize')

exports.findOne = (req, res) => {
  console.log(req.body.empId);
  crmgDetails.findAll({
    where: {
      resEmployeeNumber: req.body.empId
    }
  })
    .then(data => {
      res.send(data);
    })
    .catch(err => {
      res.status(500).send({
        message:
          err.message || "Some error occurred while retrieving tutorials."
      });
    });
};

exports.findMatchingDemandsForRmg = (req, res) => {
  let proposedDemands = [];
  employeeProposal.findAll({
    where: {
      emp_id: req.body.empId,
      demandSbu: req.body.sbu
    }
  }).then(proposedEmployee => {
    console.log('proposedEmployee', proposedEmployee);
    if (proposedEmployee.length === 0) {
      crmgDetails.findAll({
        where: {
          resEmployeeNumber: req.body.empId,
          demandSbu: req.body.sbu
        }
      }).then(matchingDemands => {
        console.log('matchingDemands ', matchingDemands)
        proposedDemands = matchingDemands;
      })
    } else {
      console.log("crmg Employee")
      console.log(proposedEmployee)
      for (let employee of proposedEmployee) {
        crmgDetails.findOne({
          where: {
            demandUid: employee.demandUid,
            resEmployeeNumber: req.body.empId,
            demandSbu: req.body.sbu
          }
        }).then(crmgProposed => {
          proposedDemands.push(crmgProposed);
        })
      }
    }
    setTimeout(() => {
      console.log(proposedDemands)
      res.send(proposedDemands);
    }, 3000);
  }).catch((err) => {
    res.status(500).send({
      message:
        err.message || "Some error occurred while retrieving tutorials."
    });
  })
}

exports.getResourceAllocationDetails = (req, res) => {
  employeeProposal.findAll({
    include: {
      model: raDetails
    },
    where: Sequelize.and(
      { activeFlag: true },
      Sequelize.or(
        { status: "Accepted By RMG" },
        { status: "Rejected" }
      )
    )
  }).then(employees => {
    res.send(employees)
  })
}
This is the test file I tried to write off the top of my head:
const CrmgRaSheetModel = require('../controllers/crmgResource_Details.controller')

describe('Check for successful fetch API call', () => {
  it('property getResourceAllocationDetails should be called', async () => {
    CrmgRaSheetModel.getResourceAllocationDetails((res) => {
      expect(res).to.be.an('object')
      return res.json()
    })
  });
  it('property findMatchingDemandsForRmg should be called', async () => {
    CrmgRaSheetModel.findMatchingDemandsForRmg((res) => {
      expect(res).to.be.an('object')
      return res.json()
    })
  });
  it('property findOne should be called', async () => {
    CrmgRaSheetModel.findOne((res) => {
      expect(res).to.be.an('object')
      return res.json()
    })
  })
})
From the test file you are calling the controller methods with only res, so there is no way to send your input as the request body. Pass both req and res, and put your input values in req, as in the sketch below.
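A minimal sketch of what that can look like with plain mock objects; the empId/sbu values are made up, and in a real test the Sequelize models would be stubbed (for example with sinon):
const CrmgRaSheetModel = require('../controllers/crmgResource_Details.controller')

describe('findOne', () => {
  it('sends the matching resource details', () => {
    // Hypothetical input values the controller reads from req.body
    const req = { body: { empId: 12345, sbu: 'EXAMPLE' } };
    // A tiny chainable stand-in for the Express response
    const res = {
      status(code) { this.statusCode = code; return this; },
      send(payload) { this.payload = payload; return this; },
    };
    CrmgRaSheetModel.findOne(req, res);
    // findOne does not return its promise, so asserting on res.payload here
    // requires stubbing crmgDetails.findAll, or refactoring the controller
    // to return the promise so the test can await it.
  });
});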

Adding req object to Kue job

I've been hunting the internet trying to find an answer to why the following doesn't work.
I am trying to pass in the req object when I add the job so that I have access to it when the job is processed.
But the process is never executed when the whole req object is passed to job.data. Yet I can pass parts of the req object.
What I'm trying to do may be an anti-pattern and a big no-no, but I am trying to understand why it won't work. It seems strange that it just continues without any error.
Below is an example, hopefully it is clear.
My kue is abstracted into a separate file, and initialised onto app.locals.Q as follows:
// Q.js
class Q {
  constructor(options) {
    this.q = kue.createQueue(options)
  }

  addJob = (name, data) => {
    return Queue.create({
      queue_job: name,
      queue_route: data.route,
      queue_user: data.user,
      queue_added: new Date(),
    })
      .then(response => {
        this.q.create(name, {
          id: response.get('queue_id'),
          route: data.route,
          request: data.request
        })
          .save();
        return Promise.resolve(response);
      })
      .catch(error => {
        return Promise.reject(error);
      });
  };

  processJob = (name, work, options = {}) => {
    const { concurrency } = options;
    this.q.process(name, concurrency || 1, (job, done) => {
      const { data: { id, route, request } } = job;
      Queue.update({
        queue_running: true
      }, {
        where: {
          queue_id: id
        }
      })
        .then(() => {
          if (process.env.NODE_ENV !== 'production') {
            console.log(`running job ${id} from ${route}`);
          }
          return new Promise((resolve, reject) => {
            return work(resolve, reject, request);
          });
        })
        .then(results => {
          return Queue.update({
            queue_running: false,
            queue_completed: new Date(),
            queue_results_path: results || null
          }, {
            where: {
              queue_id: job.data.id
            }
          });
        })
        .then(() => {
          if (process.env.NODE_ENV !== 'production') {
            console.log(`completed job ${id} from ${route}`);
          }
          done();
        })
        .catch((error) => {
          if (process.env.NODE_ENV !== 'production') {
            console.log(`failed job ${id} from ${route}`);
            console.log(error);
          }
          Queue.update({
            queue_running: false,
            queue_error: `${error}`
          }, {
            where: {
              queue_id: id
            }
          })
            .then(() => {
              done(error);
            })
            .catch(err => {
              console.error(err);
              done(err);
            });
        });
    });
  };
};
// example route
queue = (req, res) => {
  const { locals: { Q } } = req.app;
  Q.addJob('foo', {
    route: req.path,
    user: req.user.get('username'),
    request: req
  })
    .then(queue_id => {
      Q.processJob('foo', (resolve, reject, request) => {
        console.log(request)
        resolve('complete')
      })
      res.json({ success: true })
    })
}
Redis can't serialize the req object: it is full of circular references and live socket handles, and kue stores job data as JSON. Kue simply fails silently instead of throwing.
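The usual workaround is to keep job.data to plain, JSON-serializable values pulled out of req before the job is added. A sketch of the route above; which fields to copy over is up to the worker:
queue = (req, res) => {
  const { locals: { Q } } = req.app;
  Q.addJob('foo', {
    route: req.path,
    user: req.user.get('username'),
    // Only the plain pieces of the request the worker needs, not req itself
    request: {
      params: req.params,
      query: req.query,
      body: req.body,
    },
  })
    .then(() => {
      Q.processJob('foo', (resolve, reject, request) => {
        console.log(request)
        resolve('complete')
      })
      res.json({ success: true })
    })
    .catch(error => res.status(500).json({ error: `${error}` }))
}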

Sample code to query MongoDB in Node.js (Lambda)

I'm new to node.js and trying to create a lambda function that queries a collection from MongoDB.
Here is a code I found as a starting point:
'use strict';

const MongoClient = require('mongodb').MongoClient;

const ATLAS_URI = "mongodb://lambdaUser:PASSWORD@cluster0-shard-00-00-ddlwo.mongodb.net:27017,cluster0-shard-00-01-ddlwo.mongodb.net:27017,cluster0-shard-00-02-ddlwo.mongodb.net:27017/mydb?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin";

let cachedDb = null;

function connectToDatabase(uri) {
  console.log('=> connect to database');
  if (cachedDb) {
    console.log('=> using cached database instance');
    return Promise.resolve(cachedDb);
  }
  return MongoClient.connect(uri)
    .then(db => { cachedDb = db; return cachedDb; });
}

function queryDatabase(db) {
  console.log('=> query database');
  return db.collection('sensordata').find({}).toArray()
    .then(() => { return { statusCode: 200, body: 'success' }; })
    .catch(err => { return { statusCode: 500, body: 'error' }; });
}

exports.handler = (event, context, callback) => {
  connectToDatabase(ATLAS_URI)
    .then(db => queryDatabase(db))
    .then(result => {
      console.log('=> returning result: ', result);
      context.succeed(result);
    })
    .catch(err => {
      console.log('=> an error occurred: ', err);
      context.fail(err);
    });
};
This code works fine, but I don't know how to recover the data from the query...
Looking at other code I see there is a function(err, data) callback inside find(), but in this case I don't know how to insert that, or how to modify the code to return the data instead of the { statusCode: 200, body: 'success' } JSON object.
I would appreciate any help.
Thanks
Gus
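The fixed 'success' body comes from queryDatabase discarding the documents; passing them through is enough to return them from the handler. A minimal tweak of the function above:
function queryDatabase(db) {
  console.log('=> query database');
  return db.collection('sensordata').find({}).toArray()
    .then(data => {
      // Return the documents themselves instead of a fixed body
      return { statusCode: 200, body: JSON.stringify(data) };
    })
    .catch(err => {
      console.log('=> query error: ', err);
      return { statusCode: 500, body: 'error' };
    });
}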
