Node.js - Wrong usage of promises

I have the following async method:
alreadyLoaded: async function (id) {
    const pool = await poolPromise;
    return pool.request()
        .input('idParameter', id)
        .query('SELECT count(*) AS value FROM dbo.partidos WHERE id=@idParameter')
        .then(result => {
            console.log(result.recordset[0].value)
            result.recordset[0].value > 0
        }).catch(function(err) {
            console.log(err.message);
        });
Invoked in another one:
processMatches: function(payload) {
    payload.matches.forEach(p => {
        if (partidosRepository.alreadyLoaded(p.id)) {
            console.log("El partido ya fue cargado.");
            return;
        }
The alreadyLoaded method checks whether a record has already been inserted into the database, and it's invoked inside another method for validation. The problem is that processMatches keeps processing records before alreadyLoaded finishes with the current one.
I'm clearly managing promises incorrectly here. Can anyone help me sort this out?
Here's the database connection:
const poolPromise = new sql.ConnectionPool(config)
    .connect()
    .then(pool => {
        console.log('Connected to localhost ' + config.database + ' database');
        return pool;
    })
    .catch(err => console.log('Database connection failed. Error: ', err));

module.exports = {
    sql, poolPromise
}

Why don't you build your if statement like this:
if (await partidosRepository.alreadyLoaded(p.id))
Also keep in mind that you are not returning a boolean value here; the expression result.recordset[0].value > 0 is evaluated and then discarded just before this block:
    }).catch(function(err) {
        console.log(err.message);
    });
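Putting both points together, a minimal sketch of how the two methods could look (assuming the same partidosRepository and poolPromise as above; a for...of loop replaces forEach because forEach does not wait for async callbacks):
alreadyLoaded: async function (id) {
    const pool = await poolPromise;
    try {
        const result = await pool.request()
            .input('idParameter', id)
            .query('SELECT count(*) AS value FROM dbo.partidos WHERE id=@idParameter');
        // Return the boolean so callers can await and test it.
        return result.recordset[0].value > 0;
    } catch (err) {
        console.log(err.message);
        return false;
    }
},

processMatches: async function(payload) {
    // for...of awaits each check before moving on; forEach would fire
    // all the callbacks without waiting for the promises to settle.
    for (const p of payload.matches) {
        if (await partidosRepository.alreadyLoaded(p.id)) {
            console.log("El partido ya fue cargado.");
            continue;
        }
        // ...load the match here...
    }
}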

Related

Passing input parameters to node mssql query function

I'm using a Node.js server to make requests to an Azure SQL database.
As far as I understand the following function does not prevent sql injection:
Current code: (working but unsafe)
var executeQuery = async function(query, response) {
    const pool = new sql.ConnectionPool(dbConfig)
    pool.on('error', err => {
        console.log('sql errors', err);
    });
    try {
        await pool.connect();
        let result = await pool.request().query(query);
        response.send(result.recordset);
        return {success: result}
    } catch (err) {
        return {err: err};
    } finally {
        console.log('request complete')
        pool.close(); // closing connection after request is finished
    }
};

app.get("/api/workOrders/byId/:workOrderId", function(req, res) {
    console.log(req.params);
    var query = "SELECT * FROM [WorkOrder] WHERE [idWorkOrder]=" + req.params.workOrderId;
    executeQuery(query, res);
});
I would like to keep the executeQuery function standalone, but I have not found an answer for that yet. Anyway, this is the code I constructed from the mssql documentation:
New code (not working)
app.get("/api/test/:workOrderId", function(req, res) {
console.log(req.params.workOrderId);
(async function() {
const pool = new sql.ConnectionPool(dbConfig)
pool.on('error', err => {
console.log('sql errors', err);
});
try {
await pool.connect();
let result = await pool.request()
.input('input_parameter', sql.VarChar(50), req.params.workOrderId)
.query('SELECT * FROM [Quotation] WHERE [idWorkOrder]= #input_parameter');
console.log(result);
res.send(result.recordset);
return {success: result}
} catch (err) {
return {err: err};
} finally {
console.log('request complete')
pool.close(); // closing connection after request is finished
}
});
})
This version should be injection proof, but it does not return anything. Is there an option to pass the input values to the executeQuery function as in the current code?
You can pass the value of req.params.workOrderId into your async function and then use that value inside. Check the following code.
app.get("/api/test/:workOrderId", function(req, res) {
console.log(req.params.workOrderId);
(async function(workOrderId) {
const pool = new sql.ConnectionPool(dbConfig)
pool.on('error', err => {
console.log('sql errors', err);
});
try {
await pool.connect();
let result = await pool.request()
.input('input_parameter', sql.VarChar(50), workOrderId)
.query('SELECT * FROM [Quotation] WHERE [idWorkOrder]= #input_parameter');
console.log(result);
res.send(result.recordset);
return {success: result}
} catch (err) {
return {err: err};
} finally {
console.log('request complete')
pool.close(); // closing connection after request is finished
}
})(req.params.workOrderId); // <===pass value to the function
})
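If you would still like executeQuery to stay standalone, as in the current code, one option is to pass the parameters in alongside the query string. A minimal sketch (the params array shape here is an illustrative assumption, not part of the original code):
var executeQuery = async function(query, params, response) {
    const pool = new sql.ConnectionPool(dbConfig);
    pool.on('error', err => console.log('sql errors', err));
    try {
        await pool.connect();
        const request = pool.request();
        // Bind every parameter before running the query, so the driver
        // handles the values instead of them being concatenated into the SQL.
        (params || []).forEach(p => request.input(p.name, p.type, p.value));
        const result = await request.query(query);
        response.send(result.recordset);
        return { success: result };
    } catch (err) {
        return { err: err };
    } finally {
        console.log('request complete');
        pool.close();
    }
};

app.get("/api/test/:workOrderId", function(req, res) {
    executeQuery(
        'SELECT * FROM [Quotation] WHERE [idWorkOrder] = @workOrderId',
        [{ name: 'workOrderId', type: sql.VarChar(50), value: req.params.workOrderId }],
        res
    );
});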

NodeJS in AWS Lambda to Mongo Atlas - connection fails with no logs

I am a JS newbie, so this may be a silly problem. I have a Lambda written in Node.js 10.x and I am trying to add a MongoDB Atlas insert. I started with this tutorial: https://docs.atlas.mongodb.com/best-practices-connecting-to-aws-lambda/
This is my code:
const MongoClient = require('mongodb').MongoClient;

let cachedDb = null;

function connectToDatabase (uri) {
    console.log('Connect to mongo database');
    if (cachedDb) {
        console.log('Using cached database instance');
        return Promise.resolve(cachedDb);
    }
    return MongoClient.connect(uri)
        .then(db => {
            console.log('Successful connect');
            cachedDb = db;
            return cachedDb;
        }).catch(err => {
            console.log('Connection error occurred: ', err);
            callback(err);
        });
}

function insertUser(db, email) {
    console.log('=> modify database');
    return db.collection('users').insertOne({"email" : email})
        .then(() => { callback(null, result); })
        .catch(err => {
            console.log('Insert error occurred: ', err);
            callback(err);
        });
}

exports.handler = (payload, context, callback) => {
    const { email, password } = JSON.parse(payload.body);
    context.callbackWaitsForEmptyEventLoop = false;
    connectToDatabase(MONGODB_URI)
        .then(db => {
            console.log('Mongo connected')
            insertUser(db, email);
        })
        .then(result => {
            console.log('Mongo insert succeeded', result);
        })
        .catch(err => {
            console.log('Mongo insert failed', err);
            return responses.INTERNAL_SERVER_ERROR_500(err, callback, response);
        });
    console.log('finished mongo stuff');
I can see the following logs in CloudWatch:
START RequestId: 0338d336-7d33-40d5-abc7-1511f1c9ea4c Version: $LATEST
2020-01-11T12:18:00.808Z 0338d336-7d33-40d5-abc7-1511f1c9ea4c INFO Connect to mongo database
2020-01-11T12:18:00.855Z 0338d336-7d33-40d5-abc7-1511f1c9ea4c INFO finished mongo stuff
2020-01-11T12:18:01.416Z 0338d336-7d33-40d5-abc7-1511f1c9ea4c ERROR (node:8) DeprecationWarning: current Server Discovery and Monitoring engine is deprecated, and will be removed in a future version. To use the new Server Discover and Monitoring engine, pass option { useUnifiedTopology: true } to the MongoClient constructor.
END RequestId: 0338d336-7d33-40d5-abc7-1511f1c9ea4c
The item is not inserted in Atlas. I added more verbose logging, but it is not shown. If the Mongo connect failed, there should be an error, but it seems the error handlers are never reached. Where is the problem?
You are very close. A few things are missing:
1. The callback function needs to be called from your handler function, since the insert function doesn't know about callback.
2. When you use a .then with curly braces, you need to return whatever is inside to pass it on to the next .then (unless you write it as a single expression).
3. db in the insert method needs to be db.db().
I would recommend using async/await instead of the callback style. Here is the equivalent handler:
exports.handler = async (payload) => {
    try {
        const { email, password } = JSON.parse(payload.body);
        const db = await connectToDatabase(MONGODB_URI);
        console.log("Mongo connected");
        const result = await insertUser(db, email);
        console.log("Mongo insert succeeded", result);
        return result;
    } catch (err) {
        console.error(err);
    }
};
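For completeness, a minimal sketch of how the two helpers could look once the callback references are removed and the db.db() point is applied (this assumes the MongoDB 3.x driver used in the question, where connect() resolves to a MongoClient):
let cachedDb = null;

function connectToDatabase(uri) {
    if (cachedDb) {
        return Promise.resolve(cachedDb);
    }
    return MongoClient.connect(uri)
        .then(client => {
            // Cache the MongoClient; errors simply propagate to the caller,
            // so no callback is needed here.
            cachedDb = client;
            return cachedDb;
        });
}

function insertUser(client, email) {
    // db.db() from point 3: get the database handle off the client,
    // then return the insert promise so the handler can await it.
    return client.db().collection('users').insertOne({ email: email });
}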

Mongo makes new connection on each page refresh

When I load my page I want some products to be shown, so I make a GET request and it retrieves them from the database. However, when I refresh the page I notice the old connection remains. How to make sure the old connections close?
Here's my code:
const MongoClient = require('mongodb').MongoClient;

const connection = (closure) => {
    return MongoClient.connect(config.connectionString, (err, client) => {
        if (err) {
            return winston.log('error', now() + err);
        }
        closure(client);
    });
};
...
router.get('/products', (req, res) => {
    connection((client) => {
        client.db('dbname').collection('collectionname')
            .find({})
            .toArray()
            .then((products) => {
                response.data = products;
                response.message = "Products retrieved successfully!"
                res.json(response);
            })
            .catch((err) => {
                winston.log('error', now() + err);
                sendError(err, res);
            });
    });
});
Well, each time your /products route is called, you do create a new MongoClient instance. To limit the number of connections to your database, you may either connect once and save your MongoClient instance:
let client = undefined;

const connection = (closure) => {
    // Return the client if any...
    if (client) return closure(client);
    return MongoClient.connect(config.connectionString, (err, c) => {
        if (err) {
            return winston.log('error', now() + err);
        }
        // Save the client.
        client = c;
        closure(client);
    });
};
...or simply close the MongoClient connection you instantiated once you're done with it:
router.get('/products', (req, res) => {
    connection((client) => {
        client.db('dbname').collection('collectionname')
            .find({})
            .toArray()
            .then((products) => {
                response.data = products;
                response.message = "Products retrieved successfully!"
                // Close the MongoClient...
                client.close();
                res.json(response);
            })
            .catch((err) => {
                winston.log('error', now() + err);
                sendError(err, res);
                // Close the MongoClient...
                client.close();
            });
    });
});
I would advise you to go with the first solution: the MongoClient maintains a connection pool, so having multiple clients does not bring any advantage. In addition, it lets you check whether the DB is reachable before executing anything else (just connect to the DB in your app's init(), save the client instance, and you're done).
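A minimal sketch of that first approach (connecting once at app init and reusing the pooled client; config, winston, now(), response and sendError are taken from the code above):
let client = null;

// Connect once when the app starts and reuse the pooled client afterwards.
// (Requests arriving before the connection resolves would still need a guard.)
MongoClient.connect(config.connectionString)
    .then((c) => { client = c; })
    .catch((err) => winston.log('error', now() + err));

router.get('/products', (req, res) => {
    client.db('dbname').collection('collectionname')
        .find({})
        .toArray()
        .then((products) => {
            response.data = products;
            response.message = "Products retrieved successfully!";
            res.json(response);
        })
        .catch((err) => {
            winston.log('error', now() + err);
            sendError(err, res);
        });
});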

Sample code to query MongoDB in node.js (lambda) code

I'm new to node.js and trying to create a lambda function that queries a collection from MongoDB.
Here is some code I found as a starting point:
'use strict';

const MongoClient = require('mongodb').MongoClient;
const ATLAS_URI = "mongodb://lambdaUser:PASSWORD@cluster0-shard-00-00-ddlwo.mongodb.net:27017,cluster0-shard-00-01-ddlwo.mongodb.net:27017,cluster0-shard-00-02-ddlwo.mongodb.net:27017/mydb?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin";

let cachedDb = null;

function connectToDatabase(uri) {
    console.log('=> connect to database');
    if (cachedDb) {
        console.log('=> using cached database instance');
        return Promise.resolve(cachedDb);
    }
    return MongoClient.connect(uri)
        .then(db => { cachedDb = db; return cachedDb; });
}

function queryDatabase(db) {
    console.log('=> query database');
    return db.collection('sensordata').find({}).toArray()
        .then(() => { return { statusCode: 200, body: 'success' }; })
        .catch(err => { return { statusCode: 500, body: 'error' }; });
}

exports.handler = (event, context, callback) => {
    connectToDatabase(ATLAS_URI)
        .then(db => queryDatabase(db))
        .then(result => {
            console.log('=> returning result: ', result);
            context.succeed(result);
        })
        .catch(err => {
            console.log('=> an error occurred: ', err);
            context.failed(err);
        });
};
This code works fine, but I don't know how to recover the data from the query...
Looking at other code, I see there is a function(err, data) callback inside find(), but in this case I don't know how to insert that, or how to modify the code to return the data instead of the { statusCode: 200, body: 'success' } JSON object.
I would appreciate any help.
Thanks
Gus
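For reference, a sketch (not from the original thread) of how queryDatabase could return the documents instead of discarding them: toArray() resolves with the array of matching documents, so pass it through:
function queryDatabase(db) {
    console.log('=> query database');
    return db.collection('sensordata').find({}).toArray()
        // docs is the array of matching documents resolved by toArray()
        .then(docs => { return { statusCode: 200, body: JSON.stringify(docs) }; })
        .catch(err => { return { statusCode: 500, body: 'error' }; });
}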

Node js stop long process task when Promise reject it

I created a promise function to process a long-running query task. Sometimes the task blocks for hours, so I want to set a timeout to stop it. Below is the code.
It returns the error message correctly, but connection.execute() keeps running for a long time before it stops. How can I stop it immediately when the promise rejects?
Thanks!
function executeQuery(connection, query) {
    return new Promise((resolve, reject) => {
        "use strict";
        // long time query
        connection.execute(query, function (err, results) {
            if (err) reject('Error when fetch data');
            else resolve(results);
            clearTimeout(t);
        });
        let t = setTimeout(function () {
            reject('Time Out');
        }, 10);
    })
}

(async () => {
    "use strict";
    oracle.outFormat = oracle.OBJECT;
    try {
        let query = fs.readFileSync("query.sql").toString();
        let results = await executeQuery(connection, query);
        console.log(results.rows);
    } catch (e) {
        console.log(`error:${e}`);
    }
})();
How can I stop it immediately when the promise rejects?
According to the docs, you can use connection.break:
return new Promise((resolve, reject) => {
    connection.execute(query, (err, results) => {
        if (err) reject(err);
        else resolve(results);
        clearTimeout(t);
    });
    const t = setTimeout(() => {
        connection.break(reject); // is supposed to call the execute callback with an error
    }, 10);
})
Make sure to also release the connection in a finally block.
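A minimal sketch of that pattern, assuming the node-oracledb API shown further below (connection.break() and connection.close()):
async function executeQueryWithTimeout(connection, query, ms) {
    const timer = setTimeout(() => {
        // break() cancels the in-flight call; the execute callback
        // then fires with an error such as ORA-01013.
        connection.break(() => {});
    }, ms);
    try {
        return await new Promise((resolve, reject) => {
            connection.execute(query, (err, results) => {
                if (err) reject(err);
                else resolve(results);
            });
        });
    } finally {
        clearTimeout(timer);
        await connection.close(); // release the connection whether it succeeded, failed or timed out
    }
}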
Try this (using bluebird promises):
var execute = Promise.promisify(connection.execute);

function executeQuery(connection, query) {
    return execute.call(connection, query)
        .timeout(10000)
        .then(function (results) {
            // handle results here
        })
        .catch(Promise.TimeoutError, function (err) {
            // handle timeout error here
        })
        .catch(function (err) {
            // handle other errors here
        });
};
If this still blocks, there's a possibility that the database driver you are using is actually synchronous rather than asynchronous. In that case, that driver would be incompatible with the node event loop and you may want to look into another one.
As Bergi mentioned, you'll need to use the connection.break method.
Given the following function:
create or replace function wait_for_seconds(
    p_seconds in number
)
return number
is
begin
    dbms_lock.sleep(p_seconds);
    return 1;
end;
Here's an example of its use:
const oracledb = require('oracledb');
const config = require('./dbConfig.js');

let conn;
let err;
let timeout;

oracledb.getConnection(config)
    .then((c) => {
        conn = c;
        timeout = setTimeout(() => {
            console.log('Timeout expired, invoking break');
            conn.break((err) => {
                console.log('Break finished', err);
            });
        }, 5000);
        return conn.execute(
            `select wait_for_seconds(10)
             from dual`,
            [],
            {
                outFormat: oracledb.OBJECT
            }
        );
    })
    .then(result => {
        console.log(result.rows);
        clearTimeout(timeout);
    })
    .catch(err => {
        console.log('Error in processing', err);
        if (/^Error: ORA-01013/.test(err)) {
            console.log('The error was related to the timeout');
        }
    })
    .then(() => {
        if (conn) { // conn assignment worked, need to close
            return conn.close();
        }
    })
    .catch(err => {
        console.log('Error during close', err)
    });
Keep in mind that the setTimeout call runs just before execute (because of the return statement), so the timer starts counting down immediately. The execute call, however, isn't guaranteed to start right away: it uses a thread from the thread pool and may have to wait until one becomes available. Just something to keep in mind...
