I'm using a Node.js server to make requests to an Azure SQL database.
As far as I understand, the following function does not prevent SQL injection:
Current code (working but unsafe):
var executeQuery = async function(query, response) {
const pool = new sql.ConnectionPool(dbConfig)
pool.on('error', err => {
console.log('sql errors', err);
});
try {
await pool.connect();
let result = await pool.request().query(query);
response.send(result.recordset);
return {success: result}
} catch (err) {
return {err: err};
} finally {
console.log('request complete')
pool.close(); // closing connection after request is finished
}
};
app.get("/api/workOrders/byId/:workOrderId", function(req, res) {
console.log(req.params);
var query = "SELECT * FROM [WorkOrder] WHERE [idWorkOrder]=" + req.params.workOrderId;
executeQuery(query, res);
});
I would like to have the executeQuery function standalone, but I did not find an answer for that yet. Anyway, this is the code I constructed from the mssql documentation:
New code (not working):
app.get("/api/test/:workOrderId", function(req, res) {
console.log(req.params.workOrderId);
(async function() {
const pool = new sql.ConnectionPool(dbConfig)
pool.on('error', err => {
console.log('sql errors', err);
});
try {
await pool.connect();
let result = await pool.request()
.input('input_parameter', sql.VarChar(50), req.params.workOrderId)
.query('SELECT * FROM [Quotation] WHERE [idWorkOrder] = @input_parameter');
console.log(result);
res.send(result.recordset);
return {success: result}
} catch (err) {
return {err: err};
} finally {
console.log('request complete')
pool.close(); // closing connection after request is finished
}
});
})
This version should be injection-proof, but it does not return anything. Is there an option to pass the input values to the executeQuery function as in the current code?
You can pass the value of req.params.workOrderId into your async function and then use that value inside. Check the following code:
app.get("/api/test/:workOrderId", function(req, res) {
console.log(req.params.workOrderId);
(async function(workOrderId) {
const pool = new sql.ConnectionPool(dbConfig)
pool.on('error', err => {
console.log('sql errors', err);
});
try {
await pool.connect();
let result = await pool.request()
.input('input_parameter', sql.VarChar(50), workOrderId)
.query('SELECT * FROM [Quotation] WHERE [idWorkOrder] = @input_parameter');
console.log(result);
res.send(result.recordset);
return {success: result}
} catch (err) {
return {err: err};
} finally {
console.log('request complete')
pool.close(); // closing connection after request is finished
}
})(req.params.workOrderId); // <===pass value to the function
})
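If you also want to keep executeQuery as a standalone helper, as asked in the question, one option is to pass an array of parameter descriptions alongside the query text. This is only a sketch: the params shape, the parameter name workOrderId and the sql.Int type are my assumptions, not something the mssql docs prescribe.

// Hypothetical helper: params is an array of { name, type, value } objects.
var executeQuery = async function(query, params, response) {
    const pool = new sql.ConnectionPool(dbConfig);
    pool.on('error', err => {
        console.log('sql errors', err);
    });
    try {
        await pool.connect();
        const request = pool.request();
        // Register each value as a named parameter instead of concatenating it into the SQL string.
        (params || []).forEach(p => request.input(p.name, p.type, p.value));
        const result = await request.query(query);
        response.send(result.recordset);
        return { success: result };
    } catch (err) {
        return { err: err };
    } finally {
        console.log('request complete');
        pool.close(); // closing connection after request is finished
    }
};

app.get("/api/workOrders/byId/:workOrderId", function(req, res) {
    executeQuery(
        "SELECT * FROM [WorkOrder] WHERE [idWorkOrder] = @workOrderId",
        [{ name: 'workOrderId', type: sql.Int, value: req.params.workOrderId }],
        res
    );
});

The key point is the same as in the accepted answer: the value reaches the query through request.input() rather than string concatenation.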
I have the following API, which inserts into a table based on the user's selection from the client. The user can select different materials belonging to the same experiment. In my payload, I have materials as an array and experiment as a string. I tried several ways to resolve my error; the following was the last try:
app.post("/insertMaterials", (req, res) => {
for (let mat of req.body["material"]) {
try {
oracledb.getConnection(
{
user: "some_user",
password: "some_pw",
connectString: "someConnStr",
},
function (err, connection) {
if (err) {
console.error("1" + err);
return;
}
connection.execute(
"INSERT INTO MATERIALS (ID, MAT_NAME, EXPR) VALUES((SELECT max(ID) + 1 FROM MATERIALS), :1, :2)",
[mat, req.body["experiment"]],
(err, result) => {
if (err) {
console.error("log " + err);
}
connection.commit();
connection.close();
}
);
}
);
} catch (error) {
console.log(error);
}
}
return res.status(200).json({
title: "SUCCESS: Materials Inserted",
});
});
I always get:
triggerUncaughtException(err, true /* fromPromise */);
^
[Error: DPI-1002: invalid dpiConn handle] { errorNum: 0, offset: 0 }
Before, I had the block inside the for loop as a separate function, and I also tried executeMany. Still the same error. After trying a lot of other ways and reading on the internet, I couldn't solve the issue, except for finally catching uncaughtException and logging the error:
process.on('uncaughtException', (error, next) => {
let date = new Date()
errorLogStream.write(`Date: ${date}. Err: ${error.stack} \n`)
return
})
By catching this exception, my program does not break anymore and the data is always inserted. But it would be great to know how and when this error is raised, how it can be resolved, and where I am making a mistake.
UPDATE
Payload example: {'material': ['F99999.7', 'J84845.4'], 'experiment': 'NA32R'}
Function:
async function addMatToExpr(exp, mat) {
let connection;
try {
connection = await oracledb.getConnection(
{
user: "some_user",
password: "some_pw",
connectString: "someConnStr",
});
result = await connection.execute(
  "INSERT INTO MATERIALS (ID, MAT_NAME, EXPR) VALUES((SELECT max(ID) + 1 FROM MATERIALS), :1, :2)",
  [mat, exp], { autoCommit: true })
} catch (error) {
return res.status(404).json({
title: error,
});
} finally {
if (connection) {
try {
await connection.close()
} catch(error) {
console.log(error)
}
}
}
}
API:
app.post("/insertMaterials", (req, res) => {
for (let mat of req.body["materials"]) {
addMatToExpr(req.body["experiment"], mat)
}
});
Added the async/await function and the API that calls it.
You need to 'await' the Oracle function calls so each completes before continuing. Currently the connection is being closed before the statement is executed.
See all the node-oracledb documentation and examples.
E.g.
async function run() {
let connection;
try {
connection = await oracledb.getConnection(dbConfig);
const result = await connection.execute(sql, binds, options);
console.dir(result, { depth: null });
} catch (err) {
console.error(err);
} finally {
if (connection) {
try {
await connection.close();
} catch (err) {
console.error(err);
}
}
}
}
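Applied to the route from the question, that means the handler itself must be async and await each insert before sending the response. A rough sketch (the materials/experiment keys come from the payload above; the 500 status and error shape are my assumptions):

app.post("/insertMaterials", async (req, res) => {
  try {
    // Await each insert so the response is only sent after all rows are in.
    for (const mat of req.body["materials"]) {
      await addMatToExpr(req.body["experiment"], mat);
    }
    return res.status(200).json({ title: "SUCCESS: Materials Inserted" });
  } catch (error) {
    return res.status(500).json({ title: String(error) });
  }
});

For this to work, addMatToExpr should throw or return the error instead of calling res itself, since res is not in scope inside that function.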
I have written the following code in Node.js, which saves data in MongoDB:
function insertDoc(db,data){
return new Promise(resolve => {
callback=db.collection('AnalysisCollection').insertOne(data).then(function(response,obj){
console.log("Inserted record");
resolve(obj);
//console.log(obj);
// response.on('end',function(){
// resolve(obj);
// });
//return resolve(obj);
}).then(() => { return obj }
).catch(function(error){
throw new Error(error);
});
})
}
I am calling the above function from the main function like this:
async function cosmosDBConnect(nluResultJSON){
try{
//console.log("Inserting to cosmos DB");
console.log(nluResultJSON);
var url = config.cosmos_endpoint;
var result="";
var data = JSON.parse(JSON.stringify(nluResultJSON));
MongoClient.connect(url, function(err, client) {
assert.equal(null, err);
var db = client.db('NLUAnalysisDB');
// insertDoc(db, data, function() {
result=insertDoc(db, data, function() {
console.log(result);
client.close();
//return data._id;
});
});
}
catch (e) {
console.log(e);
}
}
module.exports = { cosmosDBConnect };
But in cosmosDBConnect, I am getting 'undefined' for the result, though in insertDoc I am getting the output for 'obj' with the _id of the inserted record.
Please help me to return this _id to cosmosDBConnect.
You are using callbacks inside of an async function, which creates internal scopes, so your return applies to them instead of to the whole function. You should either use Promise-based methods inside the async function with await (without callbacks), or wrap the whole function in its own Promise.
Example:
function cosmosDBConnect(nluResultJSON) {
return new Promise((resolve, reject) => {
var url = config.cosmos_endpoint;
var result = '';
var data = JSON.parse(JSON.stringify(nluResultJSON));
MongoClient.connect(url, function(err, client) {
if (err) return reject(err);
assert.equal(null, err);
var db = client.db('NLUAnalysisDB');
insertDoc(db, data).then(obj => {
console.log(obj);
client.close();
return resolve(data._id);
});
});
});
}
Also, you need to understand that your insertDoc returns a Promise and does not accept the callback you tried to pass.
Ref: async function
result = insertDoc(db, data).then((data) => {
console.log(data);
}).catch(err => console.error(err));
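For completeness, a fully async/await sketch of cosmosDBConnect that drops both the callback and the extra Promise wrapper could look like the following; it assumes a driver version where MongoClient.connect and insertOne return promises and where the insert result exposes insertedId:

async function cosmosDBConnect(nluResultJSON) {
  const url = config.cosmos_endpoint;
  const client = await MongoClient.connect(url);
  try {
    const db = client.db('NLUAnalysisDB');
    const data = JSON.parse(JSON.stringify(nluResultJSON));
    // insertOne resolves with a result object carrying the id of the new document.
    const response = await db.collection('AnalysisCollection').insertOne(data);
    console.log("Inserted record", response.insertedId);
    return response.insertedId;
  } finally {
    await client.close();
  }
}

A caller can then simply await cosmosDBConnect(data) to get the _id.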
I'm fairly new to Node.js and have stumbled into a problem with my code.
The documentation for SQL Server and a guide I found on YouTube both handle their code this way, but after starting to use bcrypt I've noticed my function returns before the request is complete, although I'm using .then().
Anyways, here's my code so far:
router.post('/login', (req, res) => {
getLoginDetails(req.body.username, req.body.password).then(result => {
console.log(result);
res.json(result);
})
});
async function getLoginDetails(username, password) {
await pool1Connect;
try {
const request = pool1.request();
request.input('username', sql.NVarChar, username);
request.query('SELECT * FROM users WHERE username = @username', (err, result) => {
if (err) {
return ({err: err})
}
if (result.recordset.length > 0) {
bcrypt.compare(password, result.recordset[0].user_password, (err, response) => {
if (response) {
console.log(result.recordset);
return(result.recordset);
} else {
return({message: "Wrong password or username!"})
}
})
return(result)
} else {
return({message: "user not found!"})
}
})
} catch (err) {
return err;
}
}
I tried logging both the request and the return value from getLoginDetails, and the request log came first, so I assume it's not waiting for the query to actually finish, and I can't figure out why.
Sorry if this is obvious, but I'd love to get some help here!
EDIT:
router.post('/login', async (req, res) => {
// res.send(getLoginDetails(req.body.username, req.body.password))
await pool1Connect
try {
const request = pool1.request();
request.input('username', sql.NVarChar, req.body.username);
request.query('SELECT * FROM users WHERE username = @username', (err, result) => {
console.log(result);
bcrypt.compare(req.body.password, result.recordset[0].user_password, (err, response) => {
if (response) {
res.send(result);
} else {
res.send('wrong password')
}
})
//res.send(result)
})
} catch (err) {
res.send(err);
}
});
This code works, but when I tried to encapsulate it in a function it still didn't work.
@Anatoly mentioned .query not finishing in time, which makes sense, but I thought mssql's .query was an async function?
Your problem arises from the wrong assumption that callbacks and promises are alike; on the contrary, callbacks don't "respect" promise/async constructs.
When the program hits the bottom of getLoginDetails, execution has already split into two branches: one branch returned you the (empty) result, while the other is still busy with the crypto operations.
Though it is true that an async function always returns a promise, that doesn't cover any future callbacks that might execute inside it. As soon as Node reaches the end of the function or any return statement, the async function's promise gets resolved (so later callbacks are meaningless for it). What you can do instead is hand-roll your own promise which encompasses the callbacks as well:
router.post('/login', (req, res) => {
getLoginDetails(req.body.username, req.body.password)
.then((result)=>{
res.send(result);
})
.catch((err)=>{
res.send(err);
})
});
async function getLoginDetails(username, password) {
await pool1Connect
return new Promise( (resolve,reject) => {
try {
const request = pool1.request();
request.input('username', sql.NVarChar, username);
request.query('SELECT * FROM users WHERE username = @username', (err, result) => {
console.log(result);
bcrypt.compare(password, result.recordset[0].user_password, (err, response) => {
if (response) {
resolve(result);
} else {
resolve('wrong password')
}
})
})
} catch (err) {
reject(err);
}
});
}
You didn't return any result from getLoginDetails. Either use async versions of request.query and bcrypt.compare (if any exist), or wrap request.query in a new Promise((resolve, reject)) like this:
const asyncResult = new Promise((resolve, reject) => {
request.query('SELECT ...
...
if (err) {
resolve({err: err}) // replace all return statements with resolve calls
}
...
})
const queryResult = await asyncResult;
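Another way to avoid hand-rolling the Promise is to use the promise forms both libraries already expose: mssql's request.query() returns a promise when no callback is passed, and (assuming the usual bcrypt/bcryptjs package) bcrypt.compare() does the same. A sketch along those lines:

async function getLoginDetails(username, password) {
  await pool1Connect;
  const request = pool1.request();
  request.input('username', sql.NVarChar, username);

  // Without a callback, request.query() returns a promise.
  const result = await request.query('SELECT * FROM users WHERE username = @username');
  if (result.recordset.length === 0) {
    return { message: "user not found!" };
  }

  // bcrypt.compare() also returns a promise when no callback is passed.
  const passwordOk = await bcrypt.compare(password, result.recordset[0].user_password);
  return passwordOk ? result.recordset : { message: "Wrong password or username!" };
}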
The async function below is supposed to check whether a URL is legit:
let CheckUrl = function (url, done) {
dns.lookup(url, function(err, address) {
if (err) return done(err);
done(null, true); //return true because I don't care what the address is, only that it works
});
}
The Express.js code below gets the URL, but I'm having trouble understanding how to write the if statement so that it returns true or false.
// Gets URL
app.post("/api/shorturl/new", function(req, res) {
if (CheckUrl(req.body.url)) {
// do something
}
});
I'm not sure what to pass as the second argument in CheckUrl() in this if statement. Or maybe I wrote the first async function incorrectly to begin with?
Please use async/await.
I have written test code for you below:
const express = require('express');
const app = express();
const dns = require('dns');
let CheckUrl = function (url, done) {
return new Promise((resolve, reject) => {
dns.lookup(url, function(err, address) {
console.log("err " , err)
if (err) {
resolve(false)
} else {
resolve(true)
}
});
});
}
app.post("/api/shorturl/new", async function(req, res) {
try {
let result = await CheckUrl(req.body.url);
console.log("result " , result)
res.send(result)
}
catch (error) {
console.log("in catch error " , error)
res.send(error)
}
});
app.listen(3000)
You can learn more about Promises here. The Promise object represents the eventual completion (or failure) of an asynchronous operation and its resulting value.
As mentioned by DeepKakkar, this was what I was looking for:
app.post("/api/shorturl/new", async (req, res) => {
try {
let result = await CheckUrl(req.body.url);
res.send(result)
}
catch (error) {
return new Error('Could not receive post');
}
});
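As a side note, the hand-written Promise can be skipped entirely by using Node's built-in promise-based DNS API (a sketch; assumes Node 10 or later):

const dns = require('dns').promises;

let CheckUrl = async function (url) {
  try {
    await dns.lookup(url); // resolves if the hostname can be looked up
    return true;
  } catch (err) {
    return false;
  }
};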
I am learning Node.js and databases. I am trying to stream heavy data, about 7,700,000 rows and 96 columns, from Oracle to the client. Later I use that data for a virtual table. But the client shows only one row, and then the Node console displays the error "Cannot set headers after they are sent to the client". How do I stream the data to the client? Please help.
var oracledb = require('oracledb');
const cors = require('cors');
var express = require('express');
var app = express();
app.use(cors());
oracledb.outFormat = oracledb.ARRAY;
oracledb.getConnection({
user: 'user',
password: 'password',
connectString: 'some string'
},
(err, connection) => {
if (err) {
console.error(err.message);
return;
}
var rowsProcessed = 0;
var startTime = Date.now();
var dataSize = 0;
var stream = connection.queryStream(
'SELECT * FROM table',
);
// stream.on('data', function (data) {
// rowsProcessed++;
// // console.log(JSON.stringify(data));
// // console.log(data);
// dataSize = dataSize + data.length;
// // oracleData.push(data);
// // console.log("pushing");
// // console.log(oracleData);
// // app.get('/data', (req, res) => {
// // res.send(data);
// // })
// // console.log(data);
// });
app.get('/data', (req, res) => {
stream.on('data', (data) => {
rowsProcessed++;
dataSize = dataSize + data.length;
res.send(JSON.stringify(data));
})
})
stream.on('end', function () {
var t = ((Date.now() - startTime) / 1000);
console.log('queryStream(): rows: ' + rowsProcessed +
', seconds: ' + t);
// console.log(dataSize + ' bytes');
connection.close(
function (err) {
if (err) {
console.error(err.message);
} else {
console.log("connection closed")
}
}
)
})
}
);
app.listen(5000, () => {
console.log('Listening at 5000')
})
I tried using the above approach, but it is failing. How can I achieve the desired output?
The browser freezes if I send all the data at once, which is why I am trying to use streaming, and the Node process runs out of memory if I load all the data at once.
Thank you.
The first thing you'll want to do is organize your app a little better. Separation of concerns is important, you should have a connection pool, etc. Have a look at this series for some ideas: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
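For the pool part specifically, a minimal sketch is to create a default pool once at startup and let oracledb.getConnection() borrow from it (the credentials below are placeholders):

const oracledb = require('oracledb');

async function initPool() {
  // Create the default connection pool once at startup; later calls to
  // oracledb.getConnection() with no arguments borrow from this pool.
  await oracledb.createPool({
    user: 'some_user',
    password: 'some_pw',
    connectString: 'someConnStr',
    poolMax: 10
  });
}

Call initPool() once before app.listen() so the request handlers only ever borrow and release pooled connections.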
Once you get the organization figured out, incorporate this example of streaming a large result set out.
const oracledb = require('oracledb');
async function get(req, res, next) {
try {
const conn = await oracledb.getConnection();
const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});
res.writeHead(200, {'Content-Type': 'application/json'});
res.write('[');
stream.on('data', (row) => {
res.write(JSON.stringify(row));
res.write(',');
});
stream.on('end', () => {
res.end(']');
});
stream.on('close', async () => {
try {
await conn.close();
} catch (err) {
console.log(err);
}
});
stream.on('error', async (err) => {
next(err);
try {
await conn.close();
} catch (err) {
console.log(err);
}
});
} catch (err) {
next(err);
}
}
module.exports.get = get;
If you find you're doing this a lot, simplify things by creating a reusable transform stream:
const oracledb = require('oracledb');
const { Transform } = require('stream');
class ToJSONArray extends Transform {
constructor() {
super({objectMode: true});
this.push('[');
}
_transform (row, encoding, callback) {
if (this._prevRow) {
this.push(JSON.stringify(this._prevRow));
this.push(',');
}
this._prevRow = row;
callback(null);
}
_flush (done) {
if (this._prevRow) {
this.push(JSON.stringify(this._prevRow));
}
this.push(']');
delete this._prevRow;
done();
}
}
async function get(req, res, next) {
try {
const toJSONArray = new ToJSONArray();
const conn = await oracledb.getConnection();
const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});
res.writeHead(200, {'Content-Type': 'application/json'});
stream.pipe(toJSONArray).pipe(res);
stream.on('close', async () => {
try {
await conn.close();
} catch (err) {
console.log(err);
}
});
stream.on('error', async (err) => {
next(err);
try {
await conn.close();
} catch (err) {
console.log(err);
}
});
} catch (err) {
next(err);
}
}
module.exports.get = get;
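To wire either handler into an Express app like the one in the question, mount it on a route and add an error-handling middleware for whatever gets passed to next(); the path and module name here are assumptions:

const employees = require('./employees'); // hypothetical module exporting get() above

app.get('/data', employees.get);

// Error-handling middleware so next(err) from the stream ends the response cleanly.
app.use((err, req, res, next) => {
  console.error(err);
  if (!res.headersSent) {
    res.status(500).json({ error: 'query failed' });
  } else {
    res.end();
  }
});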