Streaming data from Oracle DB to browser with Node.js

I am learning Node.js and databases. I am trying to stream heavy data, about 7,700,000 rows and 96 columns, from Oracle to the client, where I will later use it for a virtual table. But the client only ever shows one row, and then the Node console displays the error "Cannot set headers after they are sent to the client". How can I stream the data to the client? Please help.
var oracledb = require('oracledb');
const cors = require('cors');
var express = require('express');
var app = express();

app.use(cors());

oracledb.outFormat = oracledb.ARRAY;

oracledb.getConnection({
    user: 'user',
    password: 'password',
    connectString: 'some string'
  },
  (err, connection) => {
    if (err) {
      console.error(err.message);
      return;
    }

    var rowsProcessed = 0;
    var startTime = Date.now();
    var dataSize = 0;

    var stream = connection.queryStream(
      'SELECT * FROM table',
    );

    // stream.on('data', function (data) {
    //   rowsProcessed++;
    //   // console.log(JSON.stringify(data));
    //   dataSize = dataSize + data.length;
    //   // oracleData.push(data);
    //   // app.get('/data', (req, res) => {
    //   //   res.send(data);
    //   // })
    // });

    app.get('/data', (req, res) => {
      stream.on('data', (data) => {
        rowsProcessed++;
        dataSize = dataSize + data.length;
        res.send(JSON.stringify(data));
      })
    })

    stream.on('end', function () {
      var t = ((Date.now() - startTime) / 1000);
      console.log('queryStream(): rows: ' + rowsProcessed +
        ', seconds: ' + t);
      // console.log(dataSize + ' bytes');
      connection.close(
        function (err) {
          if (err) {
            console.error(err.message);
          } else {
            console.log("connection closed")
          }
        }
      )
    })
  }
);

app.listen(5000, () => {
  console.log('Listening at 5000')
})
I tried the approach above, but it fails. How can I achieve this output? The browser freezes if I send the entire data set at once, which is why I am trying to stream it, and the Node process runs out of memory if I load everything at once.
Thank you.

The first thing you'll want to do is organize your app a little better. Separation of concerns is important; you should have a connection pool, etc. Have a look at this series for some ideas: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
Once you get the organization figured out, incorporate this example of streaming a large result set out.
const oracledb = require('oracledb');

async function get(req, res, next) {
  try {
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});
    res.write('[');

    // Prefix every row after the first with a comma so the response stays
    // valid JSON (a comma after each row would leave a trailing comma before ']').
    let firstRow = true;

    stream.on('data', (row) => {
      if (firstRow) {
        firstRow = false;
      } else {
        res.write(',');
      }
      res.write(JSON.stringify(row));
    });

    stream.on('end', () => {
      res.end(']');
    });

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}
module.exports.get = get;
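For context, wiring this module into the Express app from the question might look like the following sketch (the filename and route path here are illustrative, not from the original answer):

// employees.js is a hypothetical filename for the module above
const employees = require('./employees.js');

app.get('/data', employees.get);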
If you find you're doing this a lot, simplify things by creating a reusable transform stream:
const oracledb = require('oracledb');
const { Transform } = require('stream');

class ToJSONArray extends Transform {
  constructor() {
    super({objectMode: true});

    this.push('[');
  }

  _transform(row, encoding, callback) {
    // Hold each row back one step so the final row can be written
    // without a trailing comma in _flush.
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
      this.push(',');
    }

    this._prevRow = row;

    callback(null);
  }

  _flush(done) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
    }

    this.push(']');

    delete this._prevRow;

    done();
  }
}
async function get(req, res, next) {
  try {
    const toJSONArray = new ToJSONArray();
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});

    stream.pipe(toJSONArray).pipe(res);

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
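One caveat, as an aside rather than part of the original answer: .pipe() by itself does not tear anything down if the client disconnects mid-response. On Node 10 and later, stream.pipeline wires up that error propagation for you; a minimal sketch of the same body using it:

const { pipeline } = require('stream');

// Inside get(), replacing the two .pipe() calls (sketch only):
pipeline(stream, toJSONArray, res, (err) => {
  if (err) {
    next(err); // surfaces query-stream or response errors to Express
  }
});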

Related

How can I get list record from SQL Server in NodeJS

I have started developing a simple web application with NodeJS, and when I try to get a list of records from SQL Server to show on the list page, it somehow doesn't work.
Here is the code :
const express = require("express");
const bodyParser = require("body-parser");
const sql = require("mssql");
const DBUtils = require("./DBUtils");

const app = express();

app.get("/all", (req, res, next) => {
  let mypromise = new Promise((reso, rej) => {
    let nameList = DBUtils.getNameList(sql);
    if (nameList !== null || typeof nameList !== "undefined") {
      reso(nameList);
    } else {
      rej("Error");
    }
  })
    .then((result) => {
      res.send(result);
    })
    .catch((err) => {
      console.log(err);
    });
});

app.get("/", (req, res, next) => {
  console.log("the / route");
  res.send("<h1>Hello to NodeJS</h1>");
});

app.listen(5003);
My DBUtils:
const config = {
  user: "sa",
  password: "123",
  server: "DESKTOP-7KGJI7L", // You can use 'localhost\\instance' to connect to named instance
  database: "java",
  options: {
    encrypt: false,
  },
};

const getNameList = (sql) => {
  let nameList = "";
  let errorString = "";
  // Create connection
  sql.connect(config, function (err) {
    // Err
    if (err) {
      console.log(err);
    }
    // Create Request object
    let sqlRequest = new sql.Request();
    // QueryString
    let queryString = `select * from NAME`;
    // Run the query
    sqlRequest.query(queryString, (err, data) => {
      if (err) console.log(err);
      //console.log(data); //data.recordset(array)[index].name
      data.recordset.forEach((el) => {
        nameList += `<li>${el.name}</li>`;
      });
      return nameList;
    });
  });
};

exports.getNameList = getNameList;
I'm pretty sure something is wrong in the Promise code, but I don't know how to fix it. Any suggestions?
I think you are a newbie in Node.js and have made a common mistake: you did not use the promise pattern correctly. Also, there is no need to pass the next callback unless it is required.
Change getNameList as below :
const getNameList = (sql) => {
  let nameList = "";
  let errorString = "";
  // Create connection
  return new Promise(function (resolve, reject) {
    sql.connect(config, function (err) {
      // Err
      if (err) {
        console.log(err);
        return reject(err); // return so we don't continue after a failed connection
      }
      // Create Request object
      let sqlRequest = new sql.Request();
      // QueryString
      let queryString = `select * from NAME`;
      // Run the query
      sqlRequest.query(queryString, (err, data) => {
        if (err) {
          console.log(err);
          return reject(err);
        }
        //console.log(data); //data.recordset(array)[index].name
        data.recordset.forEach((el) => {
          nameList += `<li>${el.name}</li>`;
        });
        resolve(nameList);
      });
    });
  });
};
Change app.get("/all") as below:
app.get("/all", (req, res) => {
DBUtils.getNameList(sql).then(function(list) {
res.status(200).send(list)
}).catch(function(err) { //handle error here
res.status(500)
})
})
Moreover, learn how to use promises and async/await; a sketch of this same flow with async/await follows below.
Use the appropriate body-parser for the requirement, i.e. json, text, etc.
Learn how and when to use next.
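As an aside, here is a hedged sketch of the same getNameList/route pair rewritten with async/await, assuming the same config object as above (not part of the original answer):

// A minimal async/await sketch of the same flow.
const getNameList = async (sql) => {
  await sql.connect(config);

  const sqlRequest = new sql.Request();
  const data = await sqlRequest.query(`select * from NAME`);

  // Build the same <li> markup from the recordset.
  return data.recordset.map((el) => `<li>${el.name}</li>`).join("");
};

app.get("/all", async (req, res) => {
  try {
    const list = await getNameList(sql);
    res.status(200).send(list);
  } catch (err) {
    res.status(500).send(err);
  }
});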

Nodejs Multiple pools created on refresh

I have several DBs for which I am using connection pools in Node.js. Every time I refresh the page, I think the pools are created again: I refreshed the page 3 times and the promises resolved 3 times. I have removed several databases from the snippet just to make it a little easier to read here.
Also, if I un-comment the connection close line, my app crashes, and I can't seem to figure out why.
const config = require("../config/config");
const oracledb = require("oracledb");

var crm1connPromise = new Promise((resolve, reject) => {
  oracledb.createPool({
    user: config.crm1.user,
    password: config.crm1.password,
    connectString: config.crm1.connectString,
    poolAlias: config.crm1.poolAlias,
    poolMin: 0,
    poolMax: 10,
    poolTimeout: 300
  }, (error, pool) => {
    if (error) {
      reject(error); // 'error' is the callback's error argument
    }
    resolve("CRM1 Promise resolved")
  });
});

var query2connPromise = new Promise((resolve, reject) => {
  oracledb.createPool({
    user: config.query2.user,
    password: config.query2.password,
    connectString: config.query2.connectString,
    poolAlias: config.query2.poolAlias,
    poolMin: 0,
    poolMax: 10,
    poolTimeout: 300
  }, (error, pool) => {
    if (error) {
      reject(error);
    }
    resolve("QUERY2 Promise resolved --------")
  });
});
var promiseArray = [
  crm1connPromise, crm2connPromise, crm3connPromise, crm4connPromise,
  csfp1connPromise, csfp2connPromise, csfp3connPromise, csfp4connPromise,
  cact1connPromise, cact2connPromise, cact3connPromise, cact4connPromise,
  cospconnPromise, cchnconnPromise, bbaseconnPromise, bcdrconnPromise,
  vcdbconnPromise, crptconnPromise, query2connPromise
];

function getDBConnection(dbname) {
  return new Promise((resolve, reject) => {
    try {
      Promise.all(promiseArray).then((message) => {
        console.log(message);
        const pool = oracledb.getPool(dbname);
        pool.getConnection((err, connection) => {
          if (err) {
            reject(err);
            console.log(err);
          }
          resolve(connection);
        });
      });
    } catch (error) {
      reject(error);
    }
  });
}
module.exports.query = function (dbname, sql, bind = []) {
  return new Promise((resolve, reject) => {
    var conn
    try {
      getDBConnection(dbname).then((connection) => {
        connection.execute(sql, bind, (err, result) => {
          if (err) {
            reject(err);
          }
          resolve(result);
        })
        //connection.close(0);
      })
    } catch (error) {
      reject(error);
    }
  })
}
You can use a singleton. Please google 'Singleton pattern' for examples.
Like this:
dataBaseManager.js:

'use strict'

var Singleton = (function () {
  var instance;

  function createInstance() {
    var object = new dataBaseManager();
    return object;
  }

  return {
    getInstance: function () {
      if (!instance) {
        instance = createInstance();
      }
      return instance;
    }
  };
})();

function dataBaseManager() {
  this.connected = false;
  this.client = null;
  this.dataBase = null;

  //public methods
  this.connect = function () {
    try {
      your_database.connect({}, (err, client) => {
        if (err) {
          this.connected = false;
          this.client = null;
          this.dataBase = null;
          return;
        }
        this.connected = true;
        this.client = client;
        this.dataBase = client.db();
      });
    } catch (error) {
    }
  };

  this.disconnect = function () {
    try {
      if (this.client) {
        this.client.close();
        this.connected = false;
        this.client = null;
        this.dataBase = null;
      }
    } catch (error) {
    }
  }
}

module.exports = Singleton;
repository.js:

const dataBaseManager = require("./dataBaseManager").getInstance();

// The result is delivered via a callback, which must be passed in.
your_get_sample_data_from_data_base_func = function (data, callback) {
  dataBaseManager.dataBase
    .find({})
    .toArray(function (err, result) {
      if (err) {
        return callback(err, null);
      }
      callback(null, result);
    });
};
index.js:

const dataBaseManager = require("./dataBaseManager").getInstance();

function connect() {
  dataBaseManager.connect();
}

function disconnect() {
  dataBaseManager.disconnect();
}
Look at the node-oracledb example webappawait.js, which starts the pool outside the web listener code path.
async function init() {
  try {
    await oracledb.createPool({
      user: dbConfig.user,
      password: dbConfig.password,
      connectString: dbConfig.connectString
    });

    const server = http.createServer();

    server.on('error', (err) => {
      console.log('HTTP server problem: ' + err);
    });

    server.on('request', (request, response) => {
      handleRequest(request, response);
    });

    await server.listen(httpPort);

    console.log("Server is running at http://localhost:" + httpPort);
  } catch (err) {
    console.error("init() error: " + err.message);
  }
}

async function handleRequest(request, response) {
  . . .
}
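To round that out, here is a hedged sketch of what the elided handleRequest body might do: borrow a connection from the default pool per request and always release it (the table name and response shape are illustrative, not from the example):

async function handleRequest(request, response) {
  let connection;
  try {
    connection = await oracledb.getConnection(); // borrows from the default pool created in init()
    const result = await connection.execute('select * from employees'); // illustrative query
    response.writeHead(200, { 'Content-Type': 'application/json' });
    response.end(JSON.stringify(result.rows));
  } catch (err) {
    response.writeHead(500);
    response.end(err.message);
  } finally {
    if (connection) {
      try {
        await connection.close(); // returns the connection to the pool
      } catch (err) {
        console.error(err);
      }
    }
  }
}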

Passing input parameters to node mssql query function

I'm using a Node.js server to make requests to an Azure SQL database.
As far as I understand, the following function does not prevent SQL injection:
Current code (working but unsafe):
var executeQuery = async function (query, response) {
  const pool = new sql.ConnectionPool(dbConfig)
  pool.on('error', err => {
    console.log('sql errors', err);
  });
  try {
    await pool.connect();
    let result = await pool.request().query(query);
    response.send(result.recordset);
    return { success: result }
  } catch (err) {
    return { err: err };
  } finally {
    console.log('request complete')
    pool.close(); // closing connection after request is finished
  }
};

app.get("/api/workOrders/byId/:workOrderId", function (req, res) {
  console.log(req.params);
  var query = "SELECT * FROM [WorkOrder] WHERE [idWorkOrder]=" + req.params.workOrderId;
  executeQuery(query, res);
});
I would like to have executeQuery as a standalone function, but I have not found an answer for that yet. Anyway, this is the code I constructed from the mssql documentation:
New code (not working):
app.get("/api/test/:workOrderId", function(req, res) {
console.log(req.params.workOrderId);
(async function() {
const pool = new sql.ConnectionPool(dbConfig)
pool.on('error', err => {
console.log('sql errors', err);
});
try {
await pool.connect();
let result = await pool.request()
.input('input_parameter', sql.VarChar(50), req.params.workOrderId)
.query('SELECT * FROM [Quotation] WHERE [idWorkOrder]= #input_parameter');
console.log(result);
res.send(result.recordset);
return {success: result}
} catch (err) {
return {err: err};
} finally {
console.log('request complete')
pool.close(); // closing connection after request is finished
}
});
})
This version should be injection-proof, but it does not return anything. Is there an option to pass the input values to the executeQuery function as in the current code?
You can pass the value of req.params.workOrderId into your async function and then use that value inside. Check the following code:
app.get("/api/test/:workOrderId", function(req, res) {
console.log(req.params.workOrderId);
(async function(workOrderId) {
const pool = new sql.ConnectionPool(dbConfig)
pool.on('error', err => {
console.log('sql errors', err);
});
try {
await pool.connect();
let result = await pool.request()
.input('input_parameter', sql.VarChar(50), workOrderId)
.query('SELECT * FROM [Quotation] WHERE [idWorkOrder]= #input_parameter');
console.log(result);
res.send(result.recordset);
return {success: result}
} catch (err) {
return {err: err};
} finally {
console.log('request complete')
pool.close(); // closing connection after request is finished
}
})(req.params.workOrderId); // <===pass value to the function
})
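To address the standalone-function part of the question as well, here is a hedged sketch of a reusable, parameterized executeQuery; the params array and its { name, type, value } shape are my own convention, not from the mssql docs:

// Sketch of a standalone executeQuery that accepts a parameter list.
var executeQuery = async function (query, params, response) {
  const pool = new sql.ConnectionPool(dbConfig);
  pool.on('error', err => {
    console.log('sql errors', err);
  });
  try {
    await pool.connect();
    const request = pool.request();
    // Declare each value with .input() so the driver sends it separately
    // from the SQL text, which is what defeats injection.
    for (const p of params) {
      request.input(p.name, p.type, p.value);
    }
    const result = await request.query(query);
    response.send(result.recordset);
    return { success: result };
  } catch (err) {
    return { err: err };
  } finally {
    console.log('request complete');
    pool.close();
  }
};

app.get("/api/workOrders/byId/:workOrderId", function (req, res) {
  executeQuery(
    'SELECT * FROM [WorkOrder] WHERE [idWorkOrder] = @workOrderId',
    [{ name: 'workOrderId', type: sql.VarChar(50), value: req.params.workOrderId }],
    res
  );
});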

Mongoose remove and create in get route

I have a small issue with Mongoose. What I am doing is getting data from online RSS feeds, parsing it, and passing it to an array, from which I feed a Mongoose model; all of this happens in the GET route. What I want to accomplish is to delete all the data from the Mongoose model first and then populate it with the new data, but it always either deletes the data altogether (since the parser iterates a few times) or deletes nothing, and the data just keeps being added to the model.
Here's my code:
'use strict';

const Promise = require('bluebird');
const request = require('request');
const FeedParser = require('feedparser');
const express = require('express');
const router = express.Router();
const xray = require('x-ray')();
var Post = require('../models/post');
var dataArray = [];

router.get('/', function (req, res) {
  const fetch = (url) => {
    return new Promise((resolve, reject) => {
      if (!url) {
        return reject(new Error(`Bad URL (url: ${url}`));
      }

      const feedparser = new FeedParser();
      const items = [];

      feedparser.on('error', (e) => {
        return reject(e);
      }).on('readable', () => {
        // This is where the action is!
        var item;
        console.time('loading')
        while (item = feedparser.read()) {
          items.push(item);
        }
      }).on('end', () => {
        resolve({
          meta: feedparser.meta,
          records: items
        });
      });

      request({
        method: 'GET',
        url: url
      }, (e, res, body) => {
        if (e) {
          return reject(e);
        } else if (res.statusCode != 200) {
          return reject(new Error(`Bad status code (status: ${res.statusCode}, url: ${url})`));
        }

        feedparser.end(body);
        feedparser.on('end', function () {
          console.log('Done');
        });
      });
    });
  };

  Promise.map([
    'url',
    'url',
    'url',
    'url'], (url) => fetch(url), { concurrency: 4 }) // note that concurrency limit
    .then((feeds) => {
      feeds.forEach(feed => {
        feed.records.forEach(record => {
          dataArray.push(record);
        });
      });
    }).catch(function (error) {
      console.log(error);
    });

  Post.remove({}, function (err) {
    if (err) {
      console.log(err);
    } else {
      console.log('collection removed');
    }
  });

  dataArray.forEach(post => {
    Post.create({
      title: post.title,
      content: post.description,
      created: post.date,
      image: post['rss:image']['#'],
      link: post.link
    }, function (err, newPost) {
      console.log(newPost.title);
    });
  });

  Post.find({}, function (err, posts) {
    if (err) {
      console.log(err);
    } else {
      res.render('index/home', {
        posts: posts
      });
    }
  });
});

module.exports = router;
None of this is going to run synchronously. You can do something like this:
'use strict';

const Promise = require('bluebird');
const request = require('request');
const FeedParser = require('feedparser');
const express = require('express');
const router = express.Router();
const xray = require('x-ray')();
var Post = require('../models/post');
var dataArray = [];
let fetch; // 'let' rather than 'const': the function is assigned later, inside the route

router.get('/', function (req, res) {
  Post.remove({}, function (err) {
    if (err) {
      console.log(err);
    } else {
      console.log('collection removed. Starting to fetch Posts from Service');
      fetch = (url) => {
        return new Promise((resolve, reject) => {
          if (!url) {
            return reject(new Error(`Bad URL (url: ${url}`));
          }

          const feedparser = new FeedParser();
          const items = [];

          feedparser.on('error', (e) => {
            return reject(e);
          }).on('readable', () => {
            // This is where the action is!
            var item;
            console.time('loading')
            while (item = feedparser.read()) {
              items.push(item);
            }
          }).on('end', () => {
            resolve({
              meta: feedparser.meta,
              records: items
            });
          });

          request({
            method: 'GET',
            url: url
          }, (e, res, body) => {
            if (e) {
              return reject(e);
            } else if (res.statusCode != 200) {
              return reject(new Error(`Bad status code (status: ${res.statusCode}, url: ${url})`));
            }

            feedparser.end(body);
            feedparser.on('end', function () {
              console.log('Done');
            });
          });
        });
      };
    }
  });

  Promise.map([
    'url',
    'url',
    'url',
    'url'], (url) => fetch(url), { concurrency: 4 }) // note that concurrency limit
    .then((feeds) => {
      feeds.forEach(feed => {
        dataArray = dataArray.concat(feed.records);
        /*feed.records.forEach(record => {
          dataArray.push(record);
        });*/
      });

      console.log('inserting posts in the collection');
      dataArray.forEach(post => {
        Post.create({
          title: post.title,
          content: post.description,
          created: post.date,
          image: post['rss:image']['#'],
          link: post.link
        }, function (err, newPost) {
          console.log(newPost.title);
        });
      });

      console.log("Fetching posts from the collection");
      Post.find({}, function (err, posts) {
        if (err) {
          console.log(err);
        } else {
          res.render('index/home', {
            posts: posts
          });
        }
      });
    }).catch(function (error) {
      console.log(error);
    });
});

module.exports = router;
I haven't tested this, so please try it on your end and let me know if there's an error or something.
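One further hedged note: even in the rewrite, the Post.create calls are fire-and-forget, so Post.find can still run before the inserts finish. Assuming a Mongoose version whose model methods return promises when no callback is given, the tail of the chain could be sequenced explicitly, as in this sketch:

// Sketch only: sequences remove -> create -> find.
// 'urls' stands for the array of feed URLs from above.
Promise.map(urls, (url) => fetch(url), { concurrency: 4 })
  .then((feeds) => feeds.reduce((all, feed) => all.concat(feed.records), []))
  .then((records) => Post.remove({}).then(() => records))
  .then((records) => Promise.all(records.map((post) => Post.create({
    title: post.title,
    content: post.description,
    created: post.date,
    image: post['rss:image']['#'],
    link: post.link
  }))))
  .then(() => Post.find({}))
  .then((posts) => res.render('index/home', { posts: posts }))
  .catch((error) => console.log(error));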

async.eachSeries runs only once with async.waterfall inside for each iteration

I am new to the async library. I have used async.eachSeries with async.waterfall inside each iteration, and I see that the async.waterfall runs only once.
Here is my code:
var fs = require('fs'),
  async = require('async'),
  Client = require('node-rest-client').Client;

// REST API call, with output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.

async.eachSeries(jsonOutput.results, function (account, callback) {
  var dataObject = {};
  dataObject.updatetime = new Date();

  var setAccountInfoURL = ""; // data update REST API request

  async.waterfall([
    function setAccountInfo(updateCallback) {
      // client.get(setAccountInfoURL, function (data, response) {
      //   var jsonOutput = JSON.parse(data.toString('utf8'));
      updateCallback(null, "output", account)
      // });
    },
    function saveAccountInfo(jsonOutput, account, updateCallback) {
      var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
      fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
        if (err) {
          console.log(err);
        }
        console.log("JSON saved to " + "debuginfo.json");
        updateCallback(null);
      });
    }
  ], function asyncComplete(err) {
    if (err) {
      console.warn('Error setting account info.', err);
    }
    console.log('async completed');
  });
}, function (err) {
  if (err) {
    console.log('error in loop');
  }
  console.log('loop completed');
});
Output:
124
JSON saved to debuginfo.json
async completed
Any help is really appreciated.
I found my mistake: I missed calling the iteration callback after each waterfall completed.
var fs = require('fs'),
  async = require('async'),
  Client = require('node-rest-client').Client;

// REST API call, with output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.

async.eachSeries(jsonOutput.results, function (account, callback) {
  var dataObject = {};
  dataObject.updatetime = new Date();

  var setAccountInfoURL = ""; // data update REST API request

  async.waterfall([
    function setAccountInfo(updateCallback) {
      // client.get(setAccountInfoURL, function (data, response) {
      //   var jsonOutput = JSON.parse(data.toString('utf8'));
      updateCallback(null, "output", account)
      // });
    },
    function saveAccountInfo(jsonOutput, account, updateCallback) {
      var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
      fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
        if (err) {
          console.log(err);
        }
        console.log("JSON saved to " + "debuginfo.json");
        updateCallback(null);
      });
    }
  ], function asyncComplete(err) {
    if (err) {
      console.warn('Error setting account info.', err);
    }
    console.log('async completed');
    callback(null); // this is the change.
  });
}, function (err) {
  if (err) {
    console.log('error in loop');
  }
  console.log('loop completed');
});
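For anyone else hitting this, a minimal standalone illustration of the rule (the values here are made up): async.eachSeries only advances to the next item once the iteration callback is invoked.

const async = require('async');

async.eachSeries([1, 2, 3], (item, callback) => {
  setTimeout(() => {
    console.log('processed', item);
    callback(null); // without this call, eachSeries would stall after the first item
  }, 100);
}, (err) => {
  console.log('loop completed');
});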
