I have seen several answers to this question, but I have not been able to resolve the issue, so I am re-posting it with my code. I am new to this and need help understanding and fixing it.
My code:
// Get logs from DB
router.get("/getlogs/:dbtype", function (req, res, next) {
    var dbtype = req.params.dbtype;
    if (dbtype == "mongodb") {
        list = Logs.find(function (err, log) {
            res.json(log);
        });
    } else if (dbtype == "mssql") {
        var config = {
            userName: 'user',
            password: 'pass',
            server: 'server',
            options: {
                instanceName: 'instance',
                database: 'db',
            }
        };
        // SQL Server connection
        var connection = new Tedious.Connection(config);
        connection.on('connect', function (err) {
            // if (err) { console.log(err); res.json(err); }
            // If no error, then good to proceed.
            //console.log("SQL Server Connected SQL SQL");
            //connection.on('debug', function(err) { console.log('debug:', err); });
            var request = new Tedious.Request(`select top 1 * from table1`, function (err) {
                //if (err) { console.log(err); res.json(err); }
            });
            request.on('row', function (columns) {
                var row = {};
                columns.forEach(function (column) {
                    row[column.metadata.colName] = column.value;
                });
                rows.push(row);
                res.json(rows); // <-- this is where I get the error
            });
            request.on('end', function () {
                res.json(rows);
            });
            connection.execSql(request);
        });
    }
});
It's because you are sending a response every time you receive a row.
Change this:
request.on('row', function (columns) {
    var row = {};
    columns.forEach(function (column) {
        row[column.metadata.colName] = column.value;
    });
    rows.push(row);
    res.json(rows);
});
to:
request.on('row', function (columns) {
    var row = {};
    columns.forEach(function (column) {
        row[column.metadata.colName] = column.value;
    });
    rows.push(row);
});

request.on('doneInProc', function () {
    res.json(rows);
});
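For completeness, here is a minimal sketch of the whole request portion with that change applied. Note that rows also has to be initialized before the request runs, and the duplicate res.json(rows) in the 'end' handler from the question has to be removed, otherwise the response is still sent twice:
var rows = [];

var request = new Tedious.Request(`select top 1 * from table1`, function (err) {
    if (err) { console.log(err); }
});

request.on('row', function (columns) {
    var row = {};
    columns.forEach(function (column) {
        row[column.metadata.colName] = column.value;
    });
    rows.push(row); // only collect rows here
});

request.on('doneInProc', function () {
    res.json(rows); // send the response exactly once
});

connection.execSql(request);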
I am learning Node.js and databases. I am trying to stream a large data set (about 7,700,000 rows and 96 columns) from Oracle to the client, where I later use it for a virtual table. However, the client shows only one row, and the Node console then displays the error "Cannot set headers after they are sent to the client". How do I stream the data to the client? Please help.
var oracledb = require('oracledb');
const cors = require('cors');
var express = require('express');
var app = express();

app.use(cors());

oracledb.outFormat = oracledb.ARRAY;

oracledb.getConnection({
        user: 'user',
        password: 'password',
        connectString: 'some string'
    },
    (err, connection) => {
        if (err) {
            console.error(err.message);
            return;
        }

        var rowsProcessed = 0;
        var startTime = Date.now();
        var dataSize = 0;

        var stream = connection.queryStream(
            'SELECT * FROM table',
        );

        // stream.on('data', function (data) {
        //     rowsProcessed++;
        //     // console.log(JSON.stringify(data));
        //     // console.log(data);
        //     dataSize = dataSize + data.length;
        //     // oracleData.push(data);
        //     // console.log("pushing");
        //     // console.log(oracleData);
        //     // app.get('/data', (req, res) => {
        //     //     res.send(data);
        //     // })
        //     // console.log(data);
        // });

        app.get('/data', (req, res) => {
            stream.on('data', (data) => {
                rowsProcessed++;
                dataSize = dataSize + data.length;
                res.send(JSON.stringify(data));
            })
        })

        stream.on('end', function () {
            var t = ((Date.now() - startTime) / 1000);
            console.log('queryStream(): rows: ' + rowsProcessed +
                ', seconds: ' + t);
            // console.log(dataSize + ' bytes');
            connection.close(
                function (err) {
                    if (err) {
                        console.error(err.message);
                    } else {
                        console.log("connection closed")
                    }
                }
            )
        })
    }
);

app.listen(5000, () => {
    console.log('Listening at 5000')
})
I tried the above approach, but it is failing. How can I achieve the desired output?
The browser freezes if I send the entire data set at once, which is why I am trying to use streaming; the Node console also reports an out-of-memory error if I load everything at once.
Thank you.
The first thing you'll want to do is organize your app a little better. Separation of concerns is important; you should have a connection pool, etc. Have a look at this series for some ideas: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
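As a rough idea of what that series describes, a connection pool can be created once at startup, along these lines (this is only a sketch; the config names and the initialize() helper are illustrative, not taken from the series itself):
// database.js (hypothetical) - create a default connection pool once at startup
const oracledb = require('oracledb');

async function initialize() {
  await oracledb.createPool({
    user: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    connectString: process.env.DB_CONNECT_STRING
  });
}

module.exports.initialize = initialize;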
Once you get the organization figured out, incorporate this example of streaming a large result set out.
const oracledb = require('oracledb');

async function get(req, res, next) {
  try {
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});
    res.write('[');

    stream.on('data', (row) => {
      res.write(JSON.stringify(row));
      res.write(',');
    });

    stream.on('end', () => {
      res.end(']');
    });

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
If you find you're doing this a lot, simplify things by creating a reusable transform stream:
const oracledb = require('oracledb');
const { Transform } = require('stream');

class ToJSONArray extends Transform {
  constructor() {
    super({objectMode: true});

    this.push('[');
  }

  _transform (row, encoding, callback) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
      this.push(',');
    }

    this._prevRow = row;

    callback(null);
  }

  _flush (done) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
    }

    this.push(']');

    delete this._prevRow;

    done();
  }
}

async function get(req, res, next) {
  try {
    const toJSONArray = new ToJSONArray();
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});

    stream.pipe(toJSONArray).pipe(res);

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
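To use this from an Express app like the one in the question, the handler just needs to be mounted on a route once the default pool exists. A minimal sketch, assuming the code above is saved as employees.js and the initialize() pool helper sketched earlier (both file names are illustrative):
const express = require('express');
const employees = require('./employees');   // exports the get() handler above
const database = require('./database');     // exports the initialize() pool helper

const app = express();
app.get('/data', employees.get);

// getConnection() with no arguments draws from the default pool,
// so create the pool before accepting requests.
database.initialize()
  .then(() => app.listen(5000, () => console.log('Listening at 5000')))
  .catch((err) => console.error(err));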
I'm trying to build a chatbot using Botpress. I'm a beginner and am looking for your help. One of the requirements is to query a database to answer questions. This is what I have tried so far:
dbconnect.js
var oracledb = require('oracledb');
var dbConfig = require('./dbconfig.js');

var db = function dbCall(sql, values) {
    return new Promise(function (resolve, reject) {
        oracledb.getConnection(
            {
                user: dbConfig.user,
                password: dbConfig.password,
                connectString: dbConfig.connectString
            },
            function (err, connection) {
                if (err) {
                    reject(err);
                    return;
                }
                connection.execute(
                    sql,
                    values,
                    {
                        maxRows: 1
                    },
                    function (err, result) {
                        if (err) {
                            console.error(err.message);
                            return;
                        }
                        resolve(result);
                        doRelease(connection);
                    }
                );
            });
    });
};

// Note: connections should always be released when not needed
function doRelease(connection) {
    connection.close(
        function (err) {
            if (err) {
                console.error(err.message);
            }
        });
}

module.exports = db;
select.js
var dbConnect = require('../oracledb/dbconnect');

dbConnect('select code from table1' +
          ' where id=:id', { id: 'value1' }).then(function (response) {
    console.info(response.rows);
}).catch(function (error) {
    console.info(error);
});
Everything above works great if I run select.js. How can I bring the response into the Botpress chat window? I tried placing the select.js code in index.js inside event.reply, but it doesn't work.
Thanks,
Babu.
I have resolved this by using the promise directly in the action.
return dbConnect('<SQL here>=:id', { id: Id })
    .then(function (response) {
        var res = response.rows;
        console.info(res);
        const newState = { ...state, status: res };
        return newState;
    })
    .catch(function (error) {
        console.info(error);
    });
Note that response has the resultset.
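For context, a rough sketch of the action this snippet sits in. The file name, how the id is taken from the incoming event, and the way the action is registered all depend on your Botpress version, so treat every name here as illustrative rather than the actual Botpress API:
// actions/getCode.js (hypothetical)
var dbConnect = require('../oracledb/dbconnect');

module.exports = function getCode(state, event) {
    var Id = event.text; // assumption: the id to look up comes from the user's message

    return dbConnect('select code from table1 where id=:id', { id: Id })
        .then(function (response) {
            var res = response.rows;
            return { ...state, status: res }; // merged into the conversation state
        })
        .catch(function (error) {
            console.info(error);
            return state; // leave the state unchanged on failure
        });
};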
When trying to run the code below, I get "Unhandled rejection Error: Can't set headers after they are sent". When return GetData(); is invoked, it starts executing the function but immediately returns control with the error (observed during debugging).
Basically, the code tries to fetch the key from the MySQL DB if it doesn't exist in the Redis DB.
All database- and Redis-related modules are written in separate files so they can be reused.
somefile.js
var express = require('express');
var router = express.Router();
var dbModules = require('../common/database');
var redisModules = require("../common/redismodule");

function getSettings(request, response) {
    return GetData();

    function GetData() {
        return redisModules.GetRedisValue("key")
            .then(function (result) {
                if (!result)
                    return SetData();
                else
                    return result;
            })
            .then(function (result) {
                response.status(200).send({ value: result });
            })
            .catch(function (e) {
                response.status(500).send();
            });
    }

    function SetData() {
        return dbModules.executeQuery('query')
            .then(function (results) {
                // some code
                return 'some_key';
            })
            .then(function (result) {
                redisModules.setRedisValue('key', result);
            });
    }
}
database.js
File for handling database connections
var mysql = require('promise-mysql');
var Promise = require('bluebird'); // needed for Promise.using below

var pool = mysql.createPool({
    host: '',
    user: '',
    password: '',
    database: '',
    connectionLimit: 4
});

module.exports = {
    getSqlConnection: function () {
        return pool.getConnection().disposer(function (connection) {
            console.log("came here in releasing connection function");
            pool.releaseConnection(connection);
        });
    },

    executeQuery: function (sqlQuery) {
        return Promise.using(module.exports.getSqlConnection(), function (connection) {
            return connection.query(sqlQuery)
                .then(function (results) {
                    return results;
                });
        });
    }
};
redismodule.js
File for handling redis get, set concepts
var Promise = require('bluebird');
var constants = require('../common/contants');

var redisClient; // Global (Avoids Duplicate Connections)

module.exports = {
    OpenRedisConnection: function () {
        if (redisClient == null) {
            redisClient = require("redis").createClient(6379, 'localhost');
        }
    },

    isRedisConnectionOpened: function () {
        if (redisClient && redisClient.connected == true) {
            return true;
        } else {
            if (redisClient)
                redisClient.end(); // End and open once more
            module.exports.OpenRedisConnection();
            return true;
        }
    },

    GetRedisValue: function (key) {
        return new Promise(function (resolve, reject) {
            if (!module.exports.isRedisConnectionOpened())
                reject("Redis connection failure");

            redisClient.get(key, function (error, result) {
                if (error) {
                    reject(error);
                } else {
                    if (result == null)
                        resolve(); // Key not present so create
                    else
                        resolve(result);
                }
            });
        });
    },

    SetRedisValue: function (key, value) {
        return new Promise(function (resolve, reject) {
            if (!module.exports.isRedisConnectionOpened())
                reject("Redis connection failure");

            redisClient.set(key, value, 'EX', 1000,
                function (err, reply) {
                    if (reply == 'OK')
                        resolve(value); // Send the value
                    else
                        reject(err);
                });
        });
    }
};
The execution starts when the getSettings function is called.
I have included all the code so that, if it is correct, it might be useful for others.
Corrected Answer
somefile.js
var Promise = require('bluebird');
var dbModules = require('database');
var redisModules = Promise.promisifyAll(require("redismodule"));

async function getSettings(request, response) {
    try {
        var data = await redisModules.GetRedisValue("key");
        if (!data)
            data = await SetData();
        return response.status(200).send({
            value: data
        });
    } catch (error) {
        return response.status(500).send({
            'error': 'Try after some time'
        });
    }

    async function SetData() {
        let result = await dbModules.executeQuery('query');
        return redisModules.SetRedisValue('key', result);
    }
}
database.js
var mysql = require('promise-mysql');
var Promise = require('bluebird'); // needed for Promise.using below

var pool = mysql.createPool({
    host: '',
    user: '',
    password: '',
    database: '',
    connectionLimit: 4
});

function getSqlConnection() {
    return pool.getConnection().disposer(function (connection) {
        console.log("came here in releasing connection function");
        pool.releaseConnection(connection);
    });
}

module.exports = {
    executeQuery: function (sqlQuery) {
        return Promise.using(getSqlConnection(), function (connection) {
            return connection.query(sqlQuery)
                .then(function (results) {
                    return results;
                });
        });
    }
};
redismodule.js
var redisClient; // Global (Avoids Duplicate Connections)

// Keeping the function below private
function openRedisConnection() {
    if (redisClient && redisClient.connected == true) {
        return;
    } else {
        if (redisClient)
            redisClient.end(); // End and open once more
        redisClient = require("redis").createClient(6379,
            process.env.REDIS_URL, {
                auth_pass: process.env.REDIS_PASS
            });
        redisClient.selected_db = 1;
    }
}

module.exports = {
    GetRedisValue: function (key) {
        openRedisConnection();
        return new Promise(function (resolve, reject) {
            redisClient.get(key, function (error, result) {
                if (error) {
                    reject(error);
                } else {
                    resolve(result); // null when the key is not present
                }
            });
        });
    },

    SetRedisValue: function (key, value) {
        openRedisConnection();
        return new Promise(function (resolve, reject) {
            redisClient.set(key, value, 'EX', 1000,
                function (err, reply) {
                    if (reply == 'OK')
                        resolve(value); // Send the value
                    else
                        reject(err);
                });
        });
    }
};
This is the way I see it:
somefile.js
var dbModules = require('../common/database');
var redisModules = require("../common/redismodule");

function getSettings(request, response) {

    function getData() {
        return redisModules.getRedisValue('key')
            .then(function (result) {
                return result || setData();
            });
    }

    function setData() {
        return dbModules.executeQuery('query')
            .then(function (results) {
                return redisModules.setRedisValue('key', results);
            });
    }

    return getData()
        .then(function (result) {
            response.status(200).send({ value: result });
        }).catch(function (e) {
            response.status(500).send();
        });
}
database.js
var mysql = require('promise-mysql');
var Promise = require('bluebird'); // Promise.using below comes from bluebird

var pool = mysql.createPool({
    host: '',
    user: '',
    password: '',
    database: '',
    connectionLimit: 4
});

function getSqlConnection() {
    return pool.getConnection().disposer(function (connection) {
        console.log("came here in releasing connection function");
        pool.releaseConnection(connection);
    });
}

module.exports = {
    'executeQuery': function (sqlQuery) {
        return Promise.using(getSqlConnection(), function (connection) {
            return connection.query(sqlQuery);
        });
    }
};
redismodule.js
var Promise = require('bluebird');
var redis = Promise.promisifyAll(require('redis'));

var redisClient = null;

function openRedisConnection() {
    if (!redisClient || !redisClient.connected) {
        if (redisClient) {
            redisClient.end(); // End and open once more
        }
        redisClient = redis.createClient(6379, process.env.REDIS_URL, {
            auth_pass: process.env.REDIS_PASS
        });
        redisClient.selected_db = 1;
    }
    return redisClient;
}

module.exports = {
    'getRedisValue': function (key) {
        // .getAsync() is the promise-returning method created by Promise.promisifyAll()
        return openRedisConnection().getAsync(key);
    },
    'setRedisValue': function (key, value) {
        // .setAsync() is the promise-returning method created by Promise.promisifyAll()
        return openRedisConnection().setAsync(key, value, 'EX', 1000);
    }
};
My main contributions are in somefile.js and redismodule.js. The third module, database.js, has been tidied but nothing more than that.
Things like dbModules.executeQuery('query') and redisModules.getRedisValue('key') still need to be addressed, but I guess you know what you are doing there.
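If it helps, here is a minimal sketch of how getSettings could be wired back into the router from the question, assuming somefile.js is changed to end with module.exports = getSettings; (the route path is illustrative):
var express = require('express');
var router = express.Router();

var getSettings = require('../common/somefile'); // assuming it exports the function above

router.get('/settings', getSettings);

module.exports = router;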
I am using Node.js with Socket.IO to push real-time notifications to users. Currently I just run a query in my Socket.IO code and send the result back to the client, but I need the socket to be informed when changes occur and either push the change or re-query the DB for the new count and send that to the client.
For example, if a user gets a friend request, the notification count will change, and I want Socket.IO to push the new notification count to the user.
Here is my Socket.IO code in my app.js file:
io.on('connection', function (socket) {
    var sessionID = socket.handshake.sessionID,
        session = new connect.middleware.session.Session({ sessionStore: sessionStore }, socket.handshake.session);

    console.log('socket: new ' + sessionID);
    socket.broadcast.emit('arpNewConn', session.passport.user);

    var intervalID = setInterval(function () {
        socket.handshake.session.reload(function () {
            socket.handshake.session.touch().save();
        });
        socket.emit('pulse', { heartbeat: new Date().toString(), timestamp: new Date().getTime() });
    }, 300 * 1000);

    socket.on('disconnect', function () {
        console.log('socket: dump ' + sessionID);
        socket.broadcast.emit('arpLostConn', session.passport.user);
        clearInterval(intervalID);
    });

    socket.emit('entrance', { message: 'Message works' });

    dbnotif.findOne(userID, function (err, user) {
        if (err) throw err;
        notify = user.notifications;
        socket.emit('notify', { notific: notify });
    });
});
Here is the client side:
div#CheckSocket
  script(src='http://localhost:3000/socket.io/socket.io.js')
  script.
    $(document).ready(function () {
      console.log('socket');
      var socket = io.connect('http://localhost:3000/');
      console.log('entered1');

      socket.on('entrance', function (data) {
        console.log('entered');
        console.log(data.message);
      });

      socket.on('notify', function (data) {
        console.log('noting');
        console.log(data.notific);
        if (data.notific !== 0)
          $('.notifications').html(data.notific);
      });

      socket.on('reconnecting', function (data) {
        setStatus('reconnecting');
        console.log('entered2');
      });

      function setStatus(msg) {
        console.log('connection status: ' + msg);
        console.log('entered5');
      }
    });
Here is the example of adding a friend in the route file:
exports.addContactPost = function (req, res, err) {
    async.waterfall([
        function (callback) {
            var success;
            var newFriend = new Friend({
                userId: req.signedCookies.userid,
                friend_id: mongoose.Types.ObjectId(req.body.otherUser),
                friend_status: 1
            });

            newFriend.save(function (err) {
                if (err) {
                    console.log(err);
                } else {
                    console.log("saved it");
                    success = true;
                }
            });
            callback(null, success);
        },
        function (success, callback) {
            //if (success === true) {
            var success2;
            var newFriend2 = new Friend({
                userId: mongoose.Types.ObjectId(req.body.otherUser),
                friend_id: req.signedCookies.userid,
                friend_status: 2
            });

            newFriend2.save(function (err) {
                if (err) {
                    res.send("request not received");
                } else {
                    success2 = true;
                }
            });
            callback(null, success2);
            //} else {
            //    res.send("error with request sent");
            //}
        },
        function (success2, callback) {
            console.log('callback3');
            //if (success2 === true) {
            var success3;
            Notification.findOneAndUpdate({ userId: mongoose.Types.ObjectId(req.body.otherUser) }, {
                $inc: { notifications: 1 }
            }, function (err, notify) {
                if (err) {
                    res.send(err);
                } else {
                    console.log(notify);
                    if (notify.added_notifications === true) {
                        // enable mail and include a generic "you have a new request"... do not include the name because it is not stored
                    }
                }
                success3 = true;
                callback(null, success3);
            });
        }
    ], function (err, results) {
        res.json({ response: true });
        console.log("Add successful");
    });
};
Notes: dbnotif is a Mongoose model, and
userID is a global variable available to the file.
I helped him solve this question offline, but we ended up using an EventEmitter as a proxy.
// main.js
var EventEmitter = require('events').EventEmitter;
var emitter = new EventEmitter();
Then add it to each request as middleware:
// elsewhere in main.js
app.use(function (req, res, next) {
    req.emitter = emitter;
    next();
});
Then in external routes file:
// routes.js
exports.addContactPost = function (req, res, err) {
    req.emitter.emit('some-key', whatever, data, you, want);
};
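To close the loop on the notification count, a minimal sketch of what the Socket.IO side might listen for, assuming the same emitter instance is in scope where io is set up; the 'some-key' event name and the payload shape are whatever you chose when emitting:
// back in main.js, near the io.on('connection', ...) handler
emitter.on('some-key', function (payload) {
    // e.g. payload = { userId: ..., notifications: 5 }
    io.emit('notify', { notific: payload.notifications });
});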
I am having trouble understanding Node.js.
For example, MongoDB access. Here's what I've got (mydb.js):
var mongodb = require('mongodb'),
    server = new mongodb.Server('staff.mongohq.com', 10030, {
        auto_reconnect: true
    }),
    db = new mongodb.Db('mydb', server);

function authenticateAndGo(db, handle) {
    db.authenticate('username', 'password', function (err) {
        if (err) {
            console.log(err);
            return;
        }
        console.log('Database user authenticated');

        var collection = new mongodb.Collection(db, 'test');
        handle(collection);
    });
}

function query(handle) {
    db.open(function (err, db) {
        if (err) {
            console.log(err);
            return;
        }
        console.log('Database connected');

        authenticateAndGo(db, handle);
    });
};

exports.query = query;
So, if I want to use it later, I would do:
var mydb = require('./mydb');

mydb.query(function (collection) {
    collection.find({}, {
        limit: 10
    }).toArray(function (err, docs) {
        console.log(docs);
    });
});
But if I do multiple calls, like so:
var mydb = require('./mydb');

mydb.query(function (collection) {
    collection.find({}, {
        limit: 10
    }).toArray(function (err, docs) {
        console.log(docs);
    });
});

mydb.query(function (collection) {
    collection.find({}, {
        limit: 10
    }).toArray(function (err, docs) {
        console.log(docs);
    });
});
I get an exception:
Error: db object already connecting, open cannot be called multiple times
I think there is something fundamental that I do not understand about all this, and it is probable that this question is stupid...
Anyway, all help is welcome.
Thanks in advance.
mydb.js:
var mongodb = require('mongodb'),
    server = new mongodb.Server('staff.mongohq.com', 10030, {
        auto_reconnect: true
    }),
    db1 = new mongodb.Db('mydb', server);

// callback: (err, db)
function openDatabase(callback) {
    db1.open(function (err, db) {
        if (err)
            return callback(err);

        console.log('Database connected');
        return callback(null, db);
    });
}

// callback: (err, collection)
function authenticate(db, username, password, callback) {
    db.authenticate(username, password, function (err, result) {
        if (err) {
            return callback(err);
        }
        if (result) {
            var collection = new mongodb.Collection(db, 'test');
            // always, ALWAYS return the error object as the first argument of a callback
            return callback(null, collection);
        } else {
            return callback(new Error('authentication failed'));
        }
    });
}

exports.openDatabase = openDatabase;
exports.authenticate = authenticate;
use.js:
var mydb = require('./mydb');

// open the database once
mydb.openDatabase(function (err, db) {
    if (err) {
        console.log('ERROR CONNECTING TO DATABASE');
        console.log(err);
        process.exit(1);
    }

    // authenticate once after you opened the database. What's the point of
    // authenticating on-demand (for each query)?
    mydb.authenticate(db, 'username', 'password', function (err, collection) {
        if (err) {
            console.log('ERROR AUTHENTICATING');
            console.log(err);
            process.exit(1);
        }

        // use the returned collection as many times as you like INSIDE THE CALLBACK
        collection.find({}, { limit: 10 })
            .toArray(function (err, docs) {
                console.log('\n------ 1 ------');
                console.log(docs);
            });

        collection.find({}, { limit: 10 })
            .toArray(function (err, docs) {
                console.log('\n------ 2 ------');
                console.log(docs);
            });
    });
});
Result:
on success:
Database connected
Database user authenticated
------ 1 ------
[ { _id: 4f86889079a120bf04e48550, asd: 'asd' } ]
------ 2 ------
[ { _id: 4f86889079a120bf04e48550, asd: 'asd' } ]
on failure:
Database connected
{ [MongoError: auth fails] name: 'MongoError', errmsg: 'auth fails', ok: 0 }
[Original Answer]:
You're opening the db multiple times (once in each query). You should open the database just once, and use the db object in the callback for later use.
You're using the same variable name multiple times, and that might've caused some confusion.
var mongodb = require('mongodb'),
    server = new mongodb.Server('staff.mongohq.com', 10030, {
        auto_reconnect: true
    }),
    db1 = new mongodb.Db('mydb', server);

function authenticateAndGo(db, handle) {
    db.authenticate('username', 'password', function (err) {
        if (err) {
            console.log(err);
            return;
        }
        console.log('Database user authenticated');

        var collection = new mongodb.Collection(db, 'test');
        handle(collection);
    });
}

function query(handle) {
    db1.open(function (err, db2) {
        if (err) {
            console.log(err);
            return;
        }
        console.log('Database connected');

        authenticateAndGo(db2, handle);
    });
};

exports.query = query;
I've changed the above code a little (db1 for the original db, db2 for the opened db). As you can see, you're opening db1 multiple times, which is not good. Extract the code for opening into another method, use it ONCE, and use the db2 instance for all your queries/updates/removes/...
You can only call "open" once. When the open callback fires, you can then do your queries on the DB object it returns. So one way to handle this is to queue up the requests until the open completes.
e.g. MyMongo.js:
var mongodb = require('mongodb');

function MyMongo(host, port, dbname) {
    this.host = host;
    this.port = port;
    this.dbname = dbname;

    this.server = new mongodb.Server(
        'localhost',
        9000,
        { auto_reconnect: true });

    this.db_connector = new mongodb.Db(this.dbname, this.server);

    var self = this;
    this.db = undefined;
    this.queue = [];

    this.db_connector.open(function (err, db) {
        if (err) {
            console.log(err);
            return;
        }
        self.db = db;

        // flush any queries that were queued while the connection was opening
        for (var i = 0; i < self.queue.length; i++) {
            var collection = new mongodb.Collection(
                self.db, self.queue[i].cn);
            self.queue[i].cb(collection);
        }
        self.queue = [];
    });
}

exports.MyMongo = MyMongo;

MyMongo.prototype.query = function (collectionName, callback) {
    if (this.db != undefined) {
        var collection = new mongodb.Collection(this.db, collectionName);
        callback(collection);
        return;
    }

    // connection not ready yet: remember the request and run it when open completes
    this.queue.push({ "cn": collectionName, "cb": callback });
};
and then a sample use:
var MyMongo = require('./MyMongo.js').MyMongo;

var db = new MyMongo('localhost', 9000, 'db1');
var COL = 'col';

db.query(COL, function (collection) {
    collection.find({}, {
        limit: 10
    }).toArray(function (err, docs) {
        console.log("First:\n", docs);
    });
});

db.query(COL, function (collection) {
    collection.find({}, {
        limit: 10
    }).toArray(function (err, docs) {
        console.log("\nSecond:\n", docs);
    });
});
I simply call the open function once directly after the db init:
var mongodb = require('mongodb');
var server = new mongodb.Server('foo', 3000, {auto_reconnect: true});
var db = new mongodb.Db('mydb', server);
db.open(function(){});
After that I do not have to care about it anymore, because auto_reconnect is true.
db.collection('bar', function(err, collection) { [...] });
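Put together, a minimal sketch of that approach using the query from the question (the collection name and query are illustrative, and this assumes the same legacy callback-style driver used above):
var mongodb = require('mongodb');

var server = new mongodb.Server('foo', 3000, { auto_reconnect: true });
var db = new mongodb.Db('mydb', server);

// Open once at startup; later operations reuse the same db object
// and rely on auto_reconnect for dropped connections.
db.open(function (err) {
    if (err) console.log(err);
});

// Somewhere else in the app:
db.collection('bar', function (err, collection) {
    if (err) return console.log(err);
    collection.find({}, { limit: 10 }).toArray(function (err, docs) {
        console.log(docs);
    });
});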