Node JS MSSQL Callbacks - node.js

No matter how many posts, books, and tutorials I read, I can't seem to grasp promises. I'm trying to execute a chunk of code in Node after the SQL requests have completed, but I can't make it work... Any help at all would be greatly appreciated. I need console.log('done'); to run after the for loop has finished running through the req array:
app.post('/QueryWrite', function (req, res) {
    var anyErrors = false;
    sql.connect(config).then(function (err) {
        if (err) {
            console.log(err);
            res.send(err);
            anyErrors = true;
        }
        var i;
        for (i = 0; i < req.body['query[]'].length; i++) {
            var sqlQuery = req.body["query[]"][i];
            let sqlRequest = new sql.Request();
            sqlRequest.query(sqlQuery, function (err) {
                if (err) {
                    console.log(err);
                    res.send(err);
                    anyErrors = true;
                }
            })
            console.log(req.body.authenticatedAs + " wrote to DB: " + sqlQuery);
        }
    }).then(console.log('done'));
});

So here is a kind of simplified example for the database connection.
// database.js file
const mysql = require("mysql");
const utils = require("util");

const dbCon = mysql.createConnection({
    debug: false,
    host: "localhost",
    user: "root",
    database: "laravel_test_tweety",
    password: "",
    multipleStatements: true,
});

dbCon.query = utils.promisify(dbCon.query);

module.exports = dbCon;
So this is how I kind of use my database connections. After this, let's try it with server.js:
//server.js
// ... here your code with creation of app and such,
// and we are importing dbCon from database.js
const dbCon = require("./database");

// server that listens for requests called `app`
app.post("/QueryWrite", function (req, res) {
    /**
     * in case you are sending an object as queries,
     * we need them as an array
     * { query1: 'some query', query2: 'some query' }
     * =>
     * ['some query', 'some query']
     *
     * So that's why we are using `Object.values`
     */
    let queries = Object.values(req.body["query[]"]);

    if (queries) {
        // So there are a couple of options here
        // Let's say you are only selecting stuff from the db
        let query = queries.join(";");
        // This is gonna give you something like
        // "SELECT 1; SELECT 2"
        // If you want this to work you need `multipleStatements: true` on
        // the mysql config from database.js
        dbCon
            .query(query)
            .then((results) => {
                console.log(results);
                // if needed do some stuff here and send res back.
                res.json(results);
            })
            .catch((err) => {
                console.error(err);
                // do some other stuff
            });
    }
});
After everything from here: if you need to do an insert or update, you also need to be able to send data. I can't really get my head around that right now, but maybe separate those within the array and query them one by one?
I'm aware that this is not exactly what you want, but at least for select or delete queries this would work.
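If you do want to go the "query one by one" route, here is a minimal sketch building on the promisified dbCon from database.js above; the Express app and the query[] body field are the same as in the earlier snippets, and you would still want to validate the incoming SQL before running it:
// rough sketch only: run each query in order with async/await
const dbCon = require("./database");

app.post("/QueryWrite", async function (req, res) {
    let queries = Object.values(req.body["query[]"]);
    let results = [];
    try {
        for (const q of queries) {
            // await pauses the loop until the current query finishes,
            // so inserts/updates run strictly in order
            results.push(await dbCon.query(q));
        }
        console.log("done");
        res.json(results);
    } catch (err) {
        console.error(err);
        res.status(500).send(String(err));
    }
});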


Node.Js Retrieve specific data from SQL Server and encode it into JSON Array

I know this question has many duplicates, but I have already wasted too much time searching for the right solution.
First take a look at my Node.JS:
var express = require('express');
var app = express();

app.get('/', function (req, res) {
    var sql = require("mssql");

    // config for your database
    var config = {
        user: 'myuser',
        password: 'mypass',
        server: 'myip',
        database: 'mydatabase'
    };

    sql.close();

    // connect to your database
    sql.connect(config, function (err) {
        if (err) console.log(err);

        var dataqu = '';

        // create Request object
        var request = new sql.Request();

        // query to the database and get the records
        request.query("select * from AR_Invoices", function (err, recordset) {
            if (err) console.log(err)
            res.json(recordset);
            sql.close();
        });
    });
});

var server = app.listen(5000, '0.0.0.0', function () {
    console.log('Server is running..');
});
This code runs fine, but the json result structure is like this :
{"recordsets":[[{"Tipe":"Invoices","InvoiceID":411891,"InvoiceNumber":"SR.1701.0001"}]],"recordset":[{"Tipe":"Invoices","InvoiceID":411891,"InvoiceNumber":"SR.1701.0001"}],"output":{},"rowsAffected":[1]}
I don't know why, but for some reason the result always comes back duplicated.
And how do I select just InvoiceID and InvoiceNumber?
I already tested using recordset.InvoiceID or recordset[0].InvoiceID, but always in vain, and the result is still duplicated.
Can anyone explain how to do this properly?
I want the final result to look like this:
[
{ "InvoiceID":"1", "InvoiceNumber":"mynumber" }
]
For future reference, I finally figured out how to do this. Here is my full code:
var express = require('express');
var app = express();
var dateFormat = require('dateformat');

app.get('/', function (req, res) {
    var sql = require("mssql");

    // config for your database
    var config = {
        user: 'myuser',
        password: 'mypassword',
        server: 'myip',
        database: 'mydb'
    };

    sql.close();

    // connect to your database
    sql.connect(config, function (err) {
        if (err) console.log(err);

        // create Request object
        var request = new sql.Request();

        // query to the database and get the records
        request.query("select top 2 'Invoices' as Tipe, InvoiceID, InvoiceNumber, InvoiceDate, (select top 1 DriverPicture from dbDigitalApp.dbo.tbdriver) as Blob from AR_Invoices", function (err, result) {
            if (err) console.log(err)

            var myarr = new Array();
            for (var i = 0; i < result.recordset.length; ++i) {
                var InvoiceNumber = result.recordset[i].InvoiceNumber;
                var InvoiceDate = dateFormat(result.recordset[i].InvoiceDate, "dd mmmm yyyy");
                var Blob = result.recordset[i].Blob;
                myarr.push({ 'InvoiceNumber': InvoiceNumber, 'InvoiceDate': InvoiceDate, 'Blob': Buffer.from(Blob).toString('base64') });
            }

            res.json(myarr);
            sql.close();
        });
    });
});

var server = app.listen(5000, '0.0.0.0', function () {
    console.log('Server is running..');
});
The result of the above code is a flat JSON array containing only the selected fields.
With the above code you can get only the specific fields and do whatever you want with that data, such as changing the date format or encoding it as base64. (The apparent "duplicate" in the earlier output is just the mssql result object exposing the same rows under both recordset and recordsets[0].)
I don't know if this is the cleanest way to do it, since res.json can send all fields of the retrieved data without looping through it.
But at least here is my kind of solution; hope it will be helpful to future people wondering the same thing as me.
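A slightly shorter variant of the same loop, sketched with Array.prototype.map in place of the manual for loop; it assumes the same request object and result shape as the full example above:
request.query("select top 2 InvoiceID, InvoiceNumber from AR_Invoices", function (err, result) {
    if (err) return console.log(err);
    // result.recordset is a plain array of row objects, so map() can
    // project it down to just the fields you want to expose
    var rows = result.recordset.map(function (row) {
        return { InvoiceID: row.InvoiceID, InvoiceNumber: row.InvoiceNumber };
    });
    res.json(rows);
    sql.close();
});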

page renders before getting all the values sorted

I think the rendering takes place before the search for the string in the files finishes. I have tried different methods but can't seem to get this working; any help will be appreciated. I'm a noob at Node.js. I'm trying to get the ID of the user, query and get all the data, then see if he is in any of the given lists, and finally render the page.
const j = [];
let name = '';
const filename = [];
var ext = '';

module.exports = function (app, express) {
    app.use(bodyParser.urlencoded({ extended: false }));
    app.use(bodyParser.json());

    app.post('/cusdetails', isLoggedIn, function (req, res) {
        var cusid = req.body.cusid;
        var insertQuerys = "SELECT * FROM customer WHERE cusid=? ORDER BY rowid DESC LIMIT 1";
        connection.query(insertQuerys, [cusid],
            function (err, rows) {
                rows.forEach((row) => {
                    name = row.fncus;
                });
                fs.readdir('./views/iplist', function (err, files) {
                    if (err)
                        throw err;
                    for (var index in files) {
                        j.push(files[index])
                    }
                    j.forEach(function (value) {
                        var k = require('path').resolve(__dirname, '../views/iplist/', value);
                        fs.exists(k, function (fileok) {
                            if (fileok) {
                                fs.readFile(k, function (err, content) {
                                    if (err) throw err;
                                    if (content.indexOf(name) > -1) {
                                        ext = path.extname(k);
                                        filename.push(path.basename(k, ext));
                                    }
                                });
                            }
                            else {
                                console.log(" FileNotExist ");
                            }
                        });
                    });
                });
                console.log(filename);
                res.render('cusdetails.ejs', { rows: rows, user: req.user, aml: filename });
            });
    });
};
You can create a simple Promise wrapper and then use it inside an async/await function to pause execution until it resolves.
// use the mysql2 package as it provides promises, less work than writing promise wrappers
const mysql = require('mysql2/promise');

// create the connection pool to the database
// (with mysql2/promise, createConnection returns a promise, so a pool is easier
// to use at module level: its query() method returns promises directly)
const connection = mysql.createPool({
    host: 'localhost',
    user: 'root',
    database: 'test'
});

// sample wrapper
function some(k) {
    // more advisable to have local variables; why do you need this to be an array?
    var filename = [];
    return new Promise((resolve, reject) => {
        // doing this is also not recommended, check the nodejs documentation for **fs.exists** for more info
        fs.exists(k, function (fileok) {
            if (fileok) {
                fs.readFile(k, function (err, content) {
                    if (err) return reject(err);
                    if (content.indexOf(name) > -1) {
                        ext = path.extname(k);
                        filename.push(path.basename(k, ext));
                    }
                    // resolve either way, otherwise the promise never settles
                    // when the name is not found in the file
                    resolve(filename);
                });
            }
            else {
                // reject(new Error("FileNotExist"))
                console.log(" FileNotExist ");
            }
        });
    })
}
// note the use of async
app.post('/cusdetails', isLoggedIn, async function (req, res) {
    var cusid = req.body.cusid;
    var insertQuerys = "SELECT * FROM customer WHERE cusid=? ORDER BY rowid DESC LIMIT 1";
    // using await to pause execution, waits till the query is finished
    const [rows] = await connection.query(insertQuerys, [cusid])
    rows.forEach((row) => {
        name = row.fncus;
    });
    // then you can
    var result = await some(k)
...
Note however that this way you lose the advantage of concurrent execution, as it's kind of blocking. If the result of one call is not used in the other, you can start them in parallel and await the results afterwards to achieve sequencing, like:
const rowsPromise = connection.query(insertQuerys, [cusid])
const resultPromise = some(k)
console.log(await rowsPromise) // do something
console.log(await resultPromise) // do something
JavaScript is asynchronous. This means that if you have a function with a callback (i.e. your query), the callback will be called asynchronously, at an unknown time, while the other code executes.
You need to look up some tutorials on how to deal with callbacks to get a proper understanding of them. Another method is using async/await and/or promises.
Basically, if you take the following code:
console.log("this will print first");
setTimeout(function () {
    console.log("this will print last");
}, 1000);
console.log("this will print second");
If you run the code above, the top level is executed synchronously: it first calls console.log, then it executes setTimeout, which itself returns immediately. It sets a timer, says "I'm ready", and the code continues to the other console.log. After 1 second (1000 milliseconds), the callback passed to setTimeout is executed, and only then is that console.log called. You cannot make the rest of the code wait this way; you need to restructure your code or read up on promises.
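To illustrate the promise-based restructuring the answer points at, here is a minimal sketch (not from the original answer) that wraps setTimeout in a Promise so the last line really does wait:
// wrap setTimeout so it can be awaited
function delay(ms) {
    return new Promise(function (resolve) {
        setTimeout(resolve, ms);
    });
}

async function run() {
    console.log("this will print first");
    await delay(1000); // execution of run() pauses here for 1 second
    console.log("this will print second");
    console.log("this will print last");
}

run();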

node.js Global connection already exists. Call sql.close() first

I'm trying to create web services using node.js from a SQL Server database. In the frontend, when I call those 2 web services simultaneously, it throws the error Global connection already exists. Call sql.close() first.
Any solution?
var express = require('express');
var router = express.Router();
var sql = require("mssql");

router.get('/Plant/:server/:user/:password/:database', function (req, res, next) {
    user = req.params.user;
    password = req.params.password;
    server = req.params.server;
    database = req.params.database;

    // config for your database
    var config = {
        user: user,
        password: password,
        server: server,
        database: database
    };

    sql.connect(config, function (err) {
        // create Request object
        var request = new sql.Request();
        // query to the database and get the records
        request.query("SELECT distinct PlantName FROM MachineryStateTable"
            , function (err, recordset) {
                if (err) console.log(err)
                else {
                    for (i = 0; i < recordset.recordsets.length; i++) {
                        res.send(recordset.recordsets[i])
                    }
                }
                sql.close();
            });
    });
});

router.get('/Dep/:server/:user/:password/:database/:plantname', function (req, res, next) {
    user = req.params.user;
    password = req.params.password;
    server = req.params.server;
    database = req.params.database;
    plantname = req.params.plantname;

    // config for your database
    var config = {
        user: user,
        password: password,
        server: server,
        database: database
    };

    sql.connect(config, function (err) {
        // create Request object
        var request = new sql.Request();
        // query to the database and get the records
        request.query("SELECT distinct DepName FROM MachineryStateTable where PlantName= '" + plantname + "'"
            , function (err, recordset) {
                if (err) console.log(err)
                else {
                    for (i = 0; i < recordset.recordsets.length; i++) {
                        res.send(recordset.recordsets[i])
                    }
                    sql.close();
                }
            });
    });
});

module.exports = router;
You have to create a connection pool.
Try this:
new sql.ConnectionPool(config).connect().then(pool => {
    return pool.request().query("SELECT * FROM MyTable")
}).then(result => {
    let rows = result.recordset
    res.setHeader('Access-Control-Allow-Origin', '*')
    res.status(200).json(rows);
    sql.close();
}).catch(err => {
    res.status(500).send({ message: `${err}` })
    sql.close();
});
From the documentation, the close method should be used on the connection, not on the required module.
So it should be used like:
var connection = new sql.Connection({
    user: '...',
    password: '...',
    server: 'localhost',
    database: '...'
});

connection.close();
Also, a couple of suggestions:
1. Putting res.send in a loop isn't a good idea. You could reply back with the entire recordsets, or do operations over them, store the result in a variable and send that back.
2. Try using promises instead of callbacks; it would make the flow neater.
You must use ConnectionPool.
The next function returns a recordset with my query results.
async function execute2(query) {
    return new Promise((resolve, reject) => {
        new sql.ConnectionPool(dbConfig).connect().then(pool => {
            return pool.request().query(query)
        }).then(result => {
            resolve(result.recordset);
            sql.close();
        }).catch(err => {
            reject(err)
            sql.close();
        });
    });
}
Works fine in my code!
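A quick usage sketch for that helper, reusing a query from the question (this fragment assumes it sits inside an async route handler):
// somewhere inside an async route handler
try {
    const rows = await execute2("SELECT distinct PlantName FROM MachineryStateTable");
    res.json(rows);
} catch (err) {
    console.log(err);
    res.status(500).send(err.message);
}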
If this problem still bothers you, then change the core API:
go to node_modules\mssql\lib\base.js
and at line 1723, add the code below before the if condition:
globalConnection = null
In case someone comes here trying to find out how to use SQL Server pool connection with parameters:
var executeQuery = function (res, query, parameters) {
    new sql.ConnectionPool(sqlConfig).connect().then(pool => {
        // create request object
        var request = new sql.Request(pool);
        // Add parameters
        parameters.forEach(function (p) {
            request.input(p.name, p.sqltype, p.value);
        });
        // query to the database
        request.query(query, function (err, result) {
            res.send(result);
            sql.close();
        });
    })
}
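For example, calling that helper for a parameterised version of the earlier /Dep query might look like the sketch below; the route shape is only an assumption, but sql.NVarChar is a real mssql type and @plantname is how named parameters are referenced:
router.get('/Dep/:plantname', function (req, res) {
    var query = "SELECT distinct DepName FROM MachineryStateTable WHERE PlantName = @plantname";
    var parameters = [
        // each entry becomes request.input(name, sqltype, value)
        { name: 'plantname', sqltype: sql.NVarChar, value: req.params.plantname }
    ];
    executeQuery(res, query, parameters);
});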
Don't read their documentation, I don't think it was written by someone that actually uses the library :) Also don't pay any attention to the names of things; a 'ConnectionPool' doesn't seem to actually be a connection pool of any sort. If you try to create more than one connection from a pool, you will get an error. This is the code that I eventually got working:
const sql = require('mssql');
let pool = new sql.ConnectionPool(config); // some object that lets you connect ONCE
let cnn = await pool.connect(); // create single allowed connection on this 'pool'
let result = await cnn.request().query(query);
console.log('result:', result);
cnn.close(); // close your connection
return result;
This code can be run multiple times in parallel and seems to create multiple connections and correctly close them.
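As a hedged illustration of that parallel use, wrapping the snippet in a small helper and firing two queries at once might look like this (the helper name is an assumption; config and the table names come from the question):
const sql = require('mssql');

// each call opens its own pool/connection and closes it when done
async function runQuery(query) {
    let pool = new sql.ConnectionPool(config);
    let cnn = await pool.connect();
    try {
        let result = await cnn.request().query(query);
        return result.recordset;
    } finally {
        cnn.close();
    }
}

// two independent queries running in parallel
Promise.all([
    runQuery("SELECT distinct PlantName FROM MachineryStateTable"),
    runQuery("SELECT distinct DepName FROM MachineryStateTable")
]).then(([plants, deps]) => {
    console.log(plants, deps);
}).catch(err => console.error(err));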

Do math with the result of two separate queries node.js

I pretty much exclusively program with Python but am trying to learn Node. My mind is so stuck in synchronicity that I'm making up words and banging my head against the wall trying to figure out callbacks. I realize a callback is a function passed to a function? I've successfully written very simple callbacks but can't get this code to work the way I'd like it to.
Essentially, I need to multiply the results of these two queries, and then I'll be writing an if statement based on that math.
Hoping someone can show me how to write a function that calls these functions, waits for the results, multiplies them together, and contains an if statement for me to do something with.
This needs to be done with Node as I'm adding it to a chat bot developed with Node.
var getSkuCount = function () {
    pool.getConnection(function (err, connection) {
        connection.query("select count(sku) from products_per_store where store_id = " + sID + " group by store_id", function (err, record) {
            if (err) {
                console.error('DATABASE ERROR:', err);
            }
            return record
            connection.release();
        });
    });
};

var getAssetCount = function () {
    console.log("getting total of scrapers attached to " + store_id);
    pool.getConnection(function (err, connection) {
        connection.query("SELECT count(*) FROM external_crawl_settings WHERE store_id = " + sID + " group by store_id", function (err, record) {
            if (err) {
                console.log(err);
                return console.error('DATABASE ERROR:', err);
            }
            connection.release();
        });
    });
}

var skuCount = getSkuCount();
var assetCount = getAssetCount();

if skuCount * assetCount > 50000 {
    do something
};
I've eliminated the global variables assetCount and skuCount and taken a different approach to address all your questions. This solution requires 2 different files: 1 for managing connections and 1 for consolidating all your routes.
You need to have this in your index.js or a similar server start-up script for your app.
app-server.js // server start up file

'use strict';

let http = require('http');
let express = require('express');
let connectionManager = require('./connection-manager');

//read from your config file
let config = {
    port: 7007,
    host: 'host',
    user: 'user',
    password: 'password',
    database: 'database',
    connectionLimit: 'limit'
};

function startServer(config) {
    let application = require('../'); // your application with all the routes
    let server = http.createServer(application);
    return new Promise((resolve, reject) => {
        server.listen(config.port, () => {
            return resolve();
        }).on('error', (err) => {
            return reject(err);
        });
    });
}

connectionManager.init(config).then(() => {
    return startServer(config);
}).then(() => {
    console.log(`server is up at ${config.port}`);
}).catch((err) => {
    console.log('err while starting server', err.stack);
});
connection-manager.js // connection manager

'use strict';

let mysql = require('promise-mysql');
let connectionPool;

class Connections {
    static init(config) {
        return mysql.createPool({
            host: config.host,
            user: config.user,
            password: config.password,
            database: config.database,
            connectionLimit: config.limit
        }).getConnection().then((connection) => {
            connectionPool = connection;
        });
    }

    static getConnection() {
        // you can call this across your applications
        return connectionPool;
    }

    static releaseConnection() {
        //call this only if you want to shut the application
        connectionPool.close(); // or equivalent method available
    }
}

module.exports = Connections;
sample.js

'use strict';

let connection = require('./connection-manager').getConnection();

function compute(sid) {
    let skuCount = connection.query('select count(sku) "cnt" from products_per_store where store_id = ' + sid + ' group by store_id');
    let assetCount = connection.query('SELECT count(*) "cnt" FROM external_crawl_settings WHERE store_id = ' + sid + ' group by store_id');

    return Promise.all([
        skuCount,
        assetCount
    ]).then((results) => {
        let skuCount = results[0];
        let assetCount = results[1];
        if (skuCount * assetCount > 50000) {
            //do something
        }
    }).catch((err) => {
        console.log('DATABASE ERROR:', err.stack);
    });
}
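A hedged usage sketch for that compute helper inside one of your routes (the route path and request field are just placeholders):
// e.g. in your routes file
app.post('/check-store', function (req, res) {
    compute(req.body.store_id).then(() => {
        res.send('done');
    }).catch((err) => {
        res.status(500).send(err.message);
    });
});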
Also, is there a limit on how many open connections you can have?
Since the connection pool handles connection recycling for you, it depends on the hardware resources you have. I'd recommend starting with the defaults and increasing until you get the performance you want.
My slack-bot randomly crashes and I can't figure out the reason why.
Do you use a process manager like pm2? If so, only by seeing its output can I help you debug further; process managers keep track of all the exceptions and errors you'd normally get, since they manage the application.
Does a program end only when there's an uncaught error?
Yes, if you haven't handled process.on('uncaughtException') and process.on('unhandledRejection'). It is good practice in Node.js land to let the program crash and restart.
Could my bot be hitting a connection limit and crashing?
Can't say. But you can get additional clues by inspecting /var/log/mysql/error.log, the error stack traces in your logs, and the pm2 logs.
How do you release the connection?
You don't have to, if you are using any connection pool.
pool.getConnection().then(function (connection) {
    let skuCount = connection.query('select count(sku) "cnt" from products_per_store where store_id = ' + sID + ' group by store_id');
    let assetCount = connection.query('SELECT count(*) "cnt" FROM external_crawl_settings WHERE store_id = ' + sID + ' group by store_id');

    return Promise.all([
        skuCount,
        assetCount
    ]).then((results) => {
        let skuCount = parseInt(results[0][0].cnt);
        let assetCount = parseInt(results[1][0].cnt);
        if (skuCount * assetCount > 50000) {
            console.log('Too many inputs to run without permission');
        }
        console.log(skuCount * assetCount);
    }).catch((err) => {
        console.log('DATABASE ERROR:', err.stack);
    });
}).catch(function (err) {
    console.log(err);
});

Bulk insert into Postgres with brianc/node-postgres

I have the following code in Node.js that uses pg (https://github.com/brianc/node-postgres).
My code to create subscriptions for an employee is as follows.
client.query(
    'INSERT INTO subscriptions (subscription_guid, employer_guid, employee_guid) values ($1,$2,$3)', [
        datasetArr[0].subscription_guid,
        datasetArr[0].employer_guid,
        datasetArr[0].employee_guid
    ],
    function (err, result) {
        done();
        if (err) {
            set_response(500, err, res);
            logger.error('error running query', err);
            return console.error('error running query', err);
        }
        logger.info('subscription with created');
        set_response(201);
    });
As you have already noticed datasetArr is an array. I would like to create mass subscriptions for more than one employee at a time. However I would not like to loop through the array. Is there a way to do it out of the box with pg?
I did a search for the same question, but found no solution yet.
With the async library it is very simple to run the query several times and do the necessary error handling.
Maybe this code variant helps.
(For inserting 10,000 small JSON objects into an empty database it took 6 seconds.)
Christoph
function insertData(item, callback) {
    client.query('INSERT INTO subscriptions (subscription_guid, employer_guid, employee_guid) values ($1,$2,$3)', [
        item.subscription_guid,
        item.employer_guid,
        item.employee_guid
    ],
    function (err, result) {
        // return any err to async.each iterator
        callback(err);
    })
}

async.each(datasetArr, insertData, function (err) {
    // Release the client to the pg module
    done();
    if (err) {
        set_response(500, err, res);
        logger.error('error running query', err);
        return console.error('error running query', err);
    }
    logger.info('subscription with created');
    set_response(201);
})
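If you'd rather not pull in the async library, a roughly equivalent sketch using the promise API that node-postgres also exposes would look like this (it assumes a pg Pool named pool; set_response, logger and datasetArr are from the question):
// same per-row insert, but collected into promises and awaited together
const inserts = datasetArr.map(item =>
    pool.query(
        'INSERT INTO subscriptions (subscription_guid, employer_guid, employee_guid) values ($1,$2,$3)',
        [item.subscription_guid, item.employer_guid, item.employee_guid]
    )
);

Promise.all(inserts)
    .then(() => set_response(201))
    .catch(err => {
        logger.error('error running query', err);
        set_response(500, err, res);
    });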
It looks to me like the best way is to use the PostgreSQL JSON functions:
client.query('INSERT INTO table (columns) ' +
    'SELECT m.* FROM json_populate_recordset(null::your_custom_type, $1) AS m',
    [JSON.stringify(your_json_object_array)], function (err, result) {
        if (err) {
            console.log(err);
        } else {
            console.log(result);
        }
    });
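Applied to the subscriptions table from the question, and using the table's own row type in place of your_custom_type, the call might look like the sketch below (it assumes the JSON keys match the column names):
client.query(
    'INSERT INTO subscriptions (subscription_guid, employer_guid, employee_guid) ' +
    'SELECT m.subscription_guid, m.employer_guid, m.employee_guid ' +
    'FROM json_populate_recordset(null::subscriptions, $1) AS m',
    [JSON.stringify(datasetArr)],
    function (err, result) {
        if (err) {
            console.log(err);
        } else {
            // result.rowCount is the number of rows inserted
            console.log(result.rowCount);
        }
    });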
To do a bulk insert into PostgreSQL from Node.js, the better option is to use the COPY command provided by Postgres together with pg-copy-streams.
Code snippet from : https://gist.github.com/sairamkrish/477d20980611202f46a2d44648f7b14b
/*
  Pseudo code - to serve as a help guide.
*/
const copyFrom = require('pg-copy-streams').from;
const Readable = require('stream').Readable;
const { Pool, Client } = require('pg');
const fs = require('fs');
const path = require('path');

const datasourcesConfigFilePath = path.join(__dirname, '..', '..', 'server', 'datasources.json');
const datasources = JSON.parse(fs.readFileSync(datasourcesConfigFilePath, 'utf8'));

const pool = new Pool({
    user: datasources.PG.user,
    host: datasources.PG.host,
    database: datasources.PG.database,
    password: datasources.PG.password,
    port: datasources.PG.port,
});

export const bulkInsert = (employees) => {
    pool.connect().then(client => {
        let done = () => {
            client.release();
        }
        var stream = client.query(copyFrom('COPY employee (name,age,salary) FROM STDIN'));
        var rs = new Readable;
        let currentIndex = 0;
        rs._read = function () {
            if (currentIndex === employees.length) {
                rs.push(null);
            } else {
                let employee = employees[currentIndex];
                rs.push(employee.name + '\t' + employee.age + '\t' + employee.salary + '\n');
                currentIndex = currentIndex + 1;
            }
        };
        let onError = strErr => {
            console.error('Something went wrong:', strErr);
            done();
        };
        rs.on('error', onError);
        stream.on('error', onError);
        stream.on('end', done);
        rs.pipe(stream);
    });
}
Finer details are explained in the gist linked above.
Create your data structure as:
[ [val1,val2],[val1,val2] ...]
Then convert it into a string:
JSON.stringify([['a','b'],['c']]).replace(/\[/g,"(").replace(/\]/g,")").replace(/"/g,'\'').slice(1,-1)
append it to the query and you are done!
Agreed, it has string parsing costs, but it's way cheaper than single inserts.
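A safer variant of the same idea, sketched with the pg-format package (not mentioned in the original answer) so the values are escaped for you instead of hand-built with string replaces:
const format = require('pg-format');

// [ [val1, val2, val3], ... ] exactly as described above
const values = datasetArr.map(item => [
    item.subscription_guid,
    item.employer_guid,
    item.employee_guid
]);

// %L expands the nested array into properly escaped ('v1','v2','v3'), (...) groups
const insertSql = format(
    'INSERT INTO subscriptions (subscription_guid, employer_guid, employee_guid) VALUES %L',
    values
);

client.query(insertSql, function (err, result) {
    if (err) {
        return console.error('error running query', err);
    }
    console.log(result.rowCount);
});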
Use an ORM, e.g. Objection.
Also, increase the connection pool size based on your database server and the number of active connections you need.
someMovie
    .$relatedQuery('actors')
    .insert([
        { firstName: 'Jennifer', lastName: 'Lawrence' },
        { firstName: 'Bradley', lastName: 'Cooper' }
    ])
    .then(function (actors) {
        console.log(actors[0].firstName);
        console.log(actors[1].firstName);
    });
