I'm trying to create web services with Node.js on top of a SQL Server database. In the frontend, when I call these two web services simultaneously, it throws the error Global connection already exists. Call sql.close() first.
Any solution?
var express = require('express');
var router = express.Router();
var sql = require("mssql");
router.get('/Plant/:server/:user/:password/:database', function(req, res, next) {
user = req.params.user;
password = req.params.password;
server = req.params.server;
database = req.params.database;
// config for your database
var config = {
user: user,
password: password,
server: server,
database:database
};
sql.connect(config, function (err) {
// create Request object
var request = new sql.Request();
// query to the database and get the records
request.query("SELECT distinct PlantName FROM MachineryStateTable"
, function (err, recordset) {
if (err) console.log(err)
else {
for(i=0;i<recordset.recordsets.length;i++) {
res.send(recordset.recordsets[i])
}
}
sql.close();
});
});
});
router.get('/Dep/:server/:user/:password/:database/:plantname', function(req, res, next) {
user = req.params.user;
password = req.params.password;
server = req.params.server;
database = req.params.database;
plantname = req.params.plantname;
// config for your database
var config = {
user: user,
password: password,
server: server,
database:database
};
sql.connect(config, function (err) {
// create Request object
var request = new sql.Request();
// query to the database and get the records
request.query("SELECT distinct DepName FROM MachineryStateTable where PlantName= '"+plantname+"'"
, function (err, recordset) {
if (err) console.log(err)
else {
for(i=0;i<recordset.recordsets.length;i++) {
res.send(recordset.recordsets[i])
}
sql.close();
}
});
});
});
module.exports = router;
You have to create a connection pool.
Try this:
new sql.ConnectionPool(config).connect().then(pool => {
return pool.request().query("SELECT * FROM MyTable")
}).then(result => {
let rows = result.recordset
res.setHeader('Access-Control-Allow-Origin', '*')
res.status(200).json(rows);
sql.close();
}).catch(err => {
res.status(500).send({ message: `${err}`})
sql.close();
});
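For context, here is a sketch of how that snippet could sit inside the /Plant route from the question; the config is the same object built from the route parameters, and the recordset handling is only an illustration. A per-request pool means the two routes no longer fight over one global connection.
router.get('/Plant/:server/:user/:password/:database', function (req, res) {
    var config = {
        user: req.params.user,
        password: req.params.password,
        server: req.params.server,
        database: req.params.database
    };
    // one pool per request instead of the shared global connection
    const pool = new sql.ConnectionPool(config);
    pool.connect().then(() => {
        return pool.request().query("SELECT distinct PlantName FROM MachineryStateTable");
    }).then(result => {
        res.status(200).json(result.recordset);
        pool.close(); // close this request's pool, not the global one
    }).catch(err => {
        res.status(500).send({ message: `${err}` });
        pool.close();
    });
});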
From the documentation, the close method should be used on the connection, and not on the required module.
So it should be used like this:
var connection = new sql.Connection({
user: '...',
password: '...',
server: 'localhost',
database: '...'
});
connection.close();
Also, a couple of suggestions:
1. Putting res.send in a loop isn't a good idea. You could reply with the entire set of recordsets, or do operations over it, store the result in a variable, and send that back in one response (see the sketch after this list).
2. Try using promises instead of callbacks; it would make the flow neater.
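As a rough sketch of both suggestions combined, assuming mssql's ConnectionPool (as in the other answer) and the question's config and res: collect all the rows first, reply once, and let the promise chain drive the flow.
new sql.ConnectionPool(config).connect().then(pool => {
    return pool.request()
        .query("SELECT distinct PlantName FROM MachineryStateTable")
        .then(result => {
            pool.close();
            // result.recordsets is an array of recordsets; flatten it into one
            // array and reply once instead of calling res.send inside a loop
            const allRows = [].concat(...result.recordsets);
            res.json(allRows);
        });
}).catch(err => {
    res.status(500).send({ message: `${err}` });
});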
You must use ConnectionPool.
The following function returns a recordset with my query results.
async function execute2(query) {
return new Promise((resolve, reject) => {
new sql.ConnectionPool(dbConfig).connect().then(pool => {
return pool.request().query(query)
}).then(result => {
resolve(result.recordset);
sql.close();
}).catch(err => {
reject(err)
sql.close();
});
});
}
Works fine in my code!
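A possible usage sketch for that helper (the route, the async handler and the query text are just assumptions):
router.get('/Plant', async function (req, res) {
    try {
        const rows = await execute2("SELECT distinct PlantName FROM MachineryStateTable");
        res.json(rows);
    } catch (err) {
        res.status(500).send({ message: `${err}` });
    }
});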
If this problem still bothers you, then change the core API:
go to node_modules\mssql\lib\base.js
and at line 1723, add the code below before the if condition:
globalConnection = null
In case someone comes here trying to find out how to use a SQL Server pool connection with parameters:
var executeQuery = function (res, query, parameters) {
    new sql.ConnectionPool(sqlConfig).connect().then(pool => {
        // create request object
        var request = new sql.Request(pool);
        // Add parameters
        parameters.forEach(function (p) {
            request.input(p.name, p.sqltype, p.value);
        });
        // query to the database
        request.query(query, function (err, result) {
            res.send(result);
            sql.close();
        });
    }).catch(err => {
        // report connection errors instead of leaving the request hanging
        res.status(500).send({ message: `${err}` });
    });
}
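A hedged usage sketch for that helper, showing the shape of the parameter objects it expects (the route and the @plantname parameter name are only illustrative):
router.get('/Dep/:plantname', function (req, res) {
    var query = "SELECT distinct DepName FROM MachineryStateTable WHERE PlantName = @plantname";
    var parameters = [
        { name: 'plantname', sqltype: sql.NVarChar, value: req.params.plantname }
    ];
    executeQuery(res, query, parameters);
});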
Don't read their documentation; I don't think it was written by someone who actually uses the library :) Also, don't pay any attention to the names of things: a 'ConnectionPool' doesn't seem to actually be a connection pool of any sort. If you try to create more than one connection from a pool, you will get an error. This is the code that I eventually got working:
const sql = require('mssql');

// await only works inside an async function, so the calls below are wrapped
// in one (the wrapper name is arbitrary; config is defined elsewhere)
async function runQuery(query) {
    let pool = new sql.ConnectionPool(config); // some object that lets you connect ONCE
    let cnn = await pool.connect(); // create single allowed connection on this 'pool'
    let result = await cnn.request().query(query);
    console.log('result:', result);
    cnn.close(); // close your connection
    return result;
}
This code can be run multiple times in parallel and seems to create multiple connections and correctly close them.
I'm just starting to use MongoDB without mongoose (to get away from the schemas), and I wanted to create a simple module with various exported functions to use in the rest of my app. I've pasted the code below.
The problem I'm having is that databasesList.databases comes back as undefined, and I'm not sure why. There should be 2 databases on my cluster, and one collection in each database.
As a tangential question, I thought maybe I would check the collections instead (now commented out), but though I found this page (https://docs.mongodb.com/manual/reference/method/db.getCollectionNames/), the function getCollectionNames seems not to exist. Now I'm wondering if I'm using the wrong documentation and that is why my databases are coming back undefined.
const { MongoClient } = require('mongodb'); // assumed import; uri is defined elsewhere
const client = new MongoClient(uri)
const connection = client.connect( function (err, database) {
if (err) throw err;
else if (!database) console.log('Unknown error connecting to database');
else {
console.log('Connected to MongoDB database server');
}
});
module.exports = {
getDatabaseList: function() {
console.log('start ' + client);
databasesList = client.db().admin().listDatabases();
//collectionList = client.db().getCollectionNames();
//console.log("Collections: " + collectionList);
console.log("Databases: " + databasesList.databases);
//databasesList.databases.forEach(db => console.log(` - ${db.name}`));
}
}
Your code is correct; you just need to change a few things.
module.exports = {
getDatabaseList: async function() {
console.log('start ' + client);
databasesList = await client.db().admin().listDatabases();
//collectionList = await client.db().getCollectionNames();
//console.log("Collections: " + collectionList);
console.log("Databases: " + databasesList.databases);
databasesList.databases.forEach(db => console.log(` - ${db.name}`));
}
}
You have to declare the function as async and use await as well.
The async and await keywords enable asynchronous, promise-based behaviour to be written in a cleaner style, avoiding the need to explicitly configure promise chains.
You can use this modular approach to build your database access code:
index.js: runs your database application code, such as listing database names, listing collection names, and reading from a collection.
const connect = require('./database');
const dbFunctions = require('./dbFunctions');
const start = async function() {
const connection = await connect();
console.log('Connected...');
const dbNames = await dbFunctions.getDbNames(connection);
console.log(await dbNames.databases.map(e => e.name));
const colls = await dbFunctions.getCollNames(connection, 'test');
console.log(await colls.map(e => e.name));
console.log(await dbFunctions.getDocs(connection, 'test', 'test'));
};
start();
database.js: creates a connection object. This connection is used for all your database access code. In general, a single connection creates a connection pool, and this can be used throughout a small application.
const { MongoClient } = require('mongodb');
const url = 'mongodb://localhost:27017/';
const opts = { useUnifiedTopology: true };
async function connect() {
console.log('Connecting to db server...');
return await MongoClient.connect(url, opts );
}
module.exports = connect;
dbFunctions.js: various functions to access database details, collection details, and to query a specific collection.
module.exports = {
// return list of database names
getDbNames: async function(conn) {
return await conn.db().admin().listDatabases( { nameOnly: true } );
},
// return collections list as an array for a given database
getCollNames: async function(conn, db) {
return await conn.db(db).listCollections().toArray();
},
// return documents as an array for a given database and collection
getDocs: async function(conn, db, coll) {
return await conn.db(db).collection(coll).find().toArray();
}
}
No matter how many different posts, books, tutorials, etc. I read, I can't seem to grasp promises. I'm trying to execute a chunk of code in Node after the SQL requests have completed, but I can't seem to make it work... Any help at all would be greatly appreciated. I need console.log('done'); to run after the for loop has finished running through the req array:
app.post('/QueryWrite', function (req, res) {
var anyErrors = false;
sql.connect(config).then(function (err) {
if (err) {
console.log(err);
res.send(err);
anyErrors = true;
}
var i;
for (i = 0; i < req.body['query[]'].length; i++) {
var sqlQuery = req.body["query[]"][i];
let sqlRequest = new sql.Request();
sqlRequest.query(sqlQuery, function (err) {
if (err) {
console.log(err);
res.send(err);
anyErrors = true;
}
})
console.log(req.body.authenticatedAs + " wrote to DB: " + sqlQuery);
}
}).then(console.log('done'));
});
So here is a somewhat simplified example for the database connection.
// database.js file
const mysql = require("mysql");
const utils = require("util");
const dbCon = mysql.createConnection({
debug: false,
host: "localhost",
user: "root",
database: "laravel_test_tweety",
password: "",
multipleStatements: true,
});
dbCon.query = utils.promisify(dbCon.query);
module.exports = dbCon;
So this is roughly how I use my database connections. After this, let's try server.js:
//server.js
// ... here your code with creation of app and such,
// and we are importing dbCon from database.js
const dbCon = require("./database");
// server that listens for requests called `app`
app.post("/QueryWrite", function (req, res) {
/**
* in case if you are sending an object as queries
* we need them as array
* { query1: 'some query', query2: 'some query' }
* =>
* ['some query', 'some query']
*
* So that's why we are using `Object.values`
*/
let queries = Object.values(req.body["query[]"]);
if (queries) {
// So there are couple of options here
// Let's say you are only selecting stuff from db
let query = queries.join(";");
// This is gonna give you something like
// "SELECT 1; SELECT 2"
// If you want this to work you need `multipleStatements: true` on
// mysqlConfig from database.js
dbCon
.query(query)
.then((results) => {
console.log(results);
// if needed do some stuff here and send res back.
res.json(results);
})
.catch(err => {
console.error(err);
// do some other stuff
});
}
});
After everything from here: if you need to do an insert or update, you also need to be able to send data. I can't really get my head around that right now, but maybe you could separate those within the array and query them one by one? A rough sketch of that is below.
I'm aware that this is not exactly what you want, but at least for select or delete queries this would work.
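As a sketch of the "query one by one" idea under the same assumptions (the promisified dbCon.query from database.js above, and the same route), you could run each statement separately and only reply, and log 'done', once all of them have finished:
app.post("/QueryWrite", async function (req, res) {
    const queries = Object.values(req.body["query[]"] || {});
    try {
        // one dbCon.query per statement; Promise.all settles when every query is done
        const results = await Promise.all(queries.map((q) => dbCon.query(q)));
        console.log("done");
        res.json(results);
    } catch (err) {
        console.error(err);
        res.status(500).send(err);
    }
});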
I have a Node.js API using Express that connects to a SQL Server database, and I want to use it in an Angular project. I use two files, a route file and a controllers file. My route file is as follows:
module.exports = (app) => {
const UsrContrllr = require('../Controllers/users.controllers');
//1. GET ALL USERS
app.get('/api/users', UsrContrllr.func1);
//2. POST NEW USER
app.post('/api/user/new', UsrContrllr.func2);
};
And my controllers file is given below:
const mssql = require('mssql');
exports.func1 = (req, res) =>
{
// Validate request
console.log(`Fetching RESPONSE`);
// create Request object
var request = new mssql.Request();
// query to the database and get the records
const queryStr = `SELECT * FROM USERS`;
request.query(queryStr, function (err, recordset) {
if (err) console.log(err)
else {
if (recordset.recordset.toString() === '') {
res.send('Oops!!! Required data not found...');
}
else {
// send records as a response
res.send(recordset);
}
};
});
};
exports.func2 = (req, res) =>
{
// Validate request
console.log(`INSERTING RECORD ${req}`);
// create Request object
var request = new mssql.Request();
// query to the database and get the records
const queryStr = `INSERT INTO GDUSERS (USERCODE, PASSWORD, LANGUAGE, USERCLASS, FIRSTNAME, LASTNAME, CONTACTNO) VALUES ('${req.body.usercode}', '${req.body.password}', 'EN', '0', '${req.body.firstname}', '${req.body.lastname}', '${req.body.contactno}');`;
request.query(queryStr, function (err, recordset) {
if (err) console.log(err)
else {
if (recordset.recordset.toString() == '') {
res.send('Oops!!! Required data not found...');
}
else {
// Send records as response
res.send(recordset);
}
};
});
};
The GET request works well, but when I try to run the POST request directly from the angular application, I get an error stating
Cannot GET URL/api/user/new
The angular code in my angular project is:
signup() {
let headers = new Headers({ 'Content-Type': 'application/json' });
let options = new RequestOptions({ headers: headers });
console.log(this.user); //User details come from a form
this.http.post("URL", this.user, options)
.subscribe(
(err) => {
if(err) console.log(err);
console.log("Success");
});
}
I'm not sure whether the Angular code I'm using is right or not, and I don't know where I'm going wrong. How does one send an HTTP POST request from an Angular project?
This is the way I handled my user signup with http.post calls. My approach is slightly different when signing up a user because I am using a promise instead of an observable (which I normally use for my service calls), but I will show you both ways.
createUser(user: User): Promise<string> {
    const promise = new Promise<string>((resolve, reject) => {
        const userForPost = this.createUserForPost(user);
        this.http.post(environment.backendUrl + '/api/user/signup', userForPost, this.config).toPromise<HttpConfig>()
            .then(createdUser => {
                // call resolve(...) here with whatever the caller should receive,
                // so the returned promise actually settles
            }).catch(error => {
                console.log(error);
                // call reject(error) here so callers can handle the failure
            });
    });
    return promise;
}
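For completeness, a short usage sketch of that promise-based method (the component field and the injected service name are assumptions, and it presumes resolve/reject are wired up as noted in the comments above):
this.userService.createUser(this.user)
    .then(createdUser => console.log('Success', createdUser))
    .catch(error => console.log(error));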
Here is another example with an observable:
createForumPost(forumPost: ForumPost) {
    this.http.post<{ message: string, forumPostId: string }>(environment.backendUrl + '/api/forumPosts', forumPost).subscribe((responseData) => {
        const id = responseData.forumPostId;
        forumPost.id = id;
    });
}
I defined my URL somewhere else and then just use environment.backendUrl + 'path' to define my path (the same as the path in your backend controller).
This is one of my first answers here on SO, I am sorry if it is a bit messy.
I hope I was able to help with my examples :)
I am new to MongoDB. I am trying to query from different collections, and when I fetch data from the category collection (i.e. when I run the equivalent of select * on the collection) it throws the error MongoError: pool destroyed.
As per my understanding, it is because some find({}) is creating a pool and that pool is being destroyed.
The code which I am using inside the model is below:
const MongoClient = require('mongodb').MongoClient;
const dbConfig = require('../configurations/database.config.js');
export const getAllCategoriesApi = (req, res, next) => {
return new Promise((resolve, reject ) => {
let finalCategory = []
const client = new MongoClient(dbConfig.url, { useNewUrlParser: true });
client.connect(err => {
const collection = client.db(dbConfig.db).collection("categories");
debugger
if (err) throw err;
let query = { CAT_PARENT: { $eq: '0' } };
collection.find(query).toArray(function(err, data) {
if(err) return next(err);
finalCategory.push(data);
resolve(finalCategory);
// db.close();
});
client.close();
});
});
}
What I find here is that when I am using
let query = { CAT_PARENT: { $eq: '0' } };
collection.find(query).toArray(function(err, data) {})
i.e. find(query), it returns data, but with {} or with $gte/$gt it throws the pool error.
The code which I have written in the controller is below:
import { getAllCategoriesListApi } from '../models/fetchAllCategory';
const redis = require("redis");
const client = redis.createClient(process.env.REDIS_PORT);
export const getAllCategoriesListData = (req, res, next, query) => {
// Try fetching the result from Redis first in case we have it cached
return client.get(`allstorescategory:${query}`, (err, result) => {
// If that key exist in Redis store
if (false) {
res.send(result)
} else {
// Key does not exist in Redis store
getAllCategoriesListApi(req, res, next).then( function ( data ) {
const responseJSON = data;
// Save the Wikipedia API response in Redis store
client.setex(`allstorescategory:${query}`, 3600, JSON.stringify({ source: 'Redis Cache', responseJSON }));
res.send(responseJSON)
}).catch(function (err) {
console.log(err)
})
}
});
}
Can anyone tell me what mistake I am making here and how I can fix the pool issue?
Thanks in advance.
I assume that toArray is asynchronous (i.e. it invokes the callback passed in as the results become available, that is, as they are read from the network).
If this is true, the client.close() call is going to be executed before the results have been read, which likely yields your error.
The close call needs to be done after you have finished iterating the results.
Separately from this, you should probably not be creating the client instance in the request handler like this. Client instances are expensive to create (they must talk to all of the servers in the deployment before they can actually perform queries) and generally should be created per running process rather than per request.
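A minimal sketch of both points, assuming the same dbConfig module as in the question (the shared client and the categories query are just carried over for illustration):
const { MongoClient } = require('mongodb');
const dbConfig = require('../configurations/database.config.js');

// create the client once per process and reuse it for every request
const client = new MongoClient(dbConfig.url, { useNewUrlParser: true });
const clientReady = client.connect();

export const getAllCategoriesApi = async () => {
    await clientReady; // wait for the shared client to be connected
    const collection = client.db(dbConfig.db).collection("categories");
    const query = { CAT_PARENT: { $eq: '0' } };
    // toArray() resolves only after all documents have been read,
    // so nothing gets closed while the cursor is still being iterated
    return collection.find(query).toArray();
};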
I am trying to figure out the best way to pass a MySQL connection (using node-mysql) between my routes for express.js. I am dynamically adding each route (using a for-each-file loop in routes), meaning I can't just pass in the connection to the routes that need it. I either need to pass it to every route or to none at all. I didn't like the idea of passing it to ones that don't need it, so I created a dbConnection.js that the routes can individually import if they need it. The problem is that I don't think I am doing it correctly. As of now, my dbConnection.js contains:
var mysql = require('mysql');
var db = null;
module.exports = function () {
if(!db) {
db = mysql.createConnection({
socketPath: '/tmp/mysql.sock',
user: '*********',
password: '*********',
database: '**********'
});
}
return db;
};
And I am importing it into each route using:
var db = require('../dbConnection.js');
var connection = new db();
But I would like to do it like this:
var connection = require('../dbConnection.js');
When I try it like this, however, I get an error saying connection has no method 'query' when I try to make a query.
I find it more reliable to use node-mysql's pool object. Here's how I set mine up. I use environment variables for the database information, which keeps it out of the repo.
database.js
var mysql = require('mysql');
var pool = mysql.createPool({
host: process.env.MYSQL_HOST,
user: process.env.MYSQL_USER,
password: process.env.MYSQL_PASS,
database: process.env.MYSQL_DB,
connectionLimit: 10,
supportBigNumbers: true
});
// Get records from a city
exports.getRecords = function(city, callback) {
var sql = "SELECT name FROM users WHERE city=?";
// get a connection from the pool
pool.getConnection(function(err, connection) {
if(err) { console.log(err); callback(true); return; }
// make the query
connection.query(sql, [city], function(err, results) {
connection.release();
if(err) { console.log(err); callback(true); return; }
callback(false, results);
});
});
};
Route
var db = require('../database');
exports.GET = function(req, res) {
db.getRecords("San Francisco", function(err, results) {
    if (err) { res.send(500, "Server Error"); return; }
    // Respond with results as JSON
    res.send(results);
});
};
Your solution will work if you use db() instead of new db(), since new db() returns a plain object and not the db connection:
var db = require('../dbConnection.js');
//var connection = new db();
var connection = db();
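If you specifically want var connection = require('../dbConnection.js') to hand back something you can query directly, one variant (sketched here with the pool idea from the other answer, reusing the question's placeholder credentials) is to export the created pool itself; require() caches the module, so every route shares the same object:
// dbConnection.js
var mysql = require('mysql');

var pool = mysql.createPool({
    socketPath: '/tmp/mysql.sock',
    user: '*********',
    password: '*********',
    database: '**********'
});

module.exports = pool;

// in a route file:
// var connection = require('../dbConnection.js');
// connection.query('SELECT 1', function (err, rows) { /* ... */ });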