Increase number of responses per second - node.js

I have an Android game with 40,000 users online, and each user sends a request to the server every 5 seconds.
I wrote this code to test request handling:
const express = require('express')
const app = express()
const pg = require('pg')

const conString = 'postgres://postgres:123456@localhost/dbtest'

app.get('/', function (req, res, next) {
  pg.connect(conString, function (err, client, done) {
    if (err) {
      return next(err)
    }
    client.query('SELECT name, age FROM users LIMIT 1;', [], function (err, result) {
      done()
      if (err) {
        return next(err)
      }
      res.json(result.rows)
    })
  })
})

app.listen(3000)
And to test this code with 40,000 requests, I wrote this AJAX code:
var j = 1;
for (var i = 0; i < 40000; i++) {
  $.ajax({
    url: "http://85.185.161.139:3001/",
    success: function (response) {
      var d = new Date();
      console.log(j++, d.getHours() + ":" + d.getMinutes() + ":" + d.getSeconds());
    }
  });
}
Server details (I know the machine is poor)
Questions:
1. This code (Node.js) only serves about 200 responses per second. How can I improve it to increase that number?
2. Is this AJAX loop a correct way to simulate 40,000 online users?
3. Would using sockets be better?

You should take a divide-and-conquer approach to problems like this: find the most resource-hungry operation and replace it, or reduce the number of calls to it.
The main problem I see here is that the server opens a new database connection on each request, which probably takes most of the time and resources.
I suggest opening the connection once when the server boots and reusing it across requests.
const express = require('express')
const app = express()
const pg = require('pg')

const conString = 'postgres://postgres:123456@localhost/dbtest'

let pgClient // assigned once the initial connection succeeds

pg.connect(conString, function (err, client, done) {
  if (err) {
    throw err
  }
  pgClient = client
})

app.get('/', function (req, res, next) {
  pgClient.query('SELECT name, age FROM users LIMIT 1;', [], function (err, result) {
    if (err) {
      return next(err)
    }
    res.json(result.rows)
  })
})

app.listen(3000)
For proper stress/load testing it is better to use a specialized utility such as ab from Apache. Finally, sockets are better for rapid, small data transfers, but keep in mind that they have scaling problems and in most cases become very inefficient at 10K+ simultaneous connections.
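For example, a run that fires 40,000 requests with 200 concurrent connections could look like this (host and port taken from the question's test snippet):

ab -n 40000 -c 200 http://85.185.161.139:3001/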
EDIT: As @robertklep pointed out, it is better to use client pooling in this case and retrieve clients from the pool.
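A minimal sketch of that pooled variant, assuming a node-postgres version (7+) whose Pool accepts a connectionString; query and credentials follow the question:

const express = require('express')
const { Pool } = require('pg')
const app = express()

// the pool keeps a set of open connections and hands one out per query
const pool = new Pool({
  connectionString: 'postgres://postgres:123456@localhost/dbtest',
  max: 20 // upper bound on simultaneous database connections
})

app.get('/', function (req, res, next) {
  // pool.query checks out a client, runs the query and releases the client
  pool.query('SELECT name, age FROM users LIMIT 1;', [], function (err, result) {
    if (err) return next(err)
    res.json(result.rows)
  })
})

app.listen(3000)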

Related

many queries postgres (node), no parallel queries?

I am running a Node server with the node-postgres (pg) package.
I wrote a program which fires n queries (for instance 20,000) at once against my Postgres database.
When I do this with several clients that each also want to run 20,000 queries at once, there is no parallelism. That means the requests of the second client are queued until the first client has finished all of its queries.
Is this normal behavior for Postgres? If yes, how can I prevent one user from getting all the resources (while the others have to wait) when there is no parallelism?
This is my code:
const express = require('express');
const app = express();
const { Pool } = require("pg");
const pool = new Pool();

function benchmark() {
  pool.connect((err, client, done) => {
    if (err) throw err;
    client.query("SELECT * from member where m_id = $1", [1], (err, res) => {
      done();
      if (err) {
        console.log(err.stack);
      } else {
        console.log(res.rows[0]);
      }
    });
  });
}

app.get('/', function (req, res) {
  for (let i = 0; i < 20000; i++) {
    benchmark();
  }
});
First you need to create a connection pool. Here's an example with node's pg, placed in a separate module (node-pg-sql.js) for convenience:
node-pg-sql.js:
const { Pool } = require('pg');
const pool = new Pool(fileNameConfigPGSQL); // your pool configuration object, defined elsewhere

module.exports = {
  query: (text, params, callback) => {
    const start = Date.now()
    return pool.query(text, params, (err, res) => {
      const duration = Date.now() - start
      // console.log('executed query', { text, duration, rows: res.rowCount })
      callback(err, res)
    })
  },
  getClient: (callback) => {
    pool.connect((err, client, done) => {
      const query = client.query.bind(client)
      // monkey patch the query method to remember the last executed query
      client.query = function () {
        client.lastQuery = arguments
        return query.apply(client, arguments)
      }
      // warn if a client is checked out for more than 5 seconds
      const timeout = setTimeout(() => {
        // console.error('A client has been checked out for more than 5 seconds!')
        // console.error(`The last executed query on this client was: ${client.lastQuery}`)
      }, 5000)
      const release = (err) => {
        // the 'done' method returns the client to the pool
        done(err)
        // clear the timeout
        clearTimeout(timeout)
        // restore the original query method before the monkey patch
        client.query = query
      }
      callback(err, client, release)
    })
  }
}
In your postgresql.conf (on Linux normally under /var/lib/pgsql/data/postgresql.conf), set max_connections to the desired value:
max_connections = 300
Keep in mind:
Each PostgreSQL connection consumes RAM for managing the connection and the client using it. The more connections you have, the more RAM is spent on them instead of being available to run the database.
While increasing max_connections, you usually need to increase shared_buffers and kernel.shmmax as well for the additional client connections to be effective.
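For illustration, the related settings might look like this (the values are purely illustrative and must be sized to your machine's RAM):

# postgresql.conf
max_connections = 300
shared_buffers = 1GB    # often sized around 25% of system RAM

# /etc/sysctl.conf (only relevant for older PostgreSQL/kernel combinations)
kernel.shmmax = 2147483648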
Whenever you want to run a query in one of your routes/endpoints, just require the separate client-pool file:
const db = require('../../../node-pg-sql');

module.exports = (router) => {
  router.get('/someRoute', (req, res) => {
    console.log(`*****************************************`);
    console.log(`Testing pg..`);
    let sqlSelect = `SELECT EXISTS (
      SELECT 1
      FROM pg_tables
      WHERE schemaname = 'someschema'
    )`;
    db.query(sqlSelect, [], (errSelect, responseSelect) => {
      if (errSelect) {
        /* INFO: Error while querying table */
        console.log(`*****************************************`);
        console.log(`ERROR WHILE CHECKING CONNECTION: ${errSelect}`);
      }
      else {
        // INFO: No error from database
        console.log(`*****************************************`);
        console.log(`CONNECTION TO PGSQL WAS SUCCESSFUL..`);
        res.json({ success: true, message: responseSelect, data: responseSelect.rows[0].exists });
      }
    })
  });
}
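The exported getClient is for cases where several queries must run on the same client, for example a transaction. A sketch (the member table is taken from the question; error handling is minimal):

db.getClient((err, client, release) => {
  if (err) return console.error(err);
  client.query('BEGIN', (err) => {
    if (err) return release(err);
    client.query('INSERT INTO member (m_id) VALUES ($1)', [1], (err) => {
      if (err) return release(err); // a full implementation would ROLLBACK first
      client.query('COMMIT', (err) => release(err));
    });
  });
});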
EDIT:
"there is no parallelity.."
Node is asynchronous; you can either work with promises or spawn more clients/pools and tune your max_connections (as explained in my answer, but keep the performance of your host machine in mind). Still, with multiple clients each running around 20,000 queries, they won't resolve instantly or fully in parallel. What is the exact goal you are trying to achieve?
"Is this a normal behavior for postgres?"
This is due to Node's event loop as well as certain performance limitations of the host machine running Postgres.
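To see the pool-level concurrency, a sketch using pg's promise API (the pool runs up to max queries at the same time and queues the rest; the query follows the question):

const { Pool } = require('pg');
const pool = new Pool({ max: 20 }); // up to 20 queries in flight at once

const jobs = [];
for (let i = 0; i < 20000; i++) {
  // without a callback, pool.query returns a promise
  jobs.push(pool.query('SELECT * FROM member WHERE m_id = $1', [1]));
}

Promise.all(jobs)
  .then(results => console.log('done:', results.length, 'queries'))
  .catch(err => console.error(err));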

How to efficiently forward requests to multiple endpoints using nodejs?

I built a Node.js server to act as an adapter server: upon receiving a POST request containing some data, it extracts the data from the request body and then forwards it to a few other external servers. Finally, my server sends a response consisting of the responses from each of the external servers (success/fail).
If there's only one endpoint to forward to, it seems fairly straightforward. However, when I have to forward to more than one server, I have to rely on things like Promise.all(), which has fail-fast behaviour: if one promise is rejected (an external server is down), all other promises are rejected immediately and the rest of the servers will not receive my data.
This may not be the exact solution, but what I am posting could be a workaround for your problem.
A few days back I had the same problem when I wanted to implement API versioning. Here is the solution I implemented; please have a look.
Architecture diagram
Let me explain this diagram: it shows the usual initial configuration for the server. All incoming API requests are passed to the index.js file inside the release directory.
index.js (in release directory)
const express = require('express');
const fid = require('./core/file.helper');
const router = express.Router();

fid.getFiles(__dirname, './release').then(releases => {
  releases.forEach(release => {
    // release = release.replace(/.js/g,'');
    router.use(`/${release}`, require(`./release/${release}/index`))
  })
})

module.exports = router
Code snippet for file.helper.js:
// requiring path and fs modules
const path = require('path');
const fs = require('fs');

module.exports = {
  getFiles: (presentDirectory, directoryName) => {
    return new Promise((resolve, reject) => {
      // joining path of directory
      const directoryPath = path.join(presentDirectory, directoryName);
      // passing directoryPath and callback function
      fs.readdir(directoryPath, function (err, files) {
        // handling error
        if (err) {
          console.log('Unable to scan directory: ' + err);
          return reject(err)
        }
        // listing all files using forEach
        // files.forEach(function (file) {
        //   // Do whatever you want to do with the file
        //   console.log(file);
        // });
        resolve(files)
      });
    })
  }
}
Now, from this index file, every index.js inside each version folder is mapped.
Here is the code for the index.js inside v1, v2, and so on:
const express = require('express');
const mongoose = require('mongoose');
const fid = require('../../core/file.helper');
const dbconf = require('./config/datastore');
const router = express.Router();

// const connection_string = `mongodb+srv://${dbconf.atlas.username}:${dbconf.atlas.password}@${dbconf.atlas.host}/${dbconf.atlas.database}`;
const connection_string = `mongodb://${dbconf.default.username}:${dbconf.default.password}@${dbconf.default.host}:${dbconf.default.port}/${dbconf.default.database}`;

mongoose.connect(connection_string, {
  useCreateIndex: true,
  useNewUrlParser: true
}).then(status => {
  console.log(`Database connected to mongodb://${dbconf.default.username}@${dbconf.default.host}/${dbconf.default.database}`);
  fid.getFiles(__dirname, './endpoints').then(files => {
    files.forEach(file => {
      file = file.replace(/.js/g, '');
      router.use(`/${file}`, require(`./endpoints/${file}`))
    });
  })
}).catch(err => {
  console.log(`Error connecting database ${err}`);
})

module.exports = router
Each of these index.js files inside a version folder is in turn mapped to the endpoints inside that version's endpoints folder.
Code for one of the endpoints is given below:
const express = require('express');
const router = express.Router();
const userCtrl = require('../controllers/users');
router.post('/signup', userCtrl.signup);
router.post('/login', userCtrl.login);
module.exports = router;
In this file we connect the endpoints to their controllers.
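For completeness, a minimal sketch of what the matching controllers/users.js could look like (hypothetical handlers; the original answer does not show this file):

// controllers/users.js (hypothetical)
module.exports = {
  signup: (req, res) => {
    // create the user from req.body, then respond
    res.status(201).json({ success: true });
  },
  login: (req, res) => {
    // verify the credentials from req.body, then respond
    res.status(200).json({ success: true });
  }
};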
Here is a callback-based relay that calls each target in turn and collects every response, avoiding Promise.all's fail-fast behaviour:

var config = {
  'targets': [
    'https://abc.api.xxx',
    'https://xyz.abc',
    'https://stackoverflow.net'
  ]
};

relay(req, resp, config);

function relay(req, resp, config) {
  doRelay(req, resp, config['targets'], relayOne);
}

function doRelay(req, resp, servers, relayOne) {
  var finalresponses = [];
  if (servers.length > 0) {
    var loop = function (servers, index, relayOne, done) {
      relayOne(req, servers[index], function (response) {
        finalresponses.push(response);
        if (++index < servers.length) {
          // defer the next call so the stack does not grow with the list
          setTimeout(function () {
            loop(servers, index, relayOne, done);
          }, 0);
        } else {
          done(resp, finalresponses);
        }
      });
    };
    loop(servers, 0, relayOne, done);
  } else {
    done(resp, finalresponses);
  }
}

function relayOne(req, targetserver, relaydone) {
  // call the targetserver and return the response data
  /* return relaydone(response data); */
}

function done(resp, finalresponses) {
  console.log('ended');
  resp.writeHead(200, 'OK', {
    'Content-Type': 'text/plain'
  });
  // finalresponses is an array; join it into a plain-text body
  resp.end(finalresponses.join('\n'));
}
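The relayOne stub is left open above; a minimal sketch using Node's built-in https module might look like this (error responses are collected as data rather than thrown, so one dead server does not abort the loop):

const https = require('https');

function relayOne(req, targetserver, relaydone) {
  const forward = https.request(targetserver, { method: 'POST' }, function (response) {
    let data = '';
    response.on('data', chunk => { data += chunk; });
    response.on('end', () => relaydone(data));
  });
  forward.on('error', err => relaydone('error: ' + err.message));
  // assumes req.body was parsed upstream (e.g. by express.json())
  forward.end(JSON.stringify(req.body));
}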
It sounds like you are trying to design a reverse proxy. If you are struggling to get custom code to work, there is a free npm library that is very robust.
I would recommend node-http-proxy.
I have posted a link below that leads directly to the "modify a response" section, since you mentioned modifying the API format in your question. Be sure to read the entire page, though.
https://github.com/http-party/node-http-proxy#modify-a-response-from-a-proxied-server
Note: this library is also very good because it supports SSL and can proxy both to localhost (servers on the same machine) and to servers on other machines (remote).
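A minimal sketch of a proxy built with it (single target here; the target URL is a placeholder):

const http = require('http');
const httpProxy = require('http-proxy');

const proxy = httpProxy.createProxyServer({});

http.createServer(function (req, res) {
  // forward every incoming request to the target server
  proxy.web(req, res, { target: 'http://127.0.0.1:5050' });
}).listen(8000);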
Promise.all() from MDN:
"It rejects with the reason of the first promise that rejects."
To overcome the problem, you'll need to catch() each request you've made, e.g.:

Promise.all([
  request('<url 1>').catch(err => ({ err })),
  request('<url 2>').catch(err => ({ err })),
  request('<url 3>').catch(err => ({ err }))
])
.then(([result1, result2, result3]) => {
  if (result1.err) { /* url 1 failed */ }
  if (result2.err) { /* url 2 failed */ }
  if (result3.err) { /* url 3 failed */ }
})
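On newer runtimes (Node 12.9+), Promise.allSettled does this bookkeeping for you; a sketch:

Promise.allSettled([
  request('<url 1>'),
  request('<url 2>'),
  request('<url 3>')
]).then(results => {
  results.forEach(r => {
    if (r.status === 'fulfilled') {
      // r.value holds the response
    } else {
      // r.reason holds the error; the other requests still completed
    }
  });
});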

How could I improve my nodeJS program

I used promises, callbacks and an external API for the first time, and I'm not sure I used them in the best way.
My program translates words from one language to another using a pivot language and the systran.io API.
The translate function translates a word and sends the response via a callback.
Then, in the POST request, I used promises to chain the two translations.
var express = require('express');
var request = require('request');
var router = express.Router();

router.post("/", function (req, res) {
  var promise = new Promise((resolve, reject) => {
    translate(req.query.source, "en", req.query.content, function (resa) {
      resolve(resa);
    })
  }).then(function (resolve) {
    console.log(resolve);
    translate("en", req.query.target, resolve, function (resa2) {
      console.log(resa2);
    })
  });
});

function translate(source, target, content, callback) {
  request("https://api-platform.systran.net/translation/text/translate?input=" + content + "&source=" + source + "&target=" + target + "&key=xxxxxxxx-783f-4f90-aea4-7fb357016647", function (err, data, body) {
    body = JSON.parse(body);
    console.log(body);
    callback(body.outputs[0].output)
  })
}

module.exports = router;
Is there a better way to write my program, which is already working?
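One possible cleanup (a sketch, not taken from the original thread): wrap translate in a promise so the two calls chain linearly and the final result is actually sent back:

function translateP(source, target, content) {
  return new Promise((resolve) => {
    translate(source, target, content, resolve);
  });
}

router.post("/", function (req, res) {
  translateP(req.query.source, "en", req.query.content)
    .then(english => translateP("en", req.query.target, english))
    .then(final => res.json({ translation: final }))
    .catch(err => res.status(500).json({ error: String(err) }));
});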

node.js Global connection already exists. Call sql.close() first

I'm trying to create web services with Node.js on top of a SQL Server database. On the frontend, when I call those two web services simultaneously, it throws the error "Global connection already exists. Call sql.close() first."
Any solution?
var express = require('express');
var router = express.Router();
var sql = require("mssql");

router.get('/Plant/:server/:user/:password/:database', function (req, res, next) {
  var user = req.params.user;
  var password = req.params.password;
  var server = req.params.server;
  var database = req.params.database;
  // config for your database
  var config = {
    user: user,
    password: password,
    server: server,
    database: database
  };
  sql.connect(config, function (err) {
    // create Request object
    var request = new sql.Request();
    // query the database and get the records
    request.query("SELECT distinct PlantName FROM MachineryStateTable",
      function (err, recordset) {
        if (err) console.log(err)
        else {
          for (var i = 0; i < recordset.recordsets.length; i++) {
            res.send(recordset.recordsets[i])
          }
        }
        sql.close();
      });
  });
});

router.get('/Dep/:server/:user/:password/:database/:plantname', function (req, res, next) {
  var user = req.params.user;
  var password = req.params.password;
  var server = req.params.server;
  var database = req.params.database;
  var plantname = req.params.plantname;
  // config for your database
  var config = {
    user: user,
    password: password,
    server: server,
    database: database
  };
  sql.connect(config, function (err) {
    // create Request object
    var request = new sql.Request();
    // query the database and get the records
    // note: interpolating plantname directly is vulnerable to SQL injection
    request.query("SELECT distinct DepName FROM MachineryStateTable where PlantName= '" + plantname + "'",
      function (err, recordset) {
        if (err) console.log(err)
        else {
          for (var i = 0; i < recordset.recordsets.length; i++) {
            res.send(recordset.recordsets[i])
          }
          sql.close();
        }
      });
  });
});

module.exports = router;
You have to create a connection pool.
Try this:
new sql.ConnectionPool(config).connect().then(pool => {
  return pool.request().query("SELECT * FROM MyTable")
}).then(result => {
  let rows = result.recordset
  res.setHeader('Access-Control-Allow-Origin', '*')
  res.status(200).json(rows);
  sql.close();
}).catch(err => {
  res.status(500).send({ message: `${err}` })
  sql.close();
});
From the documentation, the close method should be used on the connection, not on the required module.
So it should be used like this:
var connection = new sql.Connection({
  user: '...',
  password: '...',
  server: 'localhost',
  database: '...'
});

// later, when you are done with it:
connection.close();
Also, a couple of suggestions:
1. Putting res.send in a loop isn't a good idea. You could reply with the entire recordsets at once, or do operations over them, store the result in a variable and send that back.
2. Try using promises instead of callbacks; it would make the flow neater.
You must use a ConnectionPool.
The following function returns a recordset with my query results:
async function execute2(query) {
  return new Promise((resolve, reject) => {
    new sql.ConnectionPool(dbConfig).connect().then(pool => {
      return pool.request().query(query)
    }).then(result => {
      resolve(result.recordset);
      sql.close();
    }).catch(err => {
      reject(err)
      sql.close();
    });
  });
}
Works fine in my code!
If this problem still bothers you, you can change the core library itself (keep in mind that edits under node_modules are overwritten on reinstall):
go to node_modules\mssql\lib\base.js
and at line 1723 add the code below, before the if condition:
globalConnection = null
In case someone comes here trying to find out how to use SQL Server pool connection with parameters:
var executeQuery = function (res, query, parameters) {
  new sql.ConnectionPool(sqlConfig).connect().then(pool => {
    // create request object
    var request = new sql.Request(pool);
    // add parameters
    parameters.forEach(function (p) {
      request.input(p.name, p.sqltype, p.value);
    });
    // query the database
    request.query(query, function (err, result) {
      res.send(result);
      sql.close();
    });
  })
}
Don't read their documentation; I don't think it was written by someone who actually uses the library :) Also, don't pay any attention to the names of things: a 'ConnectionPool' doesn't seem to actually be a connection pool of any sort. If you try to create more than one connection from a pool, you will get an error. This is the code that I eventually got working:
// inside an async function
const sql = require('mssql');

let pool = new sql.ConnectionPool(config); // some object that lets you connect ONCE
let cnn = await pool.connect(); // create the single allowed connection on this 'pool'
let result = await cnn.request().query(query);
console.log('result:', result);
cnn.close(); // close your connection
return result;
This code can be run multiple times in parallel and seems to create multiple connections and correctly close them.
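Wrapped into a reusable helper it might look like this (a sketch; config and the queries are placeholders):

const sql = require('mssql');

async function getData(config, query) {
  const pool = new sql.ConnectionPool(config);
  const cnn = await pool.connect();
  try {
    const result = await cnn.request().query(query);
    return result.recordset;
  } finally {
    cnn.close(); // always give the connection back, even if the query failed
  }
}

// two independent queries running in parallel, each on its own connection
Promise.all([
  getData(config, 'SELECT 1 AS a'),
  getData(config, 'SELECT 2 AS b')
]).then(console.log).catch(console.error);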

Express stops working after reloading page 10 times

I am using Express as my web server for Node, and everything seems to work correctly. The only problem occurs when I load a specific page (the '/learn' route) 10 times in a row. Once I do this, Express seems to stop responding, although no error is logged to the console and nothing wrong is displayed on the page; the browser just keeps waiting for the host. What is weird is that the problem doesn't occur if I go from the problem page to another page and then back again; I can repeat that as much as I want without error. Here is my route with the problem:
var bcrypt = require('bcrypt');
var pool = require('../database.js').pool;

module.exports = function (app) {
  app.get('/learn', function (req, res, next) {
    var query = 'SELECT * FROM questions INNER JOIN answers ON questions.questionID = answers.questionID';
    pool.getConnection(function (err, connection) {
      connection.query(query, function (err, rows) {
        if (err) {
          throw err;
        }
        var data = {
          name: req.session.name,
          problems: rows,
        };
        res.render('learn.html', data);
      });
    });
  });

  app.post('/learn/checkAnswer', function (req, res) {
    // get posted form data
    var questionID = req.body.questionID;
    var selectedAnswer = req.body.selectedAnswer;
    // query database
    pool.getConnection(function (err, connection) {
      connection.query('SELECT correctAnswer FROM questions WHERE questionID = ?', questionID, function (err, rows) {
        res.send({
          correctAnswer: rows[0].correctAnswer
        });
      });
    });
  });
};
I'm not sure if this makes a difference, but I am using Handlebars as my rendering engine instead of Jade, as well as node-mysql for my database.
10 is the default size of the node-mysql pool, and since you're not releasing the connections retrieved with pool.getConnection, the 11th request will wait indefinitely for a free connection.
Easy to fix:
connection.query(query, function (err, rows) {
  connection.release(); // return the connection to the pool as soon as possible,
                        // so it can be reused by later requests
  if (err) ...
});
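Applied to the '/learn' route it looks like this (a sketch; getConnection errors are forwarded as well):

app.get('/learn', function (req, res, next) {
  var query = 'SELECT * FROM questions INNER JOIN answers ON questions.questionID = answers.questionID';
  pool.getConnection(function (err, connection) {
    if (err) return next(err);
    connection.query(query, function (err, rows) {
      connection.release(); // always give the connection back, success or failure
      if (err) return next(err);
      res.render('learn.html', {
        name: req.session.name,
        problems: rows
      });
    });
  });
});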
