Connecting to PostgreSQL hosted online using Node.js

I'm currently trying to query the database and write the results into a CSV file, however I am unable to connect to the PSQL host programmatically. What I am trying to do is:
Connect to the DB and query results
Push the results to an Excel file
Continue()
SFTP the results to myself on an SFTP server and place the file in a directory.
I am able to connect to the Postgres DB manually from the CLI as follows:
ssh username@xx.xx.xx.xx   // doesn't need a password because my id_rsa key is stored on the server
psql -U username -h localhost -p 5432 -d databasename   // password entered manually when prompted
Furthermore, connecting through Visual Studio Code works as well, however I need to connect to the server first (Remote Connection) and then connect to the database using a Postgres driver.
After investigating it, I figured I first need to connect to the server over SSH, and only then will I be allowed to access the database.
This is how I approached it in code:
Index.js
const serverConnectionParams = require('./src/config/serverConn');

function testConnectionServer() {
  try {
    serverConnectionParams.connectToServer();
  } catch (err) {
    console.error(err);
  }
}

testConnectionServer();
serverConn.js
const { Client } = require('ssh2');
const { readFileSync } = require('fs');
const databaseConnectionParams = require('./databaseConn');
function connectToServer() {
  const conn = new Client();
  conn.on('ready', () => {
    console.log('Client :: ready');
    conn.exec('uptime', (err, stream) => {
      if (err) throw err;
      databaseConnectionParams.auth(); // This is the database connection param
      stream.on('data', (data) => {
        console.log('STDOUT: ' + data);
      }).stderr.on('data', (data) => {
        console.log('STDERR: ' + data);
      });
    });
  }).connect({
    host: 'xx.xx.xx.xx',
    username: 'username',
    privateKey: readFileSync('src/key/id_rsa')
  });
}
exports.connectToServer = connectToServer;
databaseConn.js
const { readFileSync } = require('fs');
const envParam = require('./env.js');
const { Sequelize } = require('sequelize');
const sequelize = new Sequelize(envParam.database, envParam.username, envParam.password, {
  host: envParam.host,
  dialect: envParam.dialect,
  ssl: true,
  pool: {
    max: envParam.pool.max,
    min: envParam.pool.min,
    acquire: envParam.pool.acquire,
    idle: envParam.pool.idle
  }
});
async function auth() {
  try {
    console.log('trying to connect');
    await sequelize.validate(); // await this so the catch below can actually see connection failures
  } catch (error) {
    console.error('Unable to connect to the database:', error);
  }
}

exports.auth = auth;
env.js
const env = {
  database: 'databasename',
  username: 'username',
  password: 'password',
  host: 'ip#',
  dialect: 'postgres',
  pool: {
    max: 5,
    min: 0,
    acquire: 30000,
    idle: 10000
  }
};

module.exports = env;
After running node index.js I receive the following error output:
Client :: ready
trying to connect
STDOUT: 10:43:09 up 1:21, 1 user, load average: 5.71, 6.03, 5.15
C:\Users\~\node_modules\sequelize\lib\dialects\postgres\connection-manager.js:184
reject(new sequelizeErrors.ConnectionError(err));
^
ConnectionError [SequelizeConnectionError]: no pg_hba.conf entry for host "xx.xx.xx.xx", user "username", database "databasename", SSL off
at Client._connectionCallback
{
parent: error: no pg_hba.conf entry for host "xx.xx.xx.xx", user "username", database "databasename", SSL off
at Parser.parseErrorMessage
{
length: 154,
severity: 'FATAL',
code: '28000',
detail: undefined,
hint: undefined,
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'auth.c',
line: '490',
routine: 'ClientAuthentication'
},
original: error: no pg_hba.conf entry for host "x.x.x.x", user "username", database "password", SSL off
at Parser.parseErrorMessage (C:\Users\~\node_modules\pg-protocol\dist\parser.js:287:98)
at Parser.handlePacket (C:\Users\~\node_modules\pg-protocol\dist\parser.js:126:29)
at Parser.parse (C:\Users\~\node_modules\pg-protocol\dist\parser.js:39:38)
at Socket.<anonymous> (C:\Users\~\node_modules\pg-protocol\dist\index.js:11:42)
at Socket.emit (node:events:394:28)
at addChunk (node:internal/streams/readable:315:12)
at readableAddChunk (node:internal/streams/readable:289:9)
at Socket.Readable.push (node:internal/streams/readable:228:10)
at TCP.onStreamRead (node:internal/stream_base_commons:199:23) {
length: 154,
severity: 'FATAL',
code: '28000',
detail: undefined,
hint: undefined,
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'auth.c',
line: '490',
routine: 'ClientAuthentication'
}
}
Investigating the error code 28000:
I found this link explaining the issue as an authentication attempt failure:
https://help.heroku.com/DR0TTWWD/seeing-fatal-no-pg_hba-conf-entry-errors-in-postgres
I also found several solutions online saying pg_hba.conf needs to use md5 and Postgres then has to be restarted (not tried, as I cannot restart the Postgres service):
error: Ident authentication failed for user
I found another solution explaining it was an SSL issue (tried it and it didn't work):
Node.js, PostgreSQL error: no pg_hba.conf entry for host
After enabling SSL the error would change to the following:
SequelizeConnectionError: self signed certificate
I found a solution to that here:
SequelizeConnectionError: self signed certificate
After applying that it would give me a different error saying rejectUnauthorized is deprecated / a very old version (can't seem to reproduce the error message at the moment).
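For context, the SSL attempt was roughly along these lines, with the SSL settings placed under dialectOptions (the error above still reports "SSL off" while ssl: true sits at the top level of the config, which suggests the top-level flag never reaches the pg driver). This is only a sketch of that attempt, not the eventual solution:
const sequelize = new Sequelize(envParam.database, envParam.username, envParam.password, {
  host: envParam.host,
  dialect: 'postgres',
  dialectOptions: {
    ssl: {
      require: true,            // ask for an SSL connection
      rejectUnauthorized: false // accept the server's self-signed certificate
    }
  }
});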
So my hands are tied at the moment, any help would be great!
I've also tried using different JavaScript modules instead of Sequelize, however they all have the same authentication issue.
I also tried passing my id_rsa key, however it didn't solve my issue at all.
My assumption is that even though I am calling the connToDatabase function inside the SSH connection, it is still looking for the IP in the wrong place (the IP of the DB on the server is 192.168.31.4).
But when using that IP it says ERR Connection Timed out.
Another assumption I have is that the database has many connection restrictions and requires more params.
UPDATE:
I tried editing the pg_hba.conf file through remote access in VS Code, however it gives me an error that it cannot read the file.
Any help would be great!

I had completely forgotten that I posted this question.
The solution was pretty straightforward: after investigating for a while I realized I was making a mistake in the connection params.
For others who need help with similar issues, I will post the solution in a simple manner.
Basically, I first needed to SSH to the server and add a tunnel to that connection in order to reach the database. Only then will my Sequelize params for the database go through, because at that point I am fully connected to the server and its internal PostgreSQL database.
So, TL;DR:
SSH -> Tunnel -> Sequelize
ssh(10.x.x.1, etc) -> addTunnel(localhost, etc) -> sequelize(databaseName, etc)
EDIT: Added Code for reference
const Ssh2Promise = require('ssh2-promise');
const { readFileSync } = require('fs');
async function connectToServer(callback) {
  const ssh = new Ssh2Promise({
    host: '10.x.x.1',
    username: 'usernameofssh',
    privateKey: readFileSync('src/key/id_rsa'), // key-based auth; if you don't have a key, ssh2 also accepts a "password" option instead
  });

  const tunnel = await ssh.addTunnel({
    remoteAddr: '192.168.0.1', // the database host as seen from the SSH server; once tunnelled you connect via localhost. On AWS it would be something like test.test-test.amazonaws.com
    remotePort: 1234,          // port the database listens on
    localPort: 1234,           // local port the tunnel listens on
  });

  // Don't forget to wrap this in a try/catch for feedback
  await auth(callback); // send it wherever you need it to go
}
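For completeness, auth(callback) then points Sequelize at the local end of the tunnel rather than at the remote host. A minimal sketch of what that looks like (names and credentials are placeholders matching the snippet above, not my exact code):
const { Sequelize } = require('sequelize');

async function auth(callback) {
  // Connect to the local end of the SSH tunnel; traffic is forwarded to 192.168.0.1:1234 on the server side
  const sequelize = new Sequelize('databasename', 'databaseuser', 'databasepassword', {
    host: '127.0.0.1',
    port: 1234, // must match localPort from addTunnel
    dialect: 'postgres',
  });

  await sequelize.authenticate();
  console.log('Connected to PostgreSQL through the SSH tunnel');
  if (callback) callback(sequelize);
}
The key point is that host is the local end of the tunnel, not the remote database IP.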

Related

SSL Routines error when connecting to local DB

So I have this piece of code that is responsible for checking if a connection is valid:
const sequelize = new Sequelize(req.body.database, req.body.user, req.body.password, {
  host: req.body.host,
  dialect: 'mssql',
  port: req.body.port,
  dialectOptions: {
    options: {
      encrypt: true,
    }
  }
});

sequelize.authenticate().then((err) => {
  res.send('Connected');
})
.catch((err) => {
  console.log(err.message)
  res.send(err.message);
});
This worked perfectly when I was connecting to a local DB on my network. However, when I was trying to connect to a DB through a VPN (simulating a local network) I got the following error:
Failed to connect to 10.1.90.20:1433 - 4C300000:error:0A000102:SSL
routines:ssl_choose_client_version:unsupported
protocol:c:\ws\deps\openssl\openssl\ssl\statem\statem_lib.c:1983:
I read somewhere that adding
cryptoCredentialsDetails: {
minVersion: 'TLSv1'
}
in the options will resolve this but it just gave me a new error:
Failed to connect to 10.1.90.20:1433 - 70290000:error:0A0C0103:SSL
routines:tls_process_key_exchange:internal
error:c:\ws\deps\openssl\openssl\ssl\statem\statem_clnt.c:2255:
Any idea how to fix this?
Keep in mind that both databases are local; however, one of them is on a remote network accessible through an SSL VPN.
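For context, the cryptoCredentialsDetails block above is forwarded to tedious and would sit inside the existing dialectOptions.options, roughly like this (a sketch only; whether the server actually accepts the lowered TLS version is an assumption):
const sequelize = new Sequelize(req.body.database, req.body.user, req.body.password, {
  host: req.body.host,
  dialect: 'mssql',
  port: req.body.port,
  dialectOptions: {
    options: {
      encrypt: true,
      cryptoCredentialsDetails: {
        minVersion: 'TLSv1' // lower the minimum TLS version offered to the server
      }
    }
  }
});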

NodeJS/PostgreSQL Errors

I am setting up a Node app that has connections to multiple databases. I am using a map to create Pools for all my databases as such:
const { Pool } = require('pg');

const stnPool = new Map();

async function getOtherDb(stnName){
  if(!stnPool.has(stnName)){
    stnPool.set(stnName, new Pool({
      host: 'localhost',
      database: stnName.toLowerCase(),
      user: USERNAME,
      password: PASSWORD,
      port: 5432,
      max: 2000,
      idleTimeoutMillis: 0,
      connectionTimeoutMillis: 0,
    }))
  }
  return stnPool.get(stnName);
}
PostgreSQL currently has 10 'station' databases. I have 10 remote servers that connect to my server and upload data every 3 seconds. I also have some number of clients that connect to view the uploaded data in real time. Both server and client connections upload/request data via WebSockets.
To establish and query a Pool connection:
var uvDb = await db.getOtherDb("wx_uv")

if( uvDb != -1 ) {
  const dbT = await uvDb.connect()
  // ... do various db queries ...
  dbT.release()
}
After a few hours of getting Server uploads and Client requests I get this error:
/.../node/node_modules/pg-protocol/dist/parser.js:287
const message = name === 'notice' ? new messages_1.NoticeMessage(length, messageValue) : new messages_1.DatabaseError(messageValue, length, name);
^
error: parallel worker failed to initialize
at Parser.parseErrorMessage (/.../node/node_modules/pg-protocol/dist/parser.js:287:98)
at Parser.handlePacket (/.../node/node_modules/pg-protocol/dist/parser.js:126:29)
at Parser.parse (/.../node/node_modules/pg-protocol/dist/parser.js:39:38)
at Socket.<anonymous> (/.../node/node_modules/pg-protocol/dist/index.js:11:42)
at Socket.emit (node:events:365:28)
at addChunk (node:internal/streams/readable:314:12)
at readableAddChunk (node:internal/streams/readable:289:9)
at Socket.Readable.push (node:internal/streams/readable:228:10)
at TCP.onStreamRead (node:internal/stream_base_commons:190:23) {
length: 163,
severity: 'ERROR',
code: '55000',
detail: undefined,
hint: 'More details may be available in the server log.',
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'parallel.c',
line: '826',
routine: 'WaitForParallelWorkersToFinish'
}
Anyone have any ideas on what causes this?
=======UPDATE=======
Tried adding a try/catch to release the connection as well:
if( uvDb != -1 ) {
  const dbT = await uvDb.connect()
  try {
    // ... do various db queries ...
    dbT.release()
  } catch (err) {
    dbT.release()
  }
}
But now I get other errors...
"error: lost connection to parallel worker"
"error: parallel worker failed to initialize"

Is it possible to connect to MSSQL with Windows auth mode from a Node.js app running on Linux?

I'm trying to connect to an MSSQL server with Windows authentication mode (can't change that) from Node.js running on a Linux machine.
I tried many things, and all of them resulted in nearly the same error. Here is an attempt using tedious, with this simple code running on a Linux machine with Node.js:
let tedious = require('tedious');
let Connection = tedious.Connection;

const config = {
  userName: 'myUserName',
  password: 'myPassword',
  server: 'MyServ',
  options: {
    database: 'MyDbName'
  }
}

function handleConnection(err: any) {
  if (err) console.error("error connecting :-(", err);
  else console.log("successfully connected!!")
}

let connection = new Connection(config);
connection.on('connect', handleConnection);
I get this error
error connecting :-( { ConnectionError: Login failed for user ''.
at ConnectionError (./node_modules/tedious/lib/errors.js:13:12)
at Parser.tokenStreamParser.on.token (./node_modules/tedious/lib/connection.js:848:51)
at Parser.emit (events.js:198:13)
at Parser.parser.on.token (./node_modules/tedious/lib/token/token-stream-parser.js:37:14)
at Parser.emit (events.js:198:13)
at addChunk (./node_modules/readable-stream/lib/_stream_readable.js:298:12)
at readableAddChunk (./node_modules/readable-stream/lib/_stream_readable.js:280:11)
at Parser.Readable.push (./node_modules/readable-stream/lib/_stream_readable.js:241:10)
at Parser.Transform.push (./node_modules/readable-stream/lib/_stream_transform.js:139:32)
at doneParsing (./node_modules/tedious/lib/token/stream-parser.js:122:14) message: 'Login failed for user \'\'.', code: 'ELOGIN' }
The credentials I used do have SQL rights (tested with ODBC on a Windows machine).
Am I doing something wrong, or is it just impossible?
@ADyson, thank you a lot for your information, you managed to pinpoint the solution to my poorly formulated problem, caused by my total lack of knowledge on the subject. Really, thank you again. The solution was to use a domain login; this snippet worked:
const config = {
  user: MyUserName,
  password: MyPassword,
  server: 'MyServAdress',
  database: 'MyDbName',
  domain: 'MyDomain'
}

const sql = require('mssql');

sql.connect(config).then((pool: any) => {
  console.log('connected!');
}).catch((err: any) => {
  console.log(err);
});
Yes indeed, it's possible to receive data from a Linux client with only Windows authentication enabled. The MS SQL Server and the Node.js Linux server are on the same network; the Linux server isn't domain-joined.
I used this to execute my query:
const sql = require('mssql')

const config = {
  server: 'SERVER',
  database: 'DATABASE',
  user: 'USER',
  password: 'PASSWORD',
  domain: 'DOMAIN',
  options: {
    enableArithAbort: true // required, otherwise deprecation warning
  }
}

sql.connect(config)
  .then((conn) => {
    console.log('MSSQL: connected');
    conn.query(`SELECT ..`)
      .then(data => console.log(data))
      .then(() => conn.close())
  }).catch(err => { console.log(err) });

Node-Postgres SequelizeConnectionError: password authentication failed for user

I am developing a backend application with Node and Sequelize. My database is PostgreSQL.
When launching the app, the database connection works fine, but when it tries to communicate with the database to read or update, it fails with a connection error:
password authentication failed for user "wushin".
This seems really weird to me because the database connection has already been made and the password has been validated. Do you know what's happening? Maybe an issue with the pg module, but I tried different versions.
Versions
Node: 10.17.0
Sequelize: 5.21.3
Postgres: 10.11
pg module: 7.17.1
-> This code works fine:
const sequelize = new Sequelize(process.env.DATABASE_DEV_URL)

sequelize
  .authenticate()
  .then(() => {
    console.log('Connection has been established successfully.\n')
  })
  .catch(err => {
    console.error('Unable to connect to the database:', err)
  })
-> But this promise fails with SequelizeConnectionError:
models.Question.findAll()
  .then(data => {
    console.log('-> Succeeded data fetching\n')
    console.log(data)
  })
  .catch(err => {
    console.log('-> Failed data fetching\n')
    console.log('Error', err)
  })
Logs:
yarn run v1.19.2
$ node index.js
Example app listening on port 4000 or something!
Executing (default): SELECT 1+1 AS result
Connection has been established successfully.
- Trying to fetch data:
-> Failed data fetching
Error:
{ SequelizeConnectionError: password authentication failed for user "wushin"
at connection.connect.err (/home/wushin/Projects/GuessGame/theguessgame-api/node_modules/sequelize/lib/dialects/postgres/connection-manager.js:182:24)
at Connection.connectingErrorHandler (/home/wushin/Projects/GuessGame/theguessgame-api/node_modules/pg/lib/client.js:194:14)
at Connection.emit (events.js:198:13)
at Socket.<anonymous> (/home/wushin/Projects/GuessGame/theguessgame-api/node_modules/pg/lib/connection.js:128:12)
at Socket.emit (events.js:198:13)
at addChunk (_stream_readable.js:287:12)
at readableAddChunk (_stream_readable.js:268:11)
at Socket.Readable.push (_stream_readable.js:223:10)
at TCP.onStreamRead [as onread] (internal/stream_base_commons.js:94:17)
name: 'SequelizeConnectionError'
It seems that you pass no configuration to Sequelize other than the host. The minimum configuration is host, port, database name, dialect, username, and password.
From the docs:
const Sequelize = require('sequelize');

// Option 1: Passing parameters separately
const sequelize = new Sequelize('database', 'username', 'password', {
  host: 'localhost',
  dialect: /* one of 'mysql' | 'mariadb' | 'postgres' | 'mssql' */
});

// Option 2: Passing a connection URI
const sequelize = new Sequelize('postgres://user:pass@example.com:5432/dbname');
I finally fixed this. The issue was that with Sequelize, requiring the models runs an index.js that is supposed to set up the Sequelize connection for you, using the config directory Sequelize creates.
My own Sequelize connection was working well, but the one launched by requiring the models had bad information about my database.
Therefore I could not use the imported models to fetch data from the database.
I inserted the correct config information:
require('dotenv').config()

module.exports = {
  development: {
    url: process.env.DATABASE_URL,
    dialect: 'postgres',
  },
  test: {
    url: process.env.DATABASE_TEST_URL,
    dialect: 'postgres',
  },
  production: {
    url: process.env.DATABASE_PROD_URL,
    dialect: 'postgres',
  },
}
And completely removed the line that I wrote myself:
const sequelize = new Sequelize(process.env.DATABASE_DEV_URL)
It is now models/index.js that connects to the database with:
const sequelize = new Sequelize(process.env.DATABASE_URL)

sequelize
  .authenticate()
  .then(() => {
    console.log('Connection has been established successfully.\n')
  })
  .catch(err => {
    console.error('Unable to connect to the database:', err)
  })

Node.js "Error: read ECONNRESET"

I am trying to connect to mongo via Node.js and I'm getting the following error:
connection error: { Error: read ECONNRESET
at exports._errnoException (util.js:1022:11)
at TCP.onread (net.js:610:25) name: 'MongoError', message: 'read ECONNRESET' }
(node:3868) UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 1): MongoError: read ECONNRESET
Here is the code I'm using to connect:
const mongoose = require('mongoose');
mongoose.Promise = global.Promise;

module.exports = (config) => {
  const fs = require('fs');
  let key = fs.readFileSync('../server/test/keys/cert_key.pem');
  let ca = [fs.readFileSync('../server/test/keys/ca.pem')];

  let o = {
    server: {
      sslValidate: true,
      sslCA: ca,
      sslKey: key,
      sslCert: key
    }
  };

  mongoose.connect(config.connectionString, o);
  let database = mongoose.connection;

  database.on('error', console.error.bind(console, 'connection error:'))
  database.once('open', (error) => {
    if (error) {
      console.log(error);
      return;
    }
    console.log('MongoDB ready!')
  });

  require('./../models/Role').initialize();
  require('./../models/User');
  require('./../models/Item');
  require('./../models/character/Character_item');
  require('./../models/Article');
  require('./../models/Friend_request');
  require('./../models/Code');
  require('./../models/Ring');
  require('./../models/Ring_shop');
  require('./../models/Comment');
};
If I try to connect to mongo from Robo3T, it works. So it seems the problem is in my Node code. Any suggestions?
According to the MongoDB Node driver documentation, you must pass ssl: true in the server option in order to connect via SSL, otherwise the SSL settings are ignored.
My standard SSL db options are the following:
server: {
  ssl: true,
  sslValidate: true,
  sslCA: ca,
  ca: ca,
  sslKey: key,
  sslCert: key
}
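Plugged into the connection code from the question, that looks roughly like this (a sketch using the same option style and file paths as the snippets above):
const mongoose = require('mongoose');
const fs = require('fs');

module.exports = (config) => {
  const key = fs.readFileSync('../server/test/keys/cert_key.pem');
  const ca = [fs.readFileSync('../server/test/keys/ca.pem')];

  mongoose.connect(config.connectionString, {
    server: {
      ssl: true,         // without this the ssl* options below are ignored
      sslValidate: true,
      sslCA: ca,
      sslKey: key,
      sslCert: key
    }
  });
};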
I was having the same issue while working on MongoDB's hosted cloud service. The reason it was happening for me was that I had allowed my home IP to connect, and not my work IP. After adding my work IP in the cloud settings I was able to connect.
Confirm that your MongoDB database files are not damaged. If a .wt file is damaged, you'll get that error. The .wt files are in your storage path, e.g.:
storage:
  dbPath: E:\Data\mongodb
You can use the mongorestore command to restore data if you backed it up before; otherwise you can drop the database and re-initialize a new one.
