NodeJS/PostgreSQL Errors

I am setting up a Node app that has connections to multiple databases. I am using a map to create Pools for all my databases as such:
const stnPool = new Map();

async function getOtherDb(stnName) {
  if (!stnPool.has(stnName)) {
    stnPool.set(stnName, new Pool({
      host: 'localhost',
      database: stnName.toLowerCase(),
      user: USERNAME,
      password: PASSWORD,
      port: 5432,
      max: 2000,
      idleTimeoutMillis: 0,
      connectionTimeoutMillis: 0,
    }))
  }
  return stnPool.get(stnName);
}
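For reference, node-postgres defaults to 10 clients per pool and a stock PostgreSQL server allows roughly 100 connections in total, so max: 2000 per pool across 10 databases can exhaust the server. A more conservative configuration (a sketch only, with illustrative rather than tuned numbers) might look like:
new Pool({
  host: 'localhost',
  database: stnName.toLowerCase(),
  user: USERNAME,
  password: PASSWORD,
  port: 5432,
  max: 10,                       // keep (number of pools x max) well below the server's max_connections
  idleTimeoutMillis: 30000,      // let idle clients close instead of holding backends open forever
  connectionTimeoutMillis: 5000, // fail fast instead of queueing indefinitely
})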
PostgreSQL currently has 10 'station' databases. I have 10 remote servers that connect to my server and upload data every 3 seconds. I also have an X number of clients that connect to view the uploaded data in realtime. Both Server and Client connections upload/request data via Websockets.
To establish and query a Pool connection:
var uvDb = await db.getOtherDb("wx_uv")
if( uvDb != -1 ) {
  const dbT = await uvDb.connect()
  ... do various db queries ...
  dbT.release()
}
After a few hours of getting Server uploads and Client requests I get this error:
/.../node/node_modules/pg-protocol/dist/parser.js:287
const message = name === 'notice' ? new messages_1.NoticeMessage(length, messageValue) : new messages_1.DatabaseError(messageValue, length, name);
^
error: parallel worker failed to initialize
at Parser.parseErrorMessage (/.../node/node_modules/pg-protocol/dist/parser.js:287:98)
at Parser.handlePacket (/.../node/node_modules/pg-protocol/dist/parser.js:126:29)
at Parser.parse (/.../node/node_modules/pg-protocol/dist/parser.js:39:38)
at Socket.<anonymous> (/.../node/node_modules/pg-protocol/dist/index.js:11:42)
at Socket.emit (node:events:365:28)
at addChunk (node:internal/streams/readable:314:12)
at readableAddChunk (node:internal/streams/readable:289:9)
at Socket.Readable.push (node:internal/streams/readable:228:10)
at TCP.onStreamRead (node:internal/stream_base_commons:190:23) {
length: 163,
severity: 'ERROR',
code: '55000',
detail: undefined,
hint: 'More details may be available in the server log.',
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'parallel.c',
line: '826',
routine: 'WaitForParallelWorkersToFinish'
}
Anyone have any ideas on what causes this?
=======UPDATE=======
Tried adding a try/catch to release the connection as well:
if( uvDb != -1 ) {
  const dbT = await uvDb.connect()
  try {
    ... do various db queries ...
    dbT.release()
  } catch (err) {
    dbT.release()
  }
}
But now I get other errors...
"error: lost connection to parallel worker"
"error: parallel worker failed to initialize"

Related

Postgres: Error 28P01 and non-asked user login

I have a NodeJS app that runs fine on my dev machine, but in production it has a weird behaviour: it asks for a user that I never specified!
Here is my .env file:
PGUSER=postgres
PGHOST=my.domain
PGPASSWORD=my.passwd
PGDATABASE=my.dbase
PGPORT=5432
As I said, it runs fine on my machine, but when I try to run it on my AWS Lightsail VPS it crashes:
/home/ubuntu/apps/bounce/node_modules/pg-protocol/dist/parser.js:287
const message = name === 'notice' ? new messages_1.NoticeMessage(length, messageValue) : new messages_1.DatabaseError(messageValue, length, name);
^
error: password authentication failed for user "ubuntu"
at Parser.parseErrorMessage (/home/ubuntu/apps/bounce/node_modules/pg-protocol/dist/parser.js:287:98)
at Parser.handlePacket (/home/ubuntu/apps/bounce/node_modules/pg-protocol/dist/parser.js:126:29)
at Parser.parse (/home/ubuntu/apps/bounce/node_modules/pg-protocol/dist/parser.js:39:38)
at Socket.<anonymous> (/home/ubuntu/apps/bounce/node_modules/pg-protocol/dist/index.js:11:42)
at Socket.emit (node:events:527:28)
at addChunk (node:internal/streams/readable:315:12)
at readableAddChunk (node:internal/streams/readable:289:9)
at Socket.Readable.push (node:internal/streams/readable:228:10)
at TCP.onStreamRead (node:internal/stream_base_commons:190:23) {
length: 102,
severity: 'FATAL',
code: '28P01',
detail: undefined,
hint: undefined,
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'auth.c',
line: '330',
routine: 'auth_failed'
}
The weird thing: I never referenced the "ubuntu" user in my code - I'm using "postgres", as in my .env file. I tried creating an "ubuntu" user/password and tested it with PgAdmin and BeeKeeper - in both apps I can access Postgres, but I couldn't do it through my NodeJS app hosted online.
My pg_hba.conf is here:
local all postgres peer
local all ubuntu trust
local all all md5
host all all 0.0.0.0/0 md5
And my connection file is here:
require('dotenv').config();
const Pool = require('pg').Pool;

const conecta = new Pool({
  user: process.env.PGUSER,
  password: process.env.PGPASSWORD,
  database: process.env.PGDATABASE,
  host: process.env.PGHOST,
  port: process.env.PGPORT
});

module.exports = conecta;
Why does it insist on the "ubuntu" user? And why can't my NodeJS app connect when PgAdmin and BeeKeeper can?
Simple answer: pass the path to the .env file explicitly inside config():
require('dotenv').config({ path: '/path/to/your/.env' });
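For background, the "ubuntu" user shows up because node-postgres falls back to the operating-system account when no user is supplied, which is what happens when dotenv cannot find the .env file in the production working directory. A sketch of the connection file with an explicit path (the path is illustrative, based on the app directory in the stack trace) and a guard:
// Sketch: load .env by absolute path and fail loudly if it did not load,
// instead of silently falling back to the OS user ("ubuntu" on the VPS).
require('dotenv').config({ path: '/home/ubuntu/apps/bounce/.env' }); // illustrative path

if (!process.env.PGUSER) {
  throw new Error('.env not loaded: PGUSER is undefined');
}

const Pool = require('pg').Pool;

const conecta = new Pool({
  user: process.env.PGUSER,
  password: process.env.PGPASSWORD,
  database: process.env.PGDATABASE,
  host: process.env.PGHOST,
  port: process.env.PGPORT
});

module.exports = conecta;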

PG Pool: relation does not exist

I recently started coding in JavaScript. I've been trying to use the pg module to interact with my Postgres db, but it keeps saying the relation does not exist even though everything is okay and the table does exist. Any help will be appreciated.
const Pool = require('pg').Pool

const pool = new Pool({
  user: 'postgres',
  host: 'localhost',
  database: 'test',
  password: 'test#123',
  port: '5432',
})

pool.query("SELECT * FROM clients", (error, results) => {
  if (error) throw error
  console.log(results)
})

module.exports = pool
Error:
error: relation "clients" does not exist
at Parser.parseErrorMessage (C:\Users\Maaz\Desktop\Code\acube_backend\node_modules\pg-protocol\dist\parser.js:287:98)
at Parser.handlePacket (C:\Users\Maaz\Desktop\Code\acube_backend\node_modules\pg-protocol\dist\parser.js:126:29)
at Parser.parse (C:\Users\Maaz\Desktop\Code\acube_backend\node_modules\pg-protocol\dist\parser.js:39:38)
at Socket.<anonymous> (C:\Users\Maaz\Desktop\Code\acube_backend\node_modules\pg-protocol\dist\index.js:11:42)
at Socket.emit (node:events:513:28)
at addChunk (node:internal/streams/readable:315:12)
at readableAddChunk (node:internal/streams/readable:289:9)
at Socket.Readable.push (node:internal/streams/readable:228:10)
at TCP.onStreamRead (node:internal/stream_base_commons:190:23) {
length: 106,
severity: 'ERROR',
code: '42P01',
detail: undefined,
hint: undefined,
position: '15',
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'parse_relation.c',
line: '1384',
routine: 'parserOpenTable'
}
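As a diagnostic (not part of the original post), it can help to confirm which database and schema the pool actually landed in before assuming the table should be visible:
// Diagnostic sketch: print the database and schema this pool is connected to.
pool.query('SELECT current_database() AS db, current_schema() AS schema', (error, results) => {
  if (error) throw error
  console.log(results.rows) // e.g. [ { db: 'test', schema: 'public' } ]
})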

Azure Functions EventhubTrigger - database inserts

I implemented some code in Azure Functions which is triggered by an Event Hub.
When triggered, I want to insert the data into an Azure SQL database.
I got my code running and the bulk insert is working, but I often get a RequestError timeout.
Could somebody give me some advice on how to implement this use case the right way with Azure Functions? The function is actually triggered pretty often because of the data that is sent to the Event Hub.
[2022-03-30T13:13:07.464Z] Executing 'Functions.hotDatatoSql' (Reason='(null)', Id=a329b220-6ef6-4d75-9c15-beed6d7375cb)
[2022-03-30T13:13:07.466Z] Trigger Details: PartionId: 0, Offset: 85929768072-85930225256, EnqueueTimeUtc: 2022-03-29T16:46:03.5990000Z-2022-03-29T16:47:49.0860000Z, SequenceNumber: 351947-352202, Count: 256
[2022-03-30T13:13:23.109Z] RequestError: Timeout: Request failed to complete in 15000ms
[2022-03-30T13:13:23.111Z] Executed 'Functions.hotDatatoSql' (Succeeded, Id=a329b220-6ef6-4d75-9c15-beed6d7375cb, Duration=15717ms)
[2022-03-30T13:13:23.113Z] at BulkLoad.done [as callback] (C:\Home\Software\azure_functions\node_modules\mssql\lib\tedious\request.js:307:19)
[2022-03-30T13:13:23.142Z] at Parser.<anonymous> (C:\Home\Software\azure_functions\node_modules\tedious\lib\connection.js:2910:26)
[2022-03-30T13:13:23.145Z] at Object.onceWrapper (node:events:509:28)
[2022-03-30T13:13:23.148Z] at Parser.emit (node:events:390:28)
[2022-03-30T13:13:23.150Z] at Readable.<anonymous> (C:\Home\Software\azure_functions\node_modules\tedious\lib\token\token-stream-parser.js:32:12)
[2022-03-30T13:13:23.152Z] at Readable.emit (node:events:390:28)
[2022-03-30T13:13:23.153Z] at endReadableNT (node:internal/streams/readable:1343:12)
[2022-03-30T13:13:23.162Z] at processTicksAndRejections (node:internal/process/task_queues:83:21) {
[2022-03-30T13:13:23.164Z] code: 'ETIMEOUT',
[2022-03-30T13:13:23.167Z] originalError: RequestError: Timeout: Request failed to complete in 15000ms
My Azure Function Code:
const mssql = require('mssql');
const { get } = require('./pool-manager')

const config = {
  user: "...",
  password: "...",
  server: '....',
  database: '...',
  pool: {
    max: 10,
    min: 0,
    idleTimeoutMillis: 30000
  },
  options: {
    encrypt: true,
    trustServerCertificate: true
  }
};

const table = new mssql.Table('dbo.testTable');
table.columns.add('row1', mssql.VarChar(512), {nullable: true});
table.columns.add('row2', mssql.DateTime2, {nullable: true});
table.columns.add('row3', mssql.NVarChar(mssql.MAX), {nullable: true});

module.exports = async function (context, eventHubMessages) {
  const pool = await get('default', config);
  eventHubMessages.forEach((message, index) => {
    table.rows.add(message.id, message.time, JSON.stringify(message.data));
  });
  const request = new mssql.Request(pool);
  try {
    let result = await request.bulk(table);
    //console.log(result);
  } catch (err) {
    console.log(err);
  }
};
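One thing worth noting about the code above (an observation, not part of the original question or answer): the mssql.Table and its rows live at module scope, so rows added in one invocation are still in the table and re-sent on every later invocation, and each bulk insert grows. A sketch that builds the table per invocation instead:
module.exports = async function (context, eventHubMessages) {
  const pool = await get('default', config);

  // Build a fresh Table per invocation so previously inserted rows
  // are not re-sent on the next trigger.
  const table = new mssql.Table('dbo.testTable');
  table.columns.add('row1', mssql.VarChar(512), { nullable: true });
  table.columns.add('row2', mssql.DateTime2, { nullable: true });
  table.columns.add('row3', mssql.NVarChar(mssql.MAX), { nullable: true });

  eventHubMessages.forEach((message) => {
    table.rows.add(message.id, message.time, JSON.stringify(message.data));
  });

  const request = new mssql.Request(pool);
  try {
    await request.bulk(table);
  } catch (err) {
    context.log(err); // log through the Functions context
  }
};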
To achieve the above requirement, as suggested by Peter Bons, we need to increase the timeout, as the default timeout is 15 seconds.
For example, to increase the idle timeout:
const config = {
  user: '...',
  password: '...',
  server: 'localhost',
  database: '...',
  pool: {
    max: 10,
    min: 0,
    idleTimeoutMillis: 300000
  }
}
For more information you can refer to this SO thread as well.
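Note that the log above reports a request timeout rather than an idle-connection timeout, so if the bulk insert itself needs more than 15 seconds, the mssql requestTimeout option (which defaults to 15000 ms) is the setting to raise. A sketch of the same config with it increased (the 60000 value is illustrative, not tuned):
const config = {
  user: '...',
  password: '...',
  server: '...',
  database: '...',
  requestTimeout: 60000, // give long-running bulk inserts more than the 15000 ms default
  pool: {
    max: 10,
    min: 0,
    idleTimeoutMillis: 30000
  },
  options: {
    encrypt: true,
    trustServerCertificate: true
  }
};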

Connecting to PostgreSQL hosted online using NodeJS

I'm currently trying to build a query against the database and write the results into a CSV file, however I am unable to connect to the PSQL host programmatically. What I am trying to do is:
Connect to the DB and query results
Push results to an Excel file
Continue()
SFTP the results to myself on the SFTP server and place the file in a directory.
I am able to connect to the Postgres DB manually with the following in the CLI:
ssh username@xx.xx.xx.xx // Doesn't need a password because my id_rsa key is stored on the server
psql -U username -h LOCALHOST -p 5432 -d databasename pass- password (manually input)
Furthermore, connecting through Visual Studio Code works as well, however I need to connect to the server (Remote Connection) and then connect to the database using a Postgres driver.
After investigating it, I figured I first need to connect to the server using SSH; then and only then will I be allowed to access the database.
This is how I approached it in code:
Index.js
const serverConnectionParams = require('./src/config/serverConn');

function testConnectionServer() {
  try {
    serverConnectionParams.connectToServer();
  } catch (err) {
    console.error(err);
  }
}

testConnectionServer();
serverConn.js
const { Client } = require('ssh2');
const { readFileSync } = require('fs');
const databaseConnectionParams = require('./databaseConn');

function connectToServer() {
  const conn = new Client();
  conn.on('ready', () => {
    console.log('Client :: ready');
    conn.exec('uptime', (err, stream) => {
      if (err) throw err;
      databaseConnectionParams.auth(); // This is the database connection param
      stream.on('data', (data) => {
        console.log('STDOUT: ' + data);
      }).stderr.on('data', (data) => {
        console.log('STDERR: ' + data);
      });
    });
  }).connect({
    host: 'xx.xx.xx.xx',
    username: 'username',
    privateKey: readFileSync('src/key/id_rsa')
  });
}

exports.connectToServer = connectToServer;
databaseConn.js
const { readFileSync } = require('fs');
const envParam = require('./env.js');
const { Sequelize } = require('sequelize');

const sequelize = new Sequelize(envParam.database, envParam.username, envParam.password, {
  host: envParam.host,
  dialect: envParam.dialect,
  ssl: true,
  pool: {
    max: envParam.pool.max,
    min: envParam.pool.min,
    acquire: envParam.pool.aquire,
    idle: envParam.pool.idle
  }
});

async function auth() {
  try {
    console.log('trying to connect')
    sequelize.validate();
  } catch (error) {
    console.error('Unable to connect to the database:', error);
  }
}

exports.auth = auth;
env.js
const env = {
  database: 'databasename',
  username: 'username',
  password: 'password',
  host: 'ip#',
  dialect: 'postgres',
  pool: {
    max: 5,
    min: 0,
    aquire: 30000,
    idle: 10000
  }
};

module.exports = env;
After running node index.js I receive the following error:
Client :: ready
trying to connect
STDOUT: 10:43:09 up 1:21, 1 user, load average: 5.71, 6.03, 5.15
C:\Users\~\node_modules\sequelize\lib\dialects\postgres\connection-manager.js:184
reject(new sequelizeErrors.ConnectionError(err));
^
ConnectionError [SequelizeConnectionError]: no pg_hba.conf entry for host "xx.xx.xx.xx", user "username", database "databasename", SSL off
at Client._connectionCallback
{
parent: error: no pg_hba.conf entry for host "xx.xx.xx.xx", user "username", database "databasename", SSL off
at Parser.parseErrorMessage
{
length: 154,
severity: 'FATAL',
code: '28000',
detail: undefined,
hint: undefined,
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'auth.c',
line: '490',
routine: 'ClientAuthentication'
},
original: error: no pg_hba.conf entry for host "x.x.x.x", user "username", database "password", SSL off
at Parser.parseErrorMessage (C:\Users\~\node_modules\pg-protocol\dist\parser.js:287:98)
at Parser.handlePacket (C:\Users\~\node_modules\pg-protocol\dist\parser.js:126:29)
at Parser.parse (C:\Users\~\node_modules\pg-protocol\dist\parser.js:39:38)
at Socket.<anonymous> (C:\Users\~\node_modules\pg-protocol\dist\index.js:11:42)
at Socket.emit (node:events:394:28)
at addChunk (node:internal/streams/readable:315:12)
at readableAddChunk (node:internal/streams/readable:289:9)
at Socket.Readable.push (node:internal/streams/readable:228:10)
at TCP.onStreamRead (node:internal/stream_base_commons:199:23) {
length: 154,
severity: 'FATAL',
code: '28000',
detail: undefined,
hint: undefined,
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'auth.c',
line: '490',
routine: 'ClientAuthentication'
}
}
Investigating the Error Code: 28000
Found this link explaining the issue as an authentication attempt failure
https://help.heroku.com/DR0TTWWD/seeing-fatal-no-pg_hba-conf-entry-errors-in-postgres
Also found several solutions online saying pg_hba.conf needs to use md5 and that Postgres then needs to be restarted (not tried, as I cannot restart the Postgres service)
error: Ident authentication failed for user
Found another solution explaining it was an SSL issue (tried it and it didn't work)
Node.js, PostgreSQL error: no pg_hba.conf entry for host
After using SSL it would change the error code to the following:
SequelizeConnectionError: self signed certificate
Found a solution to that here:
SequelizeConnectionError: self signed certificate
After I put that in, it would give me a different error saying rejectUnauthorized is deprecated and a very old version (can't seem to reproduce the error code at the moment).
So my hands are tied at the moment, any help will be great!
I've also tried using different JavaScript modules instead of Sequelize, however they all have the same authentication issue.
I also tried to pass my id_rsa key, however it wouldn't solve my issue at all.
My assumption is that even though I am passing the connToDatabase function inside the SSH connection, it is still looking for the IP in the wrong location. (The IP of the DB on the server is 192.168.31.4.)
But when using that IP it will say ERR Connection Timed Out.
Another assumption I have is that the database has many restrictions on connecting and requires further params.
UPDATE:
I tried editing the pg_hba.conf file through remote access in VS Code, however it would give me a "cannot read file" error.
Any help would be great!
I had completely forgotten that I posted this question.
The solution was pretty straightforward: after investigating it for a while I realized I was making a mistake in the connection params.
For others who require help with such issues I will post the solution in a simple manner.
Basically, I first needed to SSH to the server and add a tunnel to my connection in order to connect to the database. Only then will my Sequelize params for the database go through, because I have fully connected to the server and the internal PostgreSQL database.
So TLDR
SSH -> Tunnel -> Sequelize
ssh(10.x.x.1, etc) -> addTunnel(localhost, etc) -> sequelize(databaseName, etc)
EDIT: Added Code for reference
const Ssh2Promise = require('ssh2-promise');
const { readFileSync } = require('fs');

async function connectToServer(callback) {
  const ssh = new Ssh2Promise({
    host: '10.x.x.1',
    username: 'usernameofssh',
    privateKey: readFileSync('src/key/id_rsa'), // This is only for the RSA fingerprint; if you do not have said fingerprint you can use "passphrase" with your password instead
  });
  const tunnel = await ssh.addTunnel({
    remoteAddr: '192.168.0.1', // This is the database connection IP; once connected to it you can fetch from LOCALHOST. In case it's AWS it would be test.test-test.amazonaws.com
    remotePort: 1234, // Port for the connection
    localPort: 1234,
  });
  // Don't forget to throw it in a try/catch for feedback
  await auth(callback); // Send it wherever you need it to go
}
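For completeness (this part is my addition, not from the original answer): once addTunnel is listening on localPort, Sequelize should point at the local end of the tunnel rather than at the database's internal IP. A sketch, with placeholder names and ports:
const { Sequelize } = require('sequelize');

// Sketch only: database name, credentials and port 1234 are placeholders.
const sequelize = new Sequelize('databaseName', 'dbUser', 'dbPassword', {
  host: '127.0.0.1', // the tunnel's local end, not the remote/internal IP
  port: 1234,        // must match the localPort passed to addTunnel
  dialect: 'postgres',
});

async function auth(callback) {
  try {
    await sequelize.authenticate(); // verifies the connection through the tunnel
    if (callback) callback();
  } catch (error) {
    console.error('Unable to connect to the database:', error);
  }
}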

Node.js and PostgreSQL with Express

I've written code to fetch data from a Postgres database using Express and Node.js. The database is connected to the backend code, but it still shows an error saying the relation does not exist.
Please check the code and the error that I got:
var express = require('express');
var pg = require("pg");
var app = express();

var connectionString = "postgres://postgres:postgres@localhost:5432/testDatabase?currentSchema=testSchema";

app.get('/', function(req, res, next) {
  pg.connect(connectionString, function(err, client, done) {
    if (err) {
      console.log("Not able to get Connection " + err);
      res.status(400).send(err);
    } else {
      console.log("Connected.!");
    }
    const query = {
      // give the query a unique name
      name: 'fetch-user-details',
      text: 'SELECT * FROM loginTable',
    }
    client.query(query, function(err, result) {
      done(); // closing the connection;
      if (err) {
        console.log(err);
        res.status(400).send(err);
      }
      res.status(200).send(result.rows);
    });
  });
});

app.listen(4000, function() {
  console.log('Server is running.. on Port 4000');
});
And the error response that I got:
Server is running.. on Port 4000
Connected.!
{ error: relation "logintable" does not exist
at Connection.parseE (C:\Users\sumit_srivastava\Desktop\Java_WorkSpace\SimpleNodeAppln\node_modules\pg\lib\connection.js:554:11)
at Connection.parseMessage (C:\Users\sumit_srivastava\Desktop\Java_WorkSpace\SimpleNodeAppln\node_modules\pg\lib\connection.js:381:17)
at Socket.<anonymous> (C:\Users\sumit_srivastava\Desktop\Java_WorkSpace\SimpleNodeAppln\node_modules\pg\lib\connection.js:117:22)
at emitOne (events.js:116:13)
at Socket.emit (events.js:211:7)
at addChunk (_stream_readable.js:263:12)
at readableAddChunk (_stream_readable.js:250:11)
at Socket.Readable.push (_stream_readable.js:208:10)
at TCP.onread (net.js:594:20)
name: 'error',
length: 120,
severity: 'ERROR',
code: '42P01',
detail: undefined,
hint: undefined,
position: '15',
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'src\\backend\\parser\\parse_relation.c',
line: '986',
routine: 'parserOpenTable' }
I just ran this db dump on my Postgres:
CREATE SCHEMA "testSchema";
ALTER SCHEMA "testSchema" OWNER TO postgres;
SET search_path = "testSchema", pg_catalog;
SET default_tablespace = '';
SET default_with_oids = false;
CREATE TABLE "loginTable" ( "ID" numeric NOT NULL, "Name" text );
ALTER TABLE "loginTable" OWNER TO postgres;
This was the problem. Change your query to this:
query = {
  // give the query a unique name
  name: 'fetch-user-details',
  text: `SELECT * FROM "testSchema"."loginTable"`,
}
You should mention the schema name.
Tested on:
Ubuntu
Postgres 10.6
pg: 6.4.2
Node v10.14.1
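Alternatively, instead of schema-qualifying every query, the search_path can be set once per connection; a sketch in the same pg 6.x callback style (as far as I can tell, the ?currentSchema=... parameter in the connection string is a JDBC convention and is not honoured by the pg module):
// Sketch (inside the route handler): set the schema once, then unqualified
// queries resolve against "testSchema".
client.query('SET search_path TO "testSchema"', function(err) {
  if (err) { done(); return res.status(400).send(err); }
  client.query('SELECT * FROM "loginTable"', function(err, result) {
    done(); // closing the connection
    if (err) return res.status(400).send(err);
    res.status(200).send(result.rows);
  });
});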
