I have tried pg and other modules and packages to connect to Redshift (PostgreSQL) from Node and Meteor.
Below is my most recent code, written in Node. It is unable to connect to Redshift; the client.connect call never completes.
But if I connect to some other PostgreSQL server, like localhost or another remote server, the code works as expected.
The same problem occurs with Meteor.
var pg = require('pg');

var conString = "postgres://User:Password@EndPoint/Database";
//var conString = "postgres://postgres:postgres@localhost/postgres";

console.log("Started...");

var client = new pg.Client(conString);
console.log("Client", client);

client.connect(function(err) {
    if (err) {
        return console.error('could not connect to postgres', err);
    }
    client.query('SELECT NOW() AS "theTime"', function(err, result) {
        if (err) {
            return console.error('error running query', err);
        }
        console.log(result.rows[0].theTime);
        //output: Tue Jan 15 2013 19:12:47 GMT-600 (CST)
        client.end();
    });
});

console.log("End...");
I want to connect through Meteor, but if that is not possible, plain Node.js will also work.
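For reference, this is the kind of explicit configuration I would expect to need for Redshift (the endpoint, database name, and credentials are placeholders; port 5439 and the SSL flag are assumptions based on Redshift defaults):

var pg = require('pg');

// Placeholder values; Redshift listens on 5439 by default and usually requires SSL.
var client = new pg.Client({
    user: 'User',
    password: 'Password',
    host: 'my-cluster.xxxxxxxx.us-east-1.redshift.amazonaws.com', // placeholder endpoint
    port: 5439,
    database: 'Database',
    ssl: true
});

client.connect(function(err) {
    if (err) {
        return console.error('could not connect to redshift', err);
    }
    console.log('connected');
    client.end();
});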
Related
I am using nodemon to restart my node application whenever changes are made. My problem is that every time I make a code change, both the web server and the DB server restart. I am using an Oracle DB. Below is my app.js code:
const webServer = require('./services/web-server.js');
const database = require('./services/database.js');
const dbConfig = require('./config/database.js');
const defaultThreadPoolSize = 4;
async function startup() {
    console.log('Starting application');

    // Initializing web server module
    try {
        console.log('Initializing web server module');
        await webServer.initialize();
    } catch (err) {
        console.error(err);
        process.exit(1); // Non-zero failure code
    }

    // Initializing the Oracle DB
    try {
        console.log('Initializing database module');
        await database.initialize();
    } catch (err) {
        console.error(err);
        process.exit(1); // Non-zero failure code
    }

    // Stopping Oracle DB (currently commented out)
    /*try {
        console.log('Closing database module');
        await database.close();
    } catch (err) {
        console.log('Encountered error', e);
        err = err || e;
    }*/
}

startup();
In services/web-server.js, I am creating an HTTP server like below:
httpServer = http.createServer(app);
In services/database.js, I am creating a connection pool for Oracle:
const pool = await oracledb.createPool(dbConfig.hrPool);
Please suggest how I can restart only the web server with nodemon. I don't want the DB connection to restart every time...
Your database server doesn't restart every time you restart your Node application.
What's happening is that Node re-creates the connection to the database every time it restarts, which is normal and can't be avoided.
You can check this by connecting to your Oracle server and running queries against it while your Node application is stopped.
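As a quick way to verify this, you can run a small standalone script against the same database while the app is down (a minimal sketch; the credentials and connect string are placeholders and would come from your own config):

// check-db.js - minimal sketch using node-oracledb to confirm the DB server is up
const oracledb = require('oracledb');

async function check() {
    let connection;
    try {
        connection = await oracledb.getConnection({
            user: 'hr',                            // placeholder credentials
            password: 'hr_password',
            connectString: 'db-host:1521/XEPDB1'   // placeholder connect string
        });
        const result = await connection.execute('SELECT SYSDATE FROM DUAL');
        console.log('Database is reachable:', result.rows[0]);
    } finally {
        if (connection) {
            await connection.close();
        }
    }
}

check();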
I cannot connect to MongoDB.
This is my code:
var MongoClient = require('mongodb').MongoClient,
    format = require('util').format;

MongoClient.connect('mongodb://127.0.0.1:27017', function(err, db) {
    if (err) {
        throw err;
    } else {
        console.log("daniel is connected");
    }
});
The console log says I am connected when I run 'npm start'. However, when I go to 'http://localhost:3000/api', which is the URL I set up for the DB, I get an error page saying 'localhost refused to connect.'
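For reference, this is the general shape of server I am aiming for, with /api backed by the Mongo connection (a simplified sketch assuming Express; not my exact code):

var express = require('express');
var MongoClient = require('mongodb').MongoClient;

var app = express();

MongoClient.connect('mongodb://127.0.0.1:27017', function(err, db) {
    if (err) {
        throw err;
    }
    console.log("daniel is connected");

    app.get('/api', function(req, res) {
        res.json({ ok: true }); // placeholder response
    });

    // The HTTP server has to be listening, or localhost:3000 will refuse connections.
    app.listen(3000, function() {
        console.log('API listening on http://localhost:3000/api');
    });
});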
We have a somewhat unusual integration that we are working on: AWS Lambda function -> Oracle 11g RAC (on-prem).
We chose AWS Lambda with a Node v8 runtime and therefore initially tried node-oracledb as the driver. There were many challenges in establishing the connection; a fork of node-oracledb, oracledb-for-lambda, eventually made this work between a Lambda function within AWS and a simple Oracle DB within AWS.
However, the code broke with the following error when tried in the original environment, where it connects to the on-premise Oracle 11g RAC cluster:
ORA-21561: OID generation failed
VPC[{AWS Node Lambda}] -> Direct Connect -> On prem n/w -> Oracle RAC Cluster
Additional Notes:
Added HOSTALIASES file for name resolution
'use strict';

var oracledb = require('oracledb-for-lambda');
var os = require('os');
var fs = require('fs');

// HOSTALIASES workaround: map the Lambda hostname to localhost for name resolution
var str_host = os.hostname() + ' localhost\n';
fs.writeFileSync(process.env.HOSTALIASES, str_host);

var connAttr = {
    user: "user",
    password: "pass",
    connectString: "connection string"
};

// `log` is an external logger defined elsewhere in the Lambda
oracledb.getConnection(connAttr, function (err, connection) {
    if (err) {
        log.error("Error Log>>>>>: " + err.message);
        return;
    }
    log.info('Connection was successful! ' + connection);
    connection.close(function (err) {
        if (err) {
            log.error('Error while closing connection: ' + err.message);
            return;
        }
    });
});
Make sure the connection string you have given is in the format below:
//server-ip:port/database_name
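For example, the connAttr from the question would look something like this (the host, port, and service name below are placeholders, not the real environment values):

var connAttr = {
    user: "user",
    password: "pass",
    // Easy Connect syntax: //server-ip:port/database_name
    connectString: "//10.0.0.5:1521/ORCLSERVICE" // placeholder host, port, and service
};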
I'm trying to implement a callback on a Node.js EC2 server that's interacting with AWS RDS PostgreSQL. I'm not quite sure how it's done. There seems to be an EventEmitter method within the AWS SDK's RDS module, but it's designed for all RDS instance types like MySQL, Aurora, etc., not specifically for Postgres. All I'm trying to do is get some kind of callback after an INSERT or DELETE query.
It doesn't matter whether your Postgres is RDS or standalone on EC2.
You will need something like:
var pg = require('pg');
var dbe = { "result": null };

function Q(sqlQuery, callback) {
    /* async, vulnerable, simple */
    var conString = "postgres://" + dbUser + ":" + dbPass + "@" + dbHost + ":" + dbPort + "/" + dbName + "?ssl=true";
    pg.connect(conString, function(err, client, done) {
        if (err) {
            return console.error('error fetching client from pool', err);
        }
        client.query(sqlQuery, function(err, result) {
            done(); // call `done()` to release the client back to the pool
            if (err) {
                return console.error('error running query', err);
            }
            dbe.result = result;
            //console.log(JSON.parse(result.setEncoding('utf8');));
            callback();
        });
    });
}
And call it like:
var res = Q('select now()', function(a) {console.log(dbe.result)});
or similar - I don't have a playground to test atm
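Along the same lines, an INSERT with a callback would look something like this (the table and column names are made up for illustration):

Q("INSERT INTO readings (sensor, value) VALUES ('temp', 21.5)", function() {
    // Runs after the INSERT has completed
    console.log('insert finished, rowCount =', dbe.result.rowCount);
});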
The basic idea of the following code is that I read messages off an ActiveMQ Artemis installation and insert them into a MongoDB instance.
It works well for up to a hundred or so messages per second, but crashes if I throw a few thousand at it. My first guess would be the constant opening and closing of database connections; a sketch of what I mean by reusing a single connection is after the code below. Should I also think about using an in-memory store and doing bulk database inserts?
The code is all running in Node using the mqtt and mongodb npm packages. The code below, the database, and the queue are all running in Docker containers, if that makes any difference.
var mqtt = require('mqtt'),
    client = mqtt.connect('mqtt://mq:1883', {
        username: "*************",
        password: "*************"
    }),
    MongoClient = require('mongodb').MongoClient,
    ObjectId = require('mongodb').ObjectID,
    assert = require('assert'),
    url = 'mongodb://db:27017/uo-readings';

client.on('connect', function () {
    client.subscribe('readings');
});

client.on('error', function (error) {
    console.log(error);
});

client.on('message', function (topic, message) {
    console.log(message.toString());
    MongoClient.connect(url, function (err, db) {
        assert.equal(null, err);
        console.log("Connected correctly to server.");
        db.collection('readings').insertOne(JSON.parse(message.toString()), function (err, result) {
            assert.equal(err, null);
            console.log("Inserted a document into the readings collection.");
        });
        client.end(function () {
            console.log("Closing Connection.");
            db.close();
        });
    });
});
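To illustrate that first guess, this is roughly the restructuring I have in mind: connect to MongoDB once and reuse that connection in the message handler (a sketch only, untested; it uses the same URL and collection names as above):

var mqtt = require('mqtt'),
    MongoClient = require('mongodb').MongoClient,
    url = 'mongodb://db:27017/uo-readings';

// Connect to MongoDB once, then start consuming messages.
MongoClient.connect(url, function (err, db) {
    if (err) throw err;

    var client = mqtt.connect('mqtt://mq:1883', {
        username: "*************",
        password: "*************"
    });

    client.on('connect', function () {
        client.subscribe('readings');
    });

    client.on('message', function (topic, message) {
        // Reuse the single db handle instead of opening a new connection per message.
        db.collection('readings').insertOne(JSON.parse(message.toString()), function (err) {
            if (err) console.error(err);
        });
    });
});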
See @Jonathan Muller's comment above.