Node.js error on OpenShift

I am trying to run the following script in the OpenShift SSH terminal:
var deployd = require('deployd');
var server = deployd({
port: process.env.PORT || 5000,
env: 'production',
db: {
host: 'ds030607.mongolab.com',
port: '30607',
name: 'kheapdata',
credentials: {
username: 'admin',
password: 'admin'
}
}
});
server.listen();
server.on('listening', function() {
console.log("Server is listening");
});
server.on('error', function(err) {
console.error(err);
process.nextTick(function() { // Give the server a chance to return an error
process.exit();
});
});
I run it like this: node production.js, but I am getting this error:
{ [Error: listen EACCES] code: 'EACCES', errno: 'EACCES', syscall: 'listen' }
Please help me resolve this issue.
Here is the new script:
//production.js
var deployd = require('deployd');
var server = deployd({
  // port: process.env.PORT || 5000,
  env: 'production',
  db: {
    host: '127.5.233.2',
    port: '27017',
    name: 'test',
    credentials: { username: 'admin', password: '4mA51PDYnL89' }
  }
});
server.listen('8080', '127.5.233.1');
server.on('listening', function() {
  console.log("Server is listening");
});
server.on('error', function(err) {
  console.error(err);
  process.nextTick(function() {
    // Give the server a chance to return an error
    process.exit();
  });
});

It looks like you're using the wrong env variable: port: process.env.PORT || 5000. Try sshing into your gear and running env to locate the right environment variable. Since it looks like you're trying to connect to Mongo, try running env | grep -i mongo in your gear.

You need to bind to process.env.OPENSHIFT_NODEJS_IP and process.env.OPENSHIFT_NODEJS_PORT. Your script appears to be binding to 0.0.0.0, which is not allowed on OpenShift, and possibly to the wrong port as well. Also, since you are connecting to an outside MongoDB service, you won't have any Mongo environment variables in your gear.
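For reference, here is an untested sketch of what that change could look like, reusing the question's own deployd setup and assuming listen(port, host) behaves the way the second script already calls it (the fallback values are only for running outside the gear):
// production.js (sketch only)
var deployd = require('deployd');

var server = deployd({
  env: 'production',
  db: {
    host: 'ds030607.mongolab.com', // external MongoLab instance from the question
    port: 30607,
    name: 'kheapdata',
    credentials: { username: 'admin', password: 'admin' }
  }
});

// OpenShift injects the IP and port the app is allowed to bind to
server.listen(
  process.env.OPENSHIFT_NODEJS_PORT || 5000,
  process.env.OPENSHIFT_NODEJS_IP || '127.0.0.1'
);

server.on('listening', function() {
  console.log('Server is listening');
});
server.on('error', function(err) {
  console.error(err);
  process.nextTick(function() { process.exit(); });
});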

Related

Heroku error: "connect ECONNREFUSED 127.0.0.1:3306" when trying to deploy Express + React application

I was trying to deploy my Express + React application to Heroku. Heroku connected successfully with my Github account, then clicking "Deploy Branch" led to "Your app was successfully deployed". But when I went to view my website, it showed:
"Application error An error occurred in the application and your page could not be served. If you are the application owner, check your logs for details".
Here are my logs:
Starting process with command `npm start`
> myproject# start /app
> node backend/index.js
My project SQL server listening on PORT 4000
/app/backend/index.js:22
if (err) throw err;
^
Error: connect ECONNREFUSED 127.0.0.1:3306
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1141:16)
And the index.js which connects to MySQL:
const express = require('express');
const cors = require('cors');
const mysql = require('mysql');
const app = express();
app.use(cors());
app.get('/', (req, res) => {
res.send('go to /my-project to see my project')
});
const pool = mysql.createPool({
connectionLimit: 10,
host: 'localhost',
user: 'root',
password: 'myjs123#',
database: 'my-project',
debug: false
});
pool.getConnection((err, connection) => {
if (err) throw err;
app.get('/my-project', (req, res) => {
connection.query(SELECT_ALL_FACTS_QUERY, (err, results) => {
if (err) {
return res.send(err)
}
else {
return res.json({
data: results
})
};
});
});
});
const SELECT_ALL_FACTS_QUERY = 'SELECT * FROM `my-project`.`my-table`;';
app.listen(4000, () => {
console.log('My project SQL server listening on PORT 4000');
});
What did I do wrong and how could I deploy it?
I think localhost should not be used in the code below; localhost will not work in deployment:
const pool = mysql.createPool({
connectionLimit: 10,
//here
host: 'localhost',
user: 'root',
password: 'myjs123#',
database: 'my-project',
debug: false
});
Another mistake I found: you should use an environment variable to store the port number. In production the port number is assigned by Heroku; if it is not set, you can fall back to your own. So your code should be:
let port = process.env.PORT || 4000;
app.listen(port, () => {
console.log(`App running on port ${port} `);
});
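For the host fix, here is a minimal sketch of a deployment-friendly pool config that reads the connection details from the environment instead of localhost. JAWSDB_URL is the config var set by the JawsDB add-on covered in the next answer; the DB_* fallbacks are placeholder names for local development:
const mysql = require('mysql');

// Parse mysql://user:pass@host:port/db when running on Heroku with JawsDB,
// otherwise fall back to local development settings.
let poolConfig;
if (process.env.JAWSDB_URL) {
  const dbUrl = new URL(process.env.JAWSDB_URL);
  poolConfig = {
    connectionLimit: 10,
    host: dbUrl.hostname,
    port: Number(dbUrl.port),
    user: decodeURIComponent(dbUrl.username),
    password: decodeURIComponent(dbUrl.password),
    database: dbUrl.pathname.slice(1)
  };
} else {
  poolConfig = {
    connectionLimit: 10,
    host: 'localhost',
    user: process.env.DB_USER || 'root',
    password: process.env.DB_PASSWORD || '',
    database: process.env.DB_NAME || 'my-project'
  };
}

const pool = mysql.createPool(poolConfig);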
You need to add an add-on to your Heroku account and connect it to your app.
For example, you can use JawsDB MySQL,
with the following code for your connection:
// import the Sequelize constructor from the library
const Sequelize = require('sequelize');
require('dotenv').config();
let sequelize;
// when deployed on Heroku
if (process.env.JAWSDB_URL) {
sequelize = new Sequelize(process.env.JAWSDB_URL);
} else {
// localhost
sequelize = new Sequelize(process.env.DB_NAME,
process.env.DB_USER,
process.env.DB_PASSWORD, {
host: 'localhost',
dialect: 'mysql',
port: 3306
});
}
module.exports = sequelize;
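As a quick sanity check after wiring that up (a sketch; the require path is just a placeholder for wherever the file above lives), sequelize.authenticate() will tell you whether the JawsDB connection actually works:
const sequelize = require('./config/connection'); // placeholder path

sequelize.authenticate()
  .then(() => console.log('MySQL connection OK'))
  .catch((err) => console.error('Unable to connect to MySQL:', err));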
It got past this stage after I removed if (err) throw err;, though I'm still not sure why that happened.
Nithin's answer was taken into account too.
The same error happened to me while I was trying to connect with the Heroku CLI. I read the Heroku docs on proxy configuration and that was the cause; the problem was solved by configuring the HTTP and HTTPS proxy like this:
set HTTP_PROXY=http://proxy.server.com:portnumber
or set HTTPS_PROXY=https://proxy.server.com:portnumber

Problem connecting to my Postgres with Node.js through knex

I am facing a problem connecting to my Postgres database from Node.js through knex. I am trying this for the first time and would humbly ask for help resolving the issue.
My code is the following. Every time I make a request, PostgreSQL doesn't connect, so nothing happens.
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
app.use(bodyParser.json());
const bcrypt = require('bcrypt-nodejs');
const cors = require('cors');
const knex = require('knex')
const db = knex({
client: 'pg',
connection: {
host: '127.0.0.1',
user: 'postgres',
password: '',
database: 'smart-brain'
}
});
db.select('*').from('users').then(console.log).catch(console.log);
app.use(cors());
app.post('/signin', (req, res) => {
if (req.body.email === database.users[0].email &&
req.body.password === database.users[0].password) {
res.json('success');
} else {
res.status(400).json('error logging in');
}
})
app.post('/register', (req, res) => {
const {
name,
email,
password
} = req.body;
db('users')
.returning('*')
.insert({
email: email,
name: name,
joined: new Date()
})
.then(response => {
res.json(response);
})
.catch(err => res.status(400).json('unable to register'))
})
app.listen(3000, () => {
console.log('app is running on the port 3000');
});
and this is the error in the npm output:
Error: connect ECONNREFUSED 127.0.0.1:5432
    at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1141:16) {
  errno: 'ECONNREFUSED',
  code: 'ECONNREFUSED',
  syscall: 'connect',
  address: '127.0.0.1',
  port: 5432
}
If you are on Ubuntu, go to the following folder:
/etc/postgresql/{your_pg_version}/main
Or if you are on Windows, go to the following folder:
C:\Program Files\PostgreSQL\{your_pg_version}\data\
Open the file pg_hba.conf for writing with superuser/administrative permission.
Go to the bottom and put trust at the end of the following lines:
# Database administrative login by Unix domain socket
local   all   postgres                       trust
# "local" is for Unix domain socket connections only
local   all   all                            trust
# IPv4 local connections:
host    all   all        127.0.0.1/32        trust
After that, restart your PostgreSQL server and try again with your code.
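To confirm the change took effect, a one-off query against the db instance from the question (just a sketch reusing the knex object defined above) should now resolve instead of failing with ECONNREFUSED:
db.raw('SELECT 1 + 1 AS result')
  .then(() => console.log('PostgreSQL connection OK'))
  .catch((err) => console.error('Still cannot reach PostgreSQL:', err));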

Connect to a remote server's MongoDB via SSH through mongoose in Node.js using tunnel-ssh

I was trying to connect to MongoDB on a remote server through SSH, and made the configuration as follows:
import tunnel from 'tunnel-ssh';
const config = {
username: 'username',
Password: 'password',
host: process.env.SSH_SERVER, //192.168.9.104
port: 22,
dstHost: 'localhost',
dstPort: process.env.DESTINATION_PORT, //27017
localHost: '127.0.0.1',
localPort: 27018
};
This is the config I have defined; I need to connect to the remote server 192.168.9.104, so that address is used as the SSH host, with its username and password provided. The connection is then made as follows:
class DB {
initDB() {
tunnel(config, (error, server) => {
if (error) {
console.log('SSH connection error: ' + error);
}
const url = 'mongodb://127.0.0.1:27018/myDBname';
mongoose.connect(url, { useNewUrlParser: true });
mongoose.plugin(toJson);
mongoose.plugin(setProperties);
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'DB connection error:'));
db.once('open', function() {
console.log('DB connection successful');
});
});
}
}
But when the initDB() function is called, the following error pops up:
events.js:183
throw er; // Unhandled 'error' event
^
Error: All configured authentication methods failed
I am not able to figure out where the config goes wrong. I have tried using 127.0.0.1 for dstHost, as well as 192.168.9.104, but the error persists. Kevin Lee suggests a similar approach; this question is used as an example.
There was an error in the documentation, which suggested the config as mentioned above with the key "Password"; it should be "password", so the config would look something like this:
const config = {
username: 'username',
password: 'password',
host: process.env.SSH_SERVER, //192.168.9.104
port: 22,
dstHost: 'localhost',
dstPort: process.env.DESTINATION_PORT, //27017
localHost: '127.0.0.1',
localPort: 27018
};
The rest of the implementation is spot on and tested.
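One small hardening worth adding while you are in there (a sketch built on the question's own callback, no new API assumed): return early when the tunnel itself fails, so mongoose never tries to connect through a tunnel that did not come up.
tunnel(config, (error, server) => {
  if (error) {
    console.log('SSH connection error: ' + error);
    return; // don't attempt the DB connection without a working tunnel
  }
  const url = 'mongodb://127.0.0.1:27018/myDBname';
  mongoose.connect(url, { useNewUrlParser: true });
});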

heroku eureka-js-client node - Spring Cloud Netflix support

What I want to achieve is registering a Node.js service with my Eureka server on Heroku.
So far I can register a regular Eureka client with my Eureka server on Heroku without problems, but I am getting really confused with the configuration when trying the same with a Node.js service.
I thought eureka-js-client was the solution, but no success so far.
Code below.
const express = require('express');
const path = require('path');
const port = process.env.PORT || 3090;
const app = express();
app.use(express.static(__dirname));
const bodyParser = require('body-parser');
app.use(bodyParser.urlencoded({extended: false}));
app.use(bodyParser.json());
app.get('*', (req, res) => {
res.sendFile(path.resolve(__dirname, 'index.html'))
});
const Eureka = require('eureka-js-client').Eureka;
const eureka = new Eureka({
instance: {
app: 'sheila',
hostName: 'localhost',
ipAddr: '127.0.0.1',
statusPageUrl: 'http://localhost:5000',
healthCheckUrl: 'http://localhost:5000/health',
port: {
'$': 5000,
'#enabled': 'true',
},
vipAddress: 'myvip',
dataCenterInfo: {
'#Class': 'com.netflix.appinfo.InstanceInfo$DefaultDataCenterInfo',
name: 'MyOwn',
},
},
eureka: {
host: 'localhost',
port: 8761,
servicePath: '/eureka/apps/',
},
});
eureka.logger.level('debug');
eureka.start(function(error){
console.log(error || 'complete');
});
// ------------------ Server Config --------------------------------------------
var server = app.listen(5000, function () {
var port = server.address().port;
console.log('Listening at %s', port);
});
First I tried it locally, after running docker run -it -p 8761:8761 springcloud/eureka in my Docker console, but I get this error:
Problem making eureka request { [Error: connect ECONNREFUSED 127.0.0.1:8761]
code: 'ECONNREFUSED',
errno: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 8761 }
If I run it as-is as a Heroku service, it does not execute.
I also tried substituting the host with the URL of my Heroku Eureka server, but then I get 401 or 404 errors. The Eureka server requires a password, which I added in my Heroku client JS.
Any ideas?
You need to include an instanceId, which should be a unique identifier for this instance.
const eureka = new Eureka({
instance: {
instanceId:'sheila',
app: 'sheila',
hostName: 'localhost',
ipAddr: '127.0.0.1',
statusPageUrl: 'http://localhost:5000',
healthCheckUrl: 'http://localhost:5000/health',
port: {
'$': 5000,
'#enabled': 'true',
},
vipAddress: 'myvip',
dataCenterInfo: {
'#Class': 'com.netflix.appinfo.InstanceInfo$DefaultDataCenterInfo',
name: 'MyOwn',
},
},
eureka: {
host: 'localhost',
port: 8761,
servicePath: '/eureka/apps/',
},
});
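Separately from the instanceId, it may help to stop hard-coding localhost when the client runs on Heroku. A hedged sketch of the same config driven by environment variables (EUREKA_HOST, EUREKA_PORT and EUREKA_INSTANCE_HOSTNAME are hypothetical variable names you would set yourself, not something eureka-js-client defines):
const Eureka = require('eureka-js-client').Eureka;

const appPort = Number(process.env.PORT) || 5000;
const appHost = process.env.EUREKA_INSTANCE_HOSTNAME || 'localhost';

const eureka = new Eureka({
  instance: {
    instanceId: 'sheila-' + appPort,
    app: 'sheila',
    hostName: appHost,
    ipAddr: '127.0.0.1',
    statusPageUrl: 'http://' + appHost + ':' + appPort,
    healthCheckUrl: 'http://' + appHost + ':' + appPort + '/health',
    port: {
      '$': appPort,
      '#enabled': 'true',
    },
    vipAddress: 'myvip',
    dataCenterInfo: {
      '#Class': 'com.netflix.appinfo.InstanceInfo$DefaultDataCenterInfo',
      name: 'MyOwn',
    },
  },
  eureka: {
    // point at the real Eureka server instead of localhost:8761
    host: process.env.EUREKA_HOST || 'localhost',
    port: Number(process.env.EUREKA_PORT) || 8761,
    servicePath: '/eureka/apps/',
  },
});

eureka.start(function(error) {
  console.log(error || 'registered with Eureka');
});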

How to run multiple server labels on the same port using Node and hapi?

I am trying to host the API and web servers in the same Node codebase. I have used labels in order to apply configurations independently to each server, but only one server works. Below is the code:
var hapi = require('hapi');
// server definition
var server = new hapi.Server();
var runningPort = process.env.PORT || 3000;
// setting up connection
server.connection({
host: '0.0.0.0',
port: runningPort,
labels: ['api']
});
server.connection({
host: '0.0.0.0',
port: runningPort,
labels: ['web']
});
var webServer = server.select('web');
var apiServer = server.select('api');
// registering view engine
webServer.views({
engines: { html: require('handlebars') },
relativeTo: __dirname,
path: './views',
layoutPath: './views/layout',
layout: 'default',
partialsPath: './views/partials'
});
// registering hapi auth cookie and application authentication
webServer.register(
{
register: require('hapi-auth-cookie')
},
function (err) {
if (err) {
throw err;
}
var cache = webServer.cache({ segment: 'sessions', expiresIn: 3 * 24 * 60 * 60 * 1000 });
webServer.app.cache = cache;
webServer.auth.strategy('session', 'cookie', true, {
password: 'secret',
cookie: 'sid-example',
redirectTo: '/account/login',
isSecure: false
});
});
// registrations for api server
apiServer.register(
{
register: require('lout')
},
function (err) {
if (err) {
throw err;
}
});
apiServer.register(require('hapi-auth-bearer-token'), function (err) {
apiServer.auth.strategy('simple', 'bearer-access-token', {
allowQueryToken: true, // optional, true by default
allowMultipleHeaders: false, // optional, false by default
accessTokenName: 'access_token', // optional, 'access_token' by default
validateFunc: function( token, callback ) {
// For convenience, the request object can be accessed
// from `this` within validateFunc.
var request = this;
// Use a real strategy here,
// comparing with a token from your database for example
if(token === "1234"){
//## user object to be looked up here
callback(null, true, { token: token })
} else {
callback(null, false, { token: token })
}
}
});
});
//To do something to request before they passed on to routes
apiServer.ext('onRequest', function (request, reply) {
//## we can get user object here off of the authToken
utils.log('info', 'apiCall', {method: request.method, path: request.path})
return reply.continue();
});
// register routes
webServer.route(webRoutes);
apiServer.route(apiRoutes);
server.start(function () {
console.log('Web servers running at: ', 'localhost:' + runningPort);
console.log('Api server running at: ', 'localhost:' + runningPort);
});
Currently only the API routes work.
As mentioned by the commenters, you can't create two connections to the same port on the same network interface. This goes all the way back to the listen syscall giving an EADDRINUSE error if two sockets try to listen on the same port.
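For illustration, a tiny plain-Node sketch (no hapi involved) of what that looks like in practice: the second listener on the same address gets an EADDRINUSE error.
var http = require('http');

var first = http.createServer();
first.listen(3000, '127.0.0.1', function() {
  var second = http.createServer();
  second.on('error', function(err) {
    console.error(err.code); // logs 'EADDRINUSE'
  });
  second.listen(3000, '127.0.0.1');
});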
Creating two connections on separate ports or separate network interfaces is perfectly ok though:
server.connection({
host: '127.0.0.1',
port: 4000,
labels: ['api']
});
server.connection({
host: '127.0.0.1',
port: 4001,
labels: ['web']
});
As Matt already said, you can't run different connections on the same port. Make sure to use different ports, like:
server.connection({
host: 'localhost',
port: process.env.PORT || port
});
server.connection({
host: 'localhost',
port: process.env.PORT + 1 || port + 1
});
If you want to read more on running an API and your web server within the same hapi project, you might scan through these tutorials:
https://futurestud.io/tutorials/hapi-how-to-run-separate-frontend-and-backend-servers-within-one-project
https://futurestud.io/tutorials/hapi-how-to-use-server-labels
Hope that helps!
