Edit 2 - Final Notes
The solution below worked for me; however, you still need to manually specify the Mongo connection credentials as shown above for the application to run correctly, otherwise you will get a Mongo auth error.
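If you need to assemble the connection string by hand, a minimal sketch looks like this, building it from OpenShift v2's individual MongoDB cartridge variables (the variable names are the standard OpenShift v2 ones; the local database name is a placeholder):
// Hypothetical sketch: build the Mongo connection string, credentials included,
// from OpenShift's individual MongoDB cartridge environment variables.
var mongoUser = process.env.OPENSHIFT_MONGODB_DB_USERNAME;
var mongoPass = process.env.OPENSHIFT_MONGODB_DB_PASSWORD;
var mongoHost = process.env.OPENSHIFT_MONGODB_DB_HOST;
var mongoPort = process.env.OPENSHIFT_MONGODB_DB_PORT;
var mongoDbConnectionString = mongoUser
    ? 'mongodb://' + mongoUser + ':' + mongoPass + '@' + mongoHost + ':' + mongoPort + '/' + process.env.OPENSHIFT_APP_NAME
    : 'mongodb://localhost/your-local-db'; // placeholder local database name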
Edit 1 - Added full keystone.js
keystone.js
// Simulate config options from your production environment by
// customising the .env file in your project's root folder.
require('dotenv').load();
// Require keystone
var keystone = require('keystone');
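// OPENSHIFT_MONGODB_DB_URL is set by OpenShift's MongoDB cartridge and already
// embeds the auth credentials (mongodb://user:pass@host:port/...).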
var mongoDbConnectionString = process.env.OPENSHIFT_MONGODB_DB_URL || 'mongodb://localhost/********';
var host = process.env.OPENSHIFT_NODEJS_IP || "127.0.0.1";
var port = process.env.OPENSHIFT_NODEJS_PORT || process.env.OPENSHIFT_INTERNAL_PORT || 3000;
// Initialise Keystone with your project's configuration.
// See http://keystonejs.com/guide/config for available options
// and documentation.
keystone.init({
'name': '********',
'brand': '********',
'less': 'public',
'static': 'public',
'favicon': 'public/favicon.ico',
'views': 'templates/views',
'view engine': 'jade',
'emails': 'templates/emails',
'mongo': mongoDbConnectionString,
'host': host,
'port': port,
'auto update': true,
'session': true,
'auth': true,
'user model': 'User',
'cookie secret': '********'
});
// Load your project's Models
keystone.import('models');
// Setup common locals for your templates. The following are required for the
// bundled templates and layouts. Any runtime locals (that should be set uniquely
// for each request) should be added to ./routes/middleware.js
keystone.set('locals', {
_: require('underscore'),
env: keystone.get('env'),
utils: keystone.utils,
editable: keystone.content.editable
});
// Load your project's Routes
keystone.set('routes', require('./routes'));
// Setup common locals for your emails. The following are required by Keystone's
// default email templates, you may remove them if you're using your own.
keystone.set('email locals', {
logo_src: '/images/logo-email.gif',
logo_width: 194,
logo_height: 76,
theme: {
email_bg: '#f9f9f9',
link_color: '#2697de',
buttons: {
color: '#fff',
background_color: '#2697de',
border_color: '#1a7cb7'
}
}
});
// Setup replacement rules for emails, to automate the handling of differences
// between development and production.
// Be sure to update this rule to include your site's actual domain, and add
// other rules your email templates require.
keystone.set('email rules', [{
find: '/images/',
replace: (keystone.get('env') == 'production') ? 'http://www.your-server.com/images/' : 'http://localhost:3000/images/'
}, {
find: '/keystone/',
replace: (keystone.get('env') == 'production') ? 'http://www.your-server.com/keystone/' : 'http://localhost:3000/keystone/'
}]);
// Load your project's email test routes
keystone.set('email tests', require('./routes/emails'));
// Configure the navigation bar in Keystone's Admin UI
keystone.set('nav', {
'posts': ['posts', 'post-categories'],
'creations': 'galleries',
'contact us': 'enquiries',
'users': 'users'
});
// Start Keystone to connect to your database and initialise the web server
console.log('%s: IP Set.', host);
console.log('%s: Port Set.', port);
keystone.start();
I am trying to build my first Keystone.js site, and I got it running fine locally on my machine.
Now I am trying to push my site to OpenShift and failing miserably.
I have gotten mongo to connect by adding this to keystone.js:
var mongoDbConnectionString = process.env.OPENSHIFT_MONGODB_DB_URL || 'mongodb://localhost/*********';
However, I cannot get the thing to run correctly; it seems to be having issues binding to the IP and port I am feeding it on OpenShift. I am using the following code:
var host = process.env.OPENSHIFT_NODEJS_IP || process.env.OPENSHIFT_INTERNAL_IP || "127.0.0.1";
var port = process.env.OPENSHIFT_NODEJS_PORT || process.env.OPENSHIFT_INTERNAL_PORT || 3000;
Combined with:
keystone.init({
'name': '*********',
'brand': '*********',
'less': 'public',
'static': 'public',
'favicon': 'public/favicon.ico',
'views': 'templates/views',
'view engine': 'jade',
'emails': 'templates/emails',
'mongo': mongoDbConnectionString,
'host': host,
'port': port,
'auto update': true,
'session': true,
'auth': true,
'user model': 'User',
'cookie secret': '*********'
});
But i keep getting:
==> app-root/logs/nodejs.log <==
Error: listen EACCES
at errnoException (net.js:901:11)
at Server._listen2 (net.js:1020:19)
at listen (net.js:1061:10)
at Server.listen (net.js:1135:5)
....
with 'node keystone.js'
DEBUG: Sending SIGTERM to child...
Now I have checked the env on the OpenShift instance, and the variables seem to be correct; I am getting port 8080 and what looks like a correct IP address.
I've also tried hard-coding the port and address, but it seems to make no difference, and it's also not really workable for local testing.
I'm obviously missing something simple here, so help is greatly appreciated!
Thanks
Edit 1 - Ignoring server.js code as per @GarethJeanne's comment
If your entry point is keystone.js and you're not using server.js at all (which renders my previous answer moot), then you need to make sure you're using Keystone 0.3.3 or newer.
This issue was addressed in Pull Request 1127, which was merged on Feb. 28, and first included in version 0.3.3, which was released on Mar. 8.
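If you're not sure which version you're running, npm ls keystone will tell you. To require 0.3.3 or newer, the dependency entry in package.json would look something like this (the caret range is just one option):
"dependencies": {
    "keystone": "^0.3.3"
}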
I don't believe this is a Keystone issue. You're obviously not using Keystone's out-of-the-box initialization and startup, so Keystone is not handling the creation or initialization of your Express app.
Since you're not sharing all of your code (e.g. self.initializeServer()), I will assume you're correctly setting up Express and connecting it to your Keystone app instance.
I will likewise assume self.app is an instance of Express.
From the code you submitted, the only problem I see is that you're passing 'self.port' and 'self.ipaddress' to self.app.listen() with single quotes around each argument.
This would definitely cause a listen EACCES error.
Try removing the single quotes surrounding 'self.port' and 'self.ipaddress' on the self.app.listen() call.
self.start = function() {
// Start the app on the specific interface (and port).
self.app.listen(self.port, self.ipaddress, function() {
console.log('%s: Node server started on %s:%d ...',
Date(Date.now() ), self.ipaddress, self.port);
});
};
I can't guarantee the rest of your app will work, but it should, at the very least, eliminate the listen EACCES error.
Related
I'm trying to create an API using Node.js, Express and azure-mobile-apps to do some data synchronisation between an Ionic 3 mobile app (which uses a local SQLite database) and a Microsoft SQL database.
The API has to create a synchronisation connection for each mobile application, and each application will be linked to a distant database. For example, if user_01 wants to synchronise his data, he's going to be linked to his client_01 database. So each time it has to, the API will create a new process running on a different port.
Here is an example: https://zupimages.net/up/19/36/szhu.png
The problem is that I'm not able to create more than one connection with azure-mobile-apps. The first one always works, but the second, third, etc. keep using the first connection that I instantiated. I've looked into the app stack and everything seems fine.
Is this an issue with azure-mobile-apps, or did I misunderstand something about Express?
Thanks for your responses!
var azureMobileApps = require('azure-mobile-apps');
var express = require('express');
module.exports = {
    async createConnection(client) {
        try {
            let app = express();
            // Declare mobileApp locally; without let/var it leaks as an implicit global
            let mobileApp = azureMobileApps({
                homePage: true,
                swagger: true,
                data: {
                    server: '****',
                    user: client.User,
                    password: client.Password,
                    port: '1443', // note: SQL Server's default port is 1433
                    database: client.Database,
                    provider: 'mssql',
                    dynamicSchema: false,
                    options: {
                        encrypt: false
                    }
                }
            });
            await mobileApp.tables.import('./tables');
            await mobileApp.tables.initialize();
            app.use(mobileApp); // mount the mobile app before accepting requests
            app.listen(global.portCounter);
            console.log(app._router.stack);
            console.log('Listening on port ', global.portCounter);
            global.portCounter++;
        } catch (error) {
            console.log(error);
        }
    }
}
It's working now. The thing is, it's impossible to make multiple connections with the azure-mobile-apps SDK for Node.js.
I had to use worker threads, which seem to isolate the memory in a sub-process.
Hope it can help somebody one day.
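For anyone curious, a rough sketch of the approach (file names and the config shape are assumptions; worker_threads needs Node 12+, or 10.5+ with the --experimental-worker flag):
// main.js - hypothetical sketch: spawn one worker per client so each
// azure-mobile-apps instance gets its own isolated module state.
const { Worker } = require('worker_threads');

function createConnection(client, port) {
    const worker = new Worker('./connection-worker.js', {
        workerData: { client: client, port: port } // must be plain-serializable
    });
    worker.on('error', function(err) { console.log(err); });
    worker.on('exit', function(code) { console.log('worker exited with code', code); });
    return worker;
}

// connection-worker.js - runs in its own thread
const { workerData } = require('worker_threads');
const express = require('express');
const azureMobileApps = require('azure-mobile-apps');

const app = express();
const mobileApp = azureMobileApps({ data: workerData.client }); // same data block as above

mobileApp.tables.import('./tables');
mobileApp.tables.initialize().then(function() {
    app.use(mobileApp);
    app.listen(workerData.port);
    console.log('Listening on port', workerData.port);
});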
I'm facing an issue creating a sendtransaction API using bitcore's sendFrom method. My API call looks like this: http://localhost:3000/bitcoin/api/sendfrom? "2NFuJDmdvKWP2zB5EfsXqNuYz9AW65tBrAy" 0.001 6 "donation" "seans outpost", and I'm getting the error "A wallet phrase needed and has not set". Can anyone tell me how to create a sendtransaction API for Bitcoin using Node.js and bitcore? I have already created generateNewaddress, getbalance and getaccounts APIs and tested them in Postman. Please find my Node.js code below:
var bitcoinapi = require('bitcoin-node-api');
var express = require('express');
var app = express();
var port = 3000;
// Username and password relate to those set in the bitcoin.conf file
var wallet = {
    host: 'localhost',
    port: 18332, // testnet RPC port
    user: 'test',
    pass: 'test123'
};
bitcoinapi.setWalletDetails(wallet);
bitcoinapi.setAccess('default-safe'); // Access control
app.use('/bitcoin/api', bitcoinapi.app); // Bind the middleware to any chosen URL
app.listen(port);
console.log('server is running at port ' + port);
Did you install and set up Bitcoin Core?
You need to run the daemon in Bitcoin Core (that is what your "wallet" settings point at), and the error you're seeing suggests the wallet is encrypted, meaning it would have to be unlocked with walletpassphrase before it can send.
By the way, you can use the daemon's API directly (this list is no longer up to date, but it gives you an idea: https://en.bitcoin.it/wiki/Original_Bitcoin_client/API_calls_list ).
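As a rough illustration of calling the daemon directly (a sketch only: it assumes a testnet daemon on port 18332 with the credentials above, and note that sendfrom is deprecated in newer Bitcoin Core releases):
// Hypothetical sketch: call the daemon's JSON-RPC interface directly,
// reusing the rpcuser/rpcpassword and testnet port from the code above.
var http = require('http');

var payload = JSON.stringify({
    jsonrpc: '1.0',
    id: 'sendfrom-test',
    method: 'sendfrom',
    // fromaccount ('' = default), toaddress, amount, minconf, comment, comment-to
    params: ['', '2NFuJDmdvKWP2zB5EfsXqNuYz9AW65tBrAy', 0.001, 6, 'donation', 'seans outpost']
});

var req = http.request({
    host: 'localhost',
    port: 18332, // testnet RPC port
    method: 'POST',
    auth: 'test:test123', // rpcuser:rpcpassword from bitcoin.conf
    headers: {
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(payload)
    }
}, function(res) {
    var body = '';
    res.on('data', function(chunk) { body += chunk; });
    res.on('end', function() { console.log(body); });
});

req.on('error', console.log);
req.write(payload);
req.end();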
I am working with cors-anywhere on my localhost. I have the following server.js file...
var host = process.env.HOST || '0.0.0.0';
var port = process.env.PORT || 1234;
var cors_proxy = require('cors-anywhere');
cors_proxy.createServer({
httpProxyOptions: {
secure: false
}
}).listen(port, host, function() {
console.log('listening...');
});
This works and is proxying the initial request when going to:
http://localhost:1234/https://proxy-domain/page
The issue is that on the page I am browsing/being proxied to, there are dependent files (i.e. CSS, JavaScript, images, etc.) that are not being loaded, because they are not being proxied appropriately. Looking at my browser's network tab, the dependent files are trying to be downloaded from...
http://localhost:12345/sample-image.gif
Really the url should be...
https://proxy-domain/sample-image.gif
How can I configure cors-anywhere to proxy all subsequent requests to the appropriate target url?
According to the documentation, you can set redirectSameOrigin to true, which will redirect requests to the same origin instead of proxying them:
cors_proxy.createServer({
    redirectSameOrigin: true,
    httpProxyOptions: {
        secure: false
    }
}).listen(port, host, function() {
    console.log('listening...');
});
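With this option enabled, cors-anywhere answers a request whose target shares the requesting page's origin with a redirect to the original URL instead of proxying it, so the browser fetches those assets directly.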
I'm in the process of trying to get an application which I'd built on the old OpenShift Online 2 free service up and running on the new OpenShift Online 3 Starter, and I'm having a bit of trouble.
The application uses WebSocket, and in the old system all that was required was for the client to connect to my server on port 8443 (which was automatically routed to my server). That doesn't seem to work in the new setup, however - the connection just times out - and I haven't been able to find any documentation about using WebSocket in the new system.
My first thought was that I needed an additional route, but 8080 is the only port option available for routing as far as I can see.
The app lives here, and the connection is made on line 21 of this script with the line:
this.socket = new WebSocket( 'wss://' + this.server + ':' + this.port, 'tabletop-protocol' );
Which becomes, in practice:
this.socket = new WebSocket( 'wss://production-instanttabletop.7e14.starter-us-west-2.openshiftapps.com:8443/', 'tabletop-protocol' );
On the back end, the server setup is unchanged from what I had on OpenShift 2, aside from updating the IP and port lookup from env as needed, and adding logging to help diagnose the issues I've been having.
For reference, here's the node.js server code (with the logic trimmed out):
var http = require( "http" );
var ws = require( "websocket" ).server;
// Trimmed some others used by the logic...
var ip = process.env.IP || process.env.OPENSHIFT_NODEJS_IP || '0.0.0.0';
var port = process.env.PORT || process.env.OPENSHIFT_NODEJS_PORT || 8080;
/* FILE SERVER */
// Create a static file server for the client page
var pageHost = http.createServer( function( request, response ){
// Simple file server that seems to be working, if a bit slowly
// ...
} ).listen( port, ip );
/* WEBSOCKET */
// Create a websocket server for ongoing communications
var wsConnections = [];
var wsServer = new ws( { httpServer: pageHost } );
// Start listening for events on the server
wsServer.on( 'request', function( request ){
// Server logic for the app, but nothing in here ever gets hit
// ...
} );
In another question it was suggested that nearly anything - including this - could be related to the ongoing general issues with US West 2, but other related problems I was experiencing seem to have cleared, and that issue has been posted for a week with no update, so I figured I'd dig deeper into this on the assumption that it's something I'm doing wrong instead of them.
Anyone know more about this and what I need to do to make it work?
So I'm like 99% sure I'm just screwing up something dumb here.
I'm trying to set up catbox to cache objects to Redis. I have Redis up and running and I can hit it with RDM (a Sequel Pro-like utility for Redis), but Hapi is not cooperating.
I register the redis catbox cache like so:
const server = new Hapi.Server({
cache: [
{
name: 'redisCache',
engine: require('catbox-redis'),
host: 'redis',
partition: 'cache',
password: 'devpassword'
}
]
});
I am doing this in server.js. After this block of code I go on to register some more plugins and start the server. I also export the server at the end of the file:
module.exports = server;
Then in my routes file, I am attempting to set up a testing route like so:
{
    method: 'GET',
    path: '/cacheSet/{key}/{value}',
    config: { auth: false },
    handler: function(req, res) {
        const testCache = server.cache({
            cache: 'redisCache',
            expireIn: 1000
        });
        testCache.set(req.params.key, req.params.value, 1000, function(e) {
            // Reply exactly once: an error if the set failed, the params otherwise
            // (the original replied both inside and after the callback)
            if (e) {
                console.log(e);
                return res(Boom.create(e.http_code, e.message));
            }
            res(req.params.key + " " + req.params.value);
        });
    }
},
Note: My routes are in an external file, and are imported into server.js where I register them.
If I comment out all the cache stuff on this route, the route runs fine and returns my params.
If I run this with the cache stuff, at first I got "server not defined". So I then added
const server = require('./../server.js');
to import the server.
Now when I run this, I get "server.cache is not a function" and a 500 error.
I don't understand what I'm doing wrong. My guess is that I'm importing server, but perhaps it's the object without all the configs set, so it's unable to use the .cache method. However, this seems wrong, because .cache should always be a default method with the default memory cache, so even if my cache registration isn't active yet, server.cache should theoretically still be a method.
I know it has to be something basic I'm messing up, but what?
I was correct. I was doing something stupid. It had to do with how I was exporting my server: requiring server.js from the routes file, while server.js was itself still requiring the routes, presumably created a circular dependency, so the routes file received an incomplete export. I modified my structure to pull the initial server creation out on its own and make it more modular. Now I am simply exporting JUST the server, like so:
'use strict';
const Hapi = require('hapi');
const server = new Hapi.Server({
cache: [
{
name: 'redisCache',
engine: require('catbox-redis'),
host: 'redis',
partition: 'cache',
password: 'devpassword'
}
]
});
module.exports = server;
I then import that into my main server file (now index.js, previously server.js) and everything runs fine. I can also import it into any other file (in this case my routes file) and access the server's methods.
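For anyone wanting the shape of it, the new entry point looks roughly like this (a sketch: file and route paths are assumptions, and this is the pre-v17 hapi API used above):
// index.js - hypothetical sketch of the new entry point
'use strict';
const server = require('./server'); // the module above, exporting JUST the server

server.connection({ port: 3000 });

// Register the routes (kept in their own file) against the imported server
server.route(require('./routes'));

server.start((err) => {
    if (err) throw err;
    console.log('Server running at:', server.info.uri);
});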
Redis is happily storing keys and Hapi is happily not giving me errors.
Leaving this here in case anyone else runs into a dumb mistake like this.