Express POST returns 404 on Apache - node.js

There is a React app with an Express backend: the frontend POSTs to /api/login, Express checks the submitted password against the user records in MongoDB, and the site only lets you in if the password matches.
Locally everything works great. When we moved all the build files to the Apache server, the browser console returns POST https://websitepath.com/api/login 404 (Not Found).
Any idea what the problem could be and why it works locally but not on Apache? Node is installed and Express is running there successfully on port 4000.
Here is the code for index.js
var express = require('express');
var bodyParser = require('body-parser');
var MongoClient = require('mongodb').MongoClient;
var sha1 = require('sha1');

var db;
const PORT = 4000;

var app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({
  extended: true
}));

// Handles every request to /api/login (any HTTP method).
app.use('/api/login', function (req, res) {
  if (!req.body.password) return res.status(400).send('bad_request!');
  db.collection('user').find().toArray(function (err, results) {
    if (err) return res.status(500).send('something_wrong!');
    var checker = false;
    results.forEach(function (entry) {
      if (entry.password === sha1(req.body.password)) checker = true;
    });
    if (checker) {
      res.send('success');
    } else {
      return res.status(403).send('Unfortunately password is incorrect. Please try again.');
    }
  });
});

MongoClient.connect('mongodb://localhost:27017/test', (err, database) => {
  if (err) return console.log(err);
  db = database;
  app.listen(PORT, function () {
    console.log('Express server is up on port ' + PORT);
  });
});
Here is the code for the AuthService.js
import axios from 'axios';
import qs from 'qs';

const AuthService = {
  isLogged: false,
  login(data, cb) {
    axios.post('/api/login', qs.stringify(data)).then(
      (res) => {
        this.isLogged = true;
        cb(null, res);
      }
    ).catch((error) => {
      console.error('error occurred', error);
      cb(error.response.data);
    });
  },
};

export default AuthService;

Your question doesn't mention proxying the node.js application, so I'm guessing that's where the problem is - specifically, the node application is not being proxied.
In short, what you appear to be trying to do is something like this:
Apache is listening on port 443 (the HTTPS port) and serving web pages at various paths (presumably, everything except paths starting with /api).
You want the web server to also serve the paths used by your node.js API (eg. /api/login and others) on port 443.
But two distinct applications (Apache and your node.js app) cannot both listen on port 443 - Apache is binding it and serving its own pages. If you try to change the port on your node.js application, it will fail to start and give you an error indicating that port 443 is already bound by another application.
There is a simple test for this: open http://websitepath.com:4000/api/login in your browser (or hit it with curl). If you get any response from Express at all, even the 400 'bad_request!' error (the handler expects a password in the request body), then the node.js application is listening on port 4000, and the problem is NOT with your node application; it's with Apache's proxy configuration.
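For example, a quick check with curl from the server itself might look like this (the password value is just a placeholder):
curl -i -X POST http://localhost:4000/api/login \
     -H 'Content-Type: application/json' \
     -d '{"password":"placeholder"}'
Any Express response (200, 400 or 403) proves the node process is reachable on port 4000; a refused connection or timeout points at the node side instead.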
The solution to this is setting up Apache as a proxy. This will allow Apache to serve its own pages and forward the request to another service based on the path. So you could certainly set it up so paths that start with /api/... are forwarded to http://localhost:4000/api/... and any other paths are served by Apache directly.
Setting up a proxy is not terribly difficult, but it depends a lot on your specific circumstances, so I'm not going to attempt to explain all the ins & outs. I'd recommend starting with the mod_proxy documentation. There are also about a million tutorials out there; Digital Ocean's documentation is good - I've used it in the past.
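For reference, a minimal sketch of the kind of directives involved, assuming mod_proxy and mod_proxy_http are enabled and the node app is on localhost:4000; this goes inside the existing VirtualHost for the site and is a starting point, not a drop-in config:
# Forward /api/... to the node.js app; Apache keeps serving everything else
ProxyPreserveHost On
ProxyPass        /api http://localhost:4000/api
ProxyPassReverse /api http://localhost:4000/api
With something like that in place, a request to https://websitepath.com/api/login is handed to Express on port 4000 while the React build continues to be served by Apache directly.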

Related

How to run different server on same port?

I have created 2 servers with Express and Node.js, and now I want to run both servers on the same port, localhost:8000, with different endpoints.
How can this be done, or what approach should I take?
Attaching the server code for reference:
Server 1:
const express = require("express");
const cors = require("cors");
const app = express();
const axios = require('axios');

app.use(cors());
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = 0;

const PORT = 8000;

app.get("/WeatherForcast", function (req, res) {
  axios.get('https://localhost:7173/WeatherForecast')
    .then(response => {
      res.status(200).json({ success: true, data: response.data });
    })
    .catch(error => {
      console.log(error);
    });
});

app.listen(PORT, function () {
  console.log(`Server is running on ${PORT}`);
});
Server 2:
const express = require("express");
const cors = require("cors");
const app = express();
const axios = require('axios');

app.use(cors());
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = 0;

const PORT = 8000;

app.get("/UserData", function (req, res) {
  axios.get('https://localhost:7173/UserData')
    .then(response => {
      res.status(200).json({ success: true, data: response.data });
    })
    .catch(error => {
      console.log(error);
    });
});

app.listen(PORT, function () {
  console.log(`Server is running on ${PORT}`);
});
Currently when I run them, one server starts and the other fails with an error saying that port 8000 is already in use.
You can't run two servers on the same port. The OS and TCP stack won't allow it.
The easiest solution is to use two endpoints on one server.
If you have to have two separate servers, then you would run them both on separate ports (neither of which is the public port) and then use something like nginx to proxy each separate path to the appropriate server.
So the user's request goes to the proxy, the proxy examines the path of the request, and it then forwards the request to one of your two servers based on that path (as set up in the proxy configuration).
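A minimal sketch of what that proxy configuration could look like in nginx, assuming the two Express apps were moved to ports 8001 and 8002 (all ports here are illustrative, not from the question):
server {
    listen 8000;

    location /WeatherForcast {
        proxy_pass http://localhost:8001;   # server 1
    }

    location /UserData {
        proxy_pass http://localhost:8002;   # server 2
    }
}
To the client everything appears to live on port 8000, while nginx fans requests out by path.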
Two different servers cannot be hosted on the same port; you will get an error along the lines of "this port is currently in use".
What you can do instead of creating multiple servers is create a single server and define different endpoints on it to manage the code flow.
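A minimal sketch of that single-server approach, assuming the two routes from the question are simply combined into one app (the response bodies are placeholders):
const express = require("express");
const cors = require("cors");
const app = express();

app.use(cors());

// Both endpoints live on the same app, so only one process binds port 8000.
app.get("/WeatherForcast", (req, res) => {
  res.json({ success: true, data: "weather payload here" });
});

app.get("/UserData", (req, res) => {
  res.json({ success: true, data: "user payload here" });
});

app.listen(8000, () => console.log("Server is running on 8000"));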
It is not possible to run different servers on the same port.

node.js server with websockets on elastic beanstalk ALB without socket.io

I'm trying to get a node.js server (using Express) working with websockets on Elastic Beanstalk (EB) behind an Application Load Balancer (ALB), but without socket.io (because peerjs-server is the server I'm trying to run, and it isn't written with socket.io).
I've seen a couple of articles suggesting you have to use socket.io (or another lib that doesn't just rely on websockets), but Amazon says ALB supports websockets directly.
My server is both a create-react-app server and a peerjs-server. It runs fine in dev on port 9000 for both the web UI and the peerjs ws connections.
I've tried all of the different approaches I've found, but I haven't gotten this to work, and I've even seen things written that suggest it can't be done, but it seems like I'm close. Has anyone gotten this to work, and if so, how?
Okay, I got it to work. Here's what I've done to get everything working on port 9000.
In EB, create an application, then begin creating an environment.
In the configuration of the environment, go into the Software section and tell it that you're going to use npm run prod to start your server.
Now, go into the Load Balancers section and:
add a listener on port 9000
create a process on port 9000 and enable stickiness (I called mine peerjsServer)
add Rules for each URL path you want to use to access the server, and have each of those rules assigned to the process you created (peerjsServer). Also point the default to that process, so that health checks on 80 get through successfully to your server
You may need to hop over to the EC2 dashboard in the AWS UI in order to make sure that the necessary Security Groups are defined there. I think I created the first two myself and the last two were created by default, but I don't recall. Anyway, they need to have port 9000 open for inbound and outbound (port 80 is there by default all the time).
Back to the EB configuration, go to the Instances section and make sure that your instance has those Security Groups assigned to it.
I ran react-scripts build to make the /build directory containing the production version of the server UI (this is create-react-app stuff that I'm not covering here).
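(For completeness, with standard create-react-app defaults that build step is just the usual command; the script name is assumed from a typical CRA package.json:)
npm run build    # equivalent to: npx react-scripts build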
In my code, I start everything from a server.js that creates a single server handling both the peerjs-server websocket connections and the regular HTTP traffic.
const express = require("express");
const bodyParser = require("body-parser");
const path = require('path');
const passport = require("passport");
const users = require("./routes/api/users");
const games = require("./routes/api/games");
const Game = require("./src/models/Game");
const WebSocket = require('ws');
const ExpressPeerServer = require('peerjs-server').ExpressPeerServer;
const app = express();
const url = require('url');

const port = process.env.PEERSERVERPORT || 9000;

// WebSocket for making db updates to client.
const wsserver = require('./wsserver').wsserver;

// Server that gets all requests: lobby UI, peerserver, db websocket
const server = require('http').createServer(app);

wsserver.on('connection', function connection(ws) {
  ws.on('message', msg => {
    ws.send(msg);
  });
  ws.on('close', () => {
    console.log('WebSocket was closed');
  });
});

server.on('upgrade', function upgrade(request, socket, head) {
  const pathname = url.parse(request.url).pathname;
  if (pathname === '/ws') {
    wsserver.handleUpgrade(request, socket, head, function done(ws) {
      wsserver.emit('connection', ws, request);
    });
  }
});

// Bodyparser middleware
app.use(
  bodyParser.urlencoded({
    extended: false
  })
);
app.use(bodyParser.json());

// Passport middleware
app.use(passport.initialize());

// Passport config
require("./config/passport")(passport);

// Routes -- the /api/* path is defined in the EB load balancer
app.use("/api/users", users);
app.use("/api/games", games);

// This is the create-react-apps /build directory of UI code
app.use(express.static(path.join(__dirname, 'build')));

app.get('/login', function (req, res) {
  res.sendFile(path.join(__dirname, 'build', 'index.html'));
});

// These are the paths that are defined in the EB load balancer
app.get('/logout', function (req, res) {
  res.sendFile(path.join(__dirname, 'build', 'index.html'));
});
app.get('/register', function (req, res) {
  res.sendFile(path.join(__dirname, 'build', 'index.html'));
});
app.get('/dashboard', function (req, res) {
  res.sendFile(path.join(__dirname, 'build', 'index.html'));
});

// Peer server for making WebRTC connections between game clients.
const options = {
  debug: true
};
const peerserver = ExpressPeerServer(server, options);
app.use('/peerserver', peerserver);

app.use(express.static(path.join(__dirname, 'src')));
app.get('*', function (req, res) {
  res.sendFile(path.join(__dirname, 'src', 'index.html'));
});

peerserver.on('disconnect', (client) => {
  console.log('Delete the game if the host client disconnects');
  Game.deleteMany({ hostPeerId: client })
    .then(() => {
      wsserver.clients.forEach(function each(client) {
        if (client.readyState === WebSocket.OPEN) {
          client.send("refreshGames");
        }
      });
    })
    .catch(err => console.log(err));
});

server.listen(port, () => console.log(`Server is up and running on port ${port} !`));
Then, in my package.json, I set the prod script to run the server.js above, so the npm run prod configured in the Software section earlier is what actually starts the server:
...
"scripts": {
  "start": "set PORT=8081&& react-scripts start",
  "prod": "node server.js",
...
After doing all that, I now have a running EB server using ALB and handling both websocket (ws) and UI (http) traffic on port 9000.
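If it helps, a quick sanity check of the websocket side from a browser console might look like this (the hostname is a placeholder for your EB environment's URL; the /ws path matches the upgrade handler in server.js, which simply echoes messages back):
const ws = new WebSocket('ws://your-eb-env.example.com:9000/ws');
ws.onopen = () => ws.send('ping');
ws.onmessage = (e) => console.log('echoed back:', e.data);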

AWS EC2 NodeJS doesn't respond

I've got an AWS EC2 MEAN instance up and running (partially). The app is a RESTful JSON service and as far as I can tell is up and running as expected:
var app = require('./app');
var port = process.env.PORT || 3000;

var server = app.listen(port, function () {
  console.log('Express server listening on port ' + port);
});
console output:
node server.js
Express server listening on port 3000
Db.prototype.authenticate method will no longer be available in the
next major release 3.x as MongoDB 3.6 will only allow auth against
users in the admin db and will no longer allow multiple credentials on
a socket. Please authenticate using MongoClient.connect with auth
credentials.
I've also added an inbound security group rule for port 3000.
Testing the API out in the browser is where I run into problems... If I attempt to GET a list of objects using http://ec2-XX-XX-XX-XX.com:3000/belts the call eventually times out. However, when I try a GET for a single object using http://ec2-XX-XX-XX-XX.com:3000/belts/some_id_here I get a valid 200 response with the expected object.
Of course everything works as expected locally. What am I missing?
Thanks in advance
//edit with requested code formatted :)
//app.js
var express = require('express');
var app = express();

var BeltController = require('./controller/BeltController');
app.use('/belts', BeltController);

module.exports = app; // exported for server.js, which calls app.listen
//Belt Controller
var router = require('express').Router();
var Belt = require('../models/Belt'); // model path assumed; not shown in the question

router.get('/', function (req, res) {
  Belt.find({}, function (err, belts) {
    if (err) {
      return res.status(500).send("There was a problem finding the Belt. " + err);
    }
    res.status(200).send(belts);
  });
});

module.exports = router;

nodejs app works locally but not when deployed

I'm using iisnode to host a node app. I'm having trouble actually deploying it under my domain name. Here's the main file with two different starting points. The un-commented code is just a simple server that works correctly when accessed via my domain (so iisnode is mapping and handling the node app correctly). The commented code is the entry point for the Express app I am working on; this works when I view it from localhost, but when attempting to access it via my domain I receive a 'cannot GET application.js' error.
var http = require('http');

http.createServer(function (req, res) {
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  res.end('Hello, world!');
}).listen(process.env.PORT);

//require('./app/init');
//var server = require('./app/server');
//module.exports = server.start(process.env.NODE_ENV);
Here is my server.js file. I think it's a routing issue; I've substituted a console.log function for the indexRoute function, and it never fires. But I still don't understand why this works correctly when accessed via localhost but not under my domain.
var express = require('express');
var routes = require('./routes');
var app = express();

function createApplication(environment) {
  app.get('/', routes.indexRoute);
  app.listen(process.env.PORT);
  return app;
}

module.exports.start = createApplication;
I can message a git link for full app if anyone is interested.
Try specifying that you want to listen on all IP addresses, not just localhost, by adding '0.0.0.0' as a parameter to listen. Also add a callback to see what happened.
app.listen(process.env.PORT, '0.0.0.0', function () {
  // the listen callback receives no arguments, and an Express app has no url
  // property, so log the port instead
  console.log('Started listening on port %s', process.env.PORT);
});

Pure node.js/expressjs http redirection with single node file

While the title may be difficult to understand I hope I can explain it well enough.
Here is my setup:
I own the domain www.mysite.com through GoDaddy. It uses No-IP to point to a specific IP (let's say 1.2.3.4).
I set up some subdomains (via A records):
www.dev.mysite.com -- points to 1.2.3.4
www.pi.mysite.com -- points to 1.2.3.4
www.mysite.com -- also points to 1.2.3.4
This setup is all hosted on a Raspberry Pi running node.js (with Express as the web server).
My node file (server.js) to kick everything off looks like this:
var express = require("express"), http = require("http");
var server = express();

// proxy server
http.createServer(function (request, response) {
  if (request.headers.host === "mysite") {
    console.log("Request made to mysite.com");
    res.redirect("http://localhost:5555");
  } else if (request.headers.host === "dev.mysite") {
    console.log("Request made to dev.mysite.com");
    res.redirect("http://localhost:5556");
  } else if (request.headers.host === "pi.mysite") {
    console.log("Request made to dev.mysite.com");
    // DROP ALL TRAFFIC EXCEPT FOR PORT 21
  } else {
    console.log("Request made to something else");
  }
}).listen(80);

process.setuid("pi"); // go back to being a basic user

// main server
server.use(express.static(__dirname + "/main"));
server.listen(5555, function(err) {
  console.log("server listening on port 5555");
});

// dev server
server.use(express.static(__dirname + "/dev"));
server.listen(5556, function() {
  console.log("server listening on port 5556");
});
So here are the issues I have. I think they all reside in the above file:
server.use() needs to somehow be inside of each server.listen() because each server has its own folder it runs out of
My res.redirect()'s don't work
I have no clue how to drop all traffic except for port 21. The point here was to give me a simple url so I could ssh in. But since pi.mysite.com points to same ip as www.mysite.com, I don't want to deliver my homepage. So I figure I will just drop all traffic to that subdomain.
Notes:
I would also like to mention that I have tried both nginx and http-proxy for the traffic routing. I had issues with both of them and figured I would try this inline method to make it simpler. Turns out it isn't so simple.
I would like to keep this all in 1 file (server.js) so that I can put "node server.js" into a startup script to run on boot.
I know this probably isn't the most efficient way to do any of this. I am learning; bear with me.
I also know that I should not be running this as root (with sudo). I do it so I can bind my listener on port 80, and I use setuid("user") to get rid of this issue (hopefully).
That's all I can think of right now. Please let me know how I can fix and improve this process. I will update the question with more info as needed. Thanks
You should use two different objects for the dev and main server. Here is server.js with the proxy server part snipped:
server.js
var express = require("express"), http = require("http");
var server1 = express();
var server2 = express();

// main server
server1.use(express.static(__dirname + "/main"));
server1.listen(5555, function(err) {
  console.log("server listening on port 5555");
});

// dev server
server2.use(express.static(__dirname + "/dev"));
server2.listen(5556, function() {
  console.log("server listening on port 5556");
});
As far as the proxy server goes, you are listening with http.createServer. This is not Express, so Express's res.redirect() function will not be available (and in your callback the parameter is named response, not res). Do this instead:
http.createServer(function (request, response) {
  if (request.headers.host === "mysite") {
    console.log("Request made to mysite.com");
    response.writeHead(302, { Location: "http://localhost:5555" });
    response.end();
  } else if (request.headers.host === "dev.mysite") {
    console.log("Request made to dev.mysite.com");
    response.writeHead(302, { Location: "http://localhost:5556" });
    response.end();
  } else if (request.headers.host === "pi.mysite") {
    console.log("Request made to pi.mysite.com");
    // DROP ALL TRAFFIC EXCEPT FOR PORT 21
    response.end();
  } else {
    console.log("Request made to something else");
  }
}).listen(80);
That's going to send a redirect to your browser. I'm not sure that's what you really wanted.
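If what you actually want is for the browser to stay on your domain while the Pi forwards traffic internally (a redirect to http://localhost:5555 only works on the Pi itself), a reverse proxy is the usual approach. Here is a minimal sketch using the http-proxy module you mentioned trying, assuming the main and dev apps are listening on 5555 and 5556 as above; the loose host matching is just for illustration:
var http = require("http");
var httpProxy = require("http-proxy");

var proxy = httpProxy.createProxyServer({});

http.createServer(function (request, response) {
  var host = request.headers.host || "";
  // check dev first, since "dev.mysite.com" also contains "mysite.com"
  if (host.indexOf("dev.mysite.com") !== -1) {
    proxy.web(request, response, { target: "http://localhost:5556" });
  } else if (host.indexOf("mysite.com") !== -1) {
    proxy.web(request, response, { target: "http://localhost:5555" });
  } else {
    // anything else (eg. pi.mysite.com) just gets closed
    response.writeHead(404);
    response.end();
  }
}).listen(80);
The browser URL never changes; the proxy quietly fetches the page from the right internal port.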
