Nginx, Express.js and Node on port 80

I run my Express.js app on port 8000 and want to reach it at localhost rather than localhost:8000 in my dev environment.
I'm on Mac OS X El Capitan with nginx 1.8.0, installed via Homebrew. When I browse to localhost I get a 502 Bad Gateway or an ERR_CONNECTION_TIMED_OUT error.
I stop nginx with sudo pkill nginx and start it with sudo nginx. I also start the app with pm2 via sudo pm2 start bin/www --watch, and that part works fine: I can access my app at localhost:8000.
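For reference, the Node side of this setup is unremarkable; a minimal sketch of the kind of Express app pm2 would be starting from bin/www (an assumption for illustration, not the asker's actual code):

// app.js -- hypothetical stand-in for the app behind bin/www
const express = require('express');
const app = express();

app.get('/', (req, res) => {
  res.send('Hello from Express on port 8000');
});

// Must match the port nginx proxies to (127.0.0.1:8000 below).
app.listen(8000, () => {
  console.log('Express listening on http://localhost:8000');
});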
/etc/hosts
##
# Host Database
#
# localhost is used to configure the loopback interface
# when the system is booting. Do not change this entry.
##
127.0.0.1 localhost
255.255.255.255 broadcasthost
::1 localhost
Here's my nginx.conf in /usr/local/etc/nginx
user myusernameishere staff;
worker_processes 1;

events {
    worker_connections 1024;
}

http {
    include mime.types;
    default_type application/octet-stream;
    include /usr/local/etc/nginx/sites-enabled/*;
}
sites-enabled/default.conf
server {
    listen 80;
    server_name 127.0.0.1 localhost;

    location / {
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $host;
        proxy_pass http://127.0.0.1:8000;
    }
}
/usr/local/etc/nginx/nginx.conf.default
#user nobody;
worker_processes 1;

#error_log logs/error.log;
#error_log logs/error.log notice;
#error_log logs/error.log info;

#pid logs/nginx.pid;

events {
    worker_connections 1024;
}

http {
    include mime.types;
    default_type application/octet-stream;

    #log_format main '$remote_addr - $remote_user [$time_local] "$request" '
    #                '$status $body_bytes_sent "$http_referer" '
    #                '"$http_user_agent" "$http_x_forwarded_for"';

    #access_log logs/access.log main;

    sendfile on;
    #tcp_nopush on;

    #keepalive_timeout 0;
    keepalive_timeout 65;

    #gzip on;

    server {
        listen 8080;
        server_name localhost;

        #charset koi8-r;
        #access_log logs/host.access.log main;

        location / {
            root html;
            index index.html index.htm;
        }

        #error_page 404 /404.html;

        # redirect server error pages to the static page /50x.html
        #
        error_page 500 502 503 504 /50x.html;
        location = /50x.html {
            root html;
        }

        # proxy the PHP scripts to Apache listening on 127.0.0.1:80
        #
        #location ~ \.php$ {
        #    proxy_pass http://127.0.0.1;
        #}

        # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
        #
        #location ~ \.php$ {
        #    root html;
        #    fastcgi_pass 127.0.0.1:9000;
        #    fastcgi_index index.php;
        #    fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
        #    include fastcgi_params;
        #}

        # deny access to .htaccess files, if Apache's document root
        # concurs with nginx's one
        #
        #location ~ /\.ht {
        #    deny all;
        #}
    }

    # another virtual host using mix of IP-, name-, and port-based configuration
    #
    #server {
    #    listen 8000;
    #    listen somename:8080;
    #    server_name somename alias another.alias;
    #    location / {
    #        root html;
    #        index index.html index.htm;
    #    }
    #}

    # HTTPS server
    #
    #server {
    #    listen 443 ssl;
    #    server_name localhost;
    #    ssl_certificate cert.pem;
    #    ssl_certificate_key cert.key;
    #    ssl_session_cache shared:SSL:1m;
    #    ssl_session_timeout 5m;
    #    ssl_ciphers HIGH:!aNULL:!MD5;
    #    ssl_prefer_server_ciphers on;
    #    location / {
    #        root html;
    #        index index.html index.htm;
    #    }
    #}

    include servers/*;
}

Upgrade is an HTTP/1.1 header; nginx talks HTTP/1.0 to upstreams by default, so you may need to add proxy_http_version 1.1; when forwarding it.
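The Upgrade/Connection headers only matter if the app actually speaks WebSockets; plain HTTP requests work without them. A minimal sketch of such a backend, assuming the third-party ws package (not mentioned in the question):

// npm i express ws
const express = require('express');
const http = require('http');
const { WebSocketServer } = require('ws');

const app = express();
app.get('/', (req, res) => res.send('plain HTTP still works'));

// Share one HTTP server between Express and the WebSocket endpoint so a
// single upstream port (8000) handles both.
const server = http.createServer(app);
const wss = new WebSocketServer({ server });
wss.on('connection', (socket) => socket.send('upgraded through nginx'));

server.listen(8000, '127.0.0.1');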

Try this for your sites-enabled/default.conf:
upstream backend {
    server localhost:8000;
}

server {
    listen 80;
    server_name 127.0.0.1 localhost;

    location / {
        proxy_http_version 1.1;   # as noted above, needed when forwarding the Upgrade header
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $host;
        proxy_pass http://backend;
    }
}

Related

nginx not passing request to node server if trailing slash is missing in the URL

I have set up nginx with WordPress and it is working fine. Now I have created a React application, which runs on port 3000, and I want nginx to pass requests to the React server when a certain location matches.
Below is the nginx configuration with the WordPress and React apps:
server {
    listen 80;
    server_name aaroogya.org;
    return 301 https://aaroogya.org$request_uri;
}

server {
    # listen 80;
    root /var/www/wordpress;
    index index.php index.html index.htm index.nginx-debian.html;
    server_name aaroogya.org www.aaroogya.org;

    #location = /favicon.ico { log_not_found off; access_log off; }
    #location = /robots.txt { log_not_found off; access_log off; allow all; }
    #server_name testbed2.covidhelp.in;

    location /covidhelp {
        #root /var/www/;
        # index index.html;
        add_header Access-Control-Allow-Origin http://127.0.0.1:3000/;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $http_host;
        proxy_set_header X-NginX-Proxy true;
        proxy_pass http://127.0.0.1:3000/;
        proxy_redirect off;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_redirect off;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    location ~* \.(css|gif|ico|jpeg|jpg|js|png)$ {
        expires max;
        log_not_found off;
    }

    location / {
        #try_files $uri $uri/ =404;
        try_files $uri $uri/ /index.php$is_args$args;
    }

    location ~ \.php$ {
        include snippets/fastcgi-php.conf;
        fastcgi_pass unix:/var/run/php/php7.2-fpm.sock;
    }

    location ~ /\.ht {
        deny all;
    }

    listen 443 ssl; # managed by Certbot
    ssl_certificate /etc/letsencrypt/live/aaroogya.org/fullchain.pem; # managed by Certbot
    ssl_certificate_key /etc/letsencrypt/live/aaroogya.org/privkey.pem; # managed by Certbot
    include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
    ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
}
When I visit https://www.aaroogya.org/covidhelp/ the request is proxied to the React server, but static files like bundle.js fail to load. For example:
https://www.aaroogya.org/covidhelp/static/js/main.chunk.js -- not working
https://www.aaroogya.org/covidhelp/static/js/main.chunk.js/ -- with a trailing slash added, it works fine.
I resolved the issue in two steps.
Check /var/log/nginx/error.log:
connect() failed (111: Connection refused) while connecting to upstream, client: *.*.*.*, server: *.*.*.*, request: "GET / HTTP/1.1", upstream: "http://127.0.0.1:8000/", host: "*.*.*.*"
The upstream was still 127.0.0.1:8000 even though I had set it to 127.0.0.1:3000 in the nginx conf file.
Replace server 127.0.0.1:8000 with server 127.0.0.1:3000 in /etc/nginx/conf.d/virtual.conf and restart nginx.
Below:
server {
    listen 80;
    server_name SERVER_IP_ADDRESS;

    location / {
        proxy_pass http://127.0.0.1:3000;
    }
}
Then:
sudo /etc/init.d/nginx restart
Finally, it works with no 502 error.
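An alternative on the Node side (not the fix described above): serve a production React build under the /covidhelp prefix from Express instead of proxying the dev server, so asset URLs keep the prefix. This sketch assumes "homepage": "/covidhelp" in the React app's package.json and a build/ directory produced by npm run build:

const express = require('express');
const path = require('path');

const app = express();

// Static assets (bundle.js, main.chunk.js, ...) under the same prefix nginx proxies.
app.use('/covidhelp', express.static(path.join(__dirname, 'build')));

// Fall back to index.html for client-side routes under the prefix.
app.use('/covidhelp', (req, res) => {
  res.sendFile(path.join(__dirname, 'build', 'index.html'));
});

app.listen(3000, '127.0.0.1');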

NGINX reverse proxy / port-forwarding rule to send http traffic to port 3000 for my Node Express application causes the application to be unusable

I have a Node.js server running with Express that is being used as a web server. It connects to my database to run queries for the end user.
I have a VPS set up on DigitalOcean, with a Node app running on port 3000. When I access the Node app at ip:3000 it runs fine, and as fast as expected. If I set up a reverse proxy with nginx, or a firewall rule that forwards traffic from port 80 to port 3000, parts of the page run extremely slowly or not at all. I can't see a pattern: some of the database queries run fine, but others don't load at all and cause the page to hang. If I access the site on port 3000 it still runs fine, even with nginx running; only access through port 80 is slow.
My NGINX conf is:
user www-data;
worker_processes auto;
pid /run/nginx.pid;
include /etc/nginx/modules-enabled/*.conf;

events {
    worker_connections 768;
    # multi_accept on;
}

http {
    ##
    # Basic Settings
    ##

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;
    # server_tokens off;

    # server_names_hash_bucket_size 64;
    # server_name_in_redirect off;

    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    ##
    # SSL Settings
    ##

    ssl_protocols TLSv1 TLSv1.1 TLSv1.2; # Dropping SSLv3, ref: POODLE
    ssl_prefer_server_ciphers on;

    ##
    # Logging Settings
    ##

    access_log /var/log/nginx/access.log;
    error_log /var/log/nginx/error.log;

    ##
    # Gzip Settings
    ##

    gzip on;

    # gzip_vary on;
    # gzip_proxied any;
    # gzip_comp_level 6;
    # gzip_buffers 16 8k;
    # gzip_http_version 1.1;
    # gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/$

    ##
    # Virtual Host Configs
    ##

    server_names_hash_bucket_size 64;

    include /etc/nginx/conf.d/*.conf;
    include /etc/nginx/sites-enabled/*;
}
#mail {
# # See sample authentication script at:
# # http://wiki.nginx.org/ImapAuthenticateWithApachePhpScript
#
# # auth_http localhost/auth.php;
# # pop3_capabilities "TOP" "USER";
# # imap_capabilities "IMAP4rev1" "UIDPLUS";
#
# server {
# listen localhost:110;
# protocol pop3;
# proxy on;
# }
#
# server {
# listen localhost:143;
# protocol imap;
# proxy on;
# }
#}
My example.com file is (where 'example.com' is my site address):
server {
    listen 80;
    listen [::]:80;

    root /var/www/example.com/html;
    index index.html index.htm index.nginx-debian.html;

    server_name example.com www.example.com;

    location / {
        proxy_pass http://localhost:3000;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;
    }
}
I recommend using PM2 to start an instance of your Node app in production: https://github.com/Unitech/pm2
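PM2 deployments are usually described in an ecosystem file; a minimal sketch (file name, app name and paths are placeholders, not taken from the question):

// ecosystem.config.js -- start with: pm2 start ecosystem.config.js
module.exports = {
  apps: [
    {
      name: 'node-app',        // placeholder name
      script: './bin/www',     // entry point of the Express app (assumed)
      instances: 1,
      env: {
        NODE_ENV: 'production',
        PORT: 3000,            // must match the nginx upstream (127.0.0.1:3000)
      },
    },
  ],
};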
Try the following nginx configuration:
upstream prod_nodejs_upstream {
    server 127.0.0.1:3000;
    keepalive 64;
}

server {
    listen 80;
    server_name example.com;
    root /home/www/example;

    location / {
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $http_host;
        proxy_set_header X-NginX-Proxy true;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_max_temp_file_size 0;
        proxy_pass http://prod_nodejs_upstream/;
        proxy_redirect off;
        proxy_read_timeout 240s;
    }
}
Once these changes are applied, test the configuration and restart nginx with sudo nginx -t and then sudo systemctl restart nginx.
Please update the configuration as below and share the resulting log output so that the time taken by the upstream can be measured:
upstream prod_nodejs_upstream {
    server 127.0.0.1:3000;
    keepalive 64;
}

server {
    listen 80;
    server_name example.com;
    root /home/www/example;

    location / {
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $http_host;
        proxy_set_header X-NginX-Proxy true;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_max_temp_file_size 0;
        proxy_pass http://prod_nodejs_upstream/;
        proxy_redirect off;
        proxy_read_timeout 240s;
    }

    # Note: log_format is only valid at the http level, so this block needs
    # to be placed in the http context rather than inside the server block.
    log_format apm '"$time_local" client=$remote_addr '
                   'method=$request_method request="$request" '
                   'request_length=$request_length '
                   'status=$status bytes_sent=$bytes_sent '
                   'body_bytes_sent=$body_bytes_sent '
                   'referer=$http_referer '
                   'user_agent="$http_user_agent" '
                   'upstream_addr=$upstream_addr '
                   'upstream_status=$upstream_status '
                   'request_time=$request_time '
                   'upstream_response_time=$upstream_response_time '
                   'upstream_connect_time=$upstream_connect_time '
                   'upstream_header_time=$upstream_header_time';
}

redirect example.com:3000 to example.com

I want to 301-redirect https://example.com:3000 to https://example.com, while keeping port 3000 accessible only via IP:3000 and NOT via example.com:3000.
I'm running an Express.js app on port 3000 and using nginx to proxy localhost:3000 as example.com.
Right now https://example.com:3000 is not accessible (Chrome shows ERR_CONNECTION_CLOSED), but IP:3000 is.
The problem is that search engines have indexed almost all of the https://example.com:3000 pages, and those pages aren't accessible.
Since port 3000 is already taken by Node.js, in nginx I cannot write:
server {
    listen 3000;
    server_name example.com;
    return 301 https://example.com$request_uri;
}
nginx conf:
upstream nodejs {
    ip_hash;
    server localhost:3000;
}

server {
    listen 80 default_server;
    listen [::]:80 default_server;
    server_name example.com;
    return 301 https://$server_name$request_uri;
}

server {
    listen 443 ssl default_server;
    server_name example.com;
    listen [::]:443 ssl default_server;

    include snippets/ssl-example.com.conf;
    include snippets/ssl-params.conf;
    include /etc/nginx/snippets/letsencrypt-acme-challenge.conf;

    location = /robots.txt {
        root /root;
        allow all;
        log_not_found off;
        access_log off;
    }

    location ~* \.(?:css|js)$ {
        root /root;
        expires 9d;
        add_header Cache-Control "public, max-age=7200";
        add_header X-Content-Type-Options nosniff;
        add_header X-Frame-Options "SAMEORIGIN";
        add_header X-XSS-Protection "1; mode=block";
        add_header X-Robots-Tag none;
        add_header X-Download-Options noopen;
        add_header X-Permitted-Cross-Domain-Policies none;
        # Optional: Don't log access to assets
        access_log off;
    }

    location ~* ^.+\.(jpg|jpeg|gif|png|ico|zip|tgz|gz|rar|bz2|pdf|txt|tar|wav|bmp|rtf|flv|swf)$ {
        root /root;
        expires 365d;
        access_log off;
    }

    # #nodejs
    location / {
        add_header Cache-Control "private";
        add_header Vary "Cookie, User-Agent";
        proxy_redirect off;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_cache_bypass $http_upgrade;
        include /etc/nginx/proxy_params;
        proxy_pass http://nodejs;
    }
}
How do I redirect https://example.com:3000 to https://example.com and restrict outside access to port 3000 (leaving only localhost:3000)?
Add one more server block like the following:
server {
    listen EXTERNAL_IP:3000;
    server_name example.com;

    include snippets/ssl-example.com.conf;
    include snippets/ssl-params.conf;
    include /etc/nginx/snippets/letsencrypt-acme-challenge.conf;

    return 301 https://$server_name$request_uri;
}
Note that the application should listen only on 127.0.0.1:3000, or you may get an "address already in use" error.
In this case all incoming connections are established with nginx, which redirects users according to your rule.
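On the Node side, binding only to the loopback interface is just an extra argument to listen(); a sketch assuming an Express app (Node's built-in http server accepts the same host argument):

const express = require('express');
const app = express();

// The second argument restricts the listener to 127.0.0.1, so the app is
// no longer reachable on EXTERNAL_IP:3000 and nginx can claim that port.
app.listen(3000, '127.0.0.1', () => {
  console.log('App reachable only through nginx or localhost:3000');
});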
If you want to restrict access to port 3000, you can use any firewall. Example for iptables:
iptables -I INPUT -p tcp -i eth1 --dport 3000 -j DROP
But this will close access to https://example.com:3000 too.

Upstream Node server closing connection to nginx

I'm using nginx as a proxy for a Node server that's rate-limiting requests. The rate is one request every 30 seconds; most requests return a response fine, but if a request is kept open for an extended period of time, I get this:
upstream prematurely closed connection while reading response header from upstream
I cannot figure out what might be causing this. Below is my nginx configuration:
# For more information on configuration, see:
#   * Official English Documentation: http://nginx.org/en/docs/
#   * Official Russian Documentation: http://nginx.org/ru/docs/

user nginx;
worker_processes auto;
error_log /var/log/nginx/error.log;
pid /run/nginx.pid;

# Load dynamic modules. See /usr/share/nginx/README.dynamic.
# include /usr/share/nginx/modules/*.conf;

events {
    worker_connections 1024;
}

http {
    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /var/log/nginx/access.log main;

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;

    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    # Load modular configuration files from the /etc/nginx/conf.d directory.
    # See http://nginx.org/en/docs/ngx_core_module.html#include
    # for more information.
    include /etc/nginx/conf.d/*.conf;

    server {
        listen 80 default_server;
        listen [::]:80 default_server;
        server_name _;
        root /srv/www/main/htdocs;

        # Load configuration files for the default server block.
        include /etc/nginx/default.d/*.conf;

        location /vcheck {
            proxy_pass http://127.0.0.1:8080$is_args$query_string;
            # proxy_buffer_size 128k;
            # proxy_buffers 4 256k;
            # proxy_busy_buffers_size 256k;
            # proxy_http_version 1.1;
            # proxy_set_header Upgrade $http_upgrade;
            # proxy_set_header Connection 'upgrade';
            # proxy_set_header Host $host;
            # proxy_cache_bypass $http_upgrade;
            # proxy_redirect off;
            proxy_read_timeout 600s;
        }

        location ~ \.php$ {
            include fastcgi.conf;
            fastcgi_split_path_info ^(.+\.php)(/.+)$;
            fastcgi_pass unix:/var/run/php-fpm/php-fpm.sock;
            fastcgi_index routes.php$is_args$query_string;
        }

        location / {
            if (-f $request_filename) {
                expires max;
                break;
            }
            if ($request_filename !~ "\.(js|htc|ico|gif|jpg|png|css)$") {
                rewrite ^(.*) /routes.php last;
            }
        }
    }
}
Is there a reason why Node could be closing the connection early?
EDIT: I'm using Node's built-in HTTP server.
It seems you have to extend the response timeout of the Node.js application.
If it's an Express app, try this:
Install: npm i --save connect-timeout
Use:
var timeout = require('connect-timeout');
app.use(timeout('60s'));
But I recommend not leaving the connection waiting; fix the issue in the Node.js app and find out why it is halting for so long. It looks like the app cannot respond and the request gets lost, leaving nginx waiting.
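Since the asker is using Node's built-in HTTP server rather than Express, the equivalent knob there is the server timeout; a hedged sketch (the 600000 ms value is chosen only to mirror the proxy_read_timeout 600s above):

const http = require('http');

const server = http.createServer((req, res) => {
  // ...long-running work happens here before the response is written...
  res.end('done');
});

// Raise the per-socket inactivity timeout so slow responses are not cut
// off on the Node side; nginx still applies its own proxy_read_timeout.
server.setTimeout(600000); // milliseconds

server.listen(8080, '127.0.0.1');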

Getting nginx and node.js to play nice

I have an nginx/node.js server I'm trying to configure. Basically it's the issue of running two web servers on port 80 at the same time: I need www.mysite.com to point to nginx on port 80, but I also have a Node.js server that api.mysite.com needs to reach on port 8888.
I've been messing around with proxy_pass in my config (http://nginx.org/en/docs/http/ngx_http_proxy_module.html#proxy_pass) but with no luck. I also tried https://stackoverflow.com/a/20716524/605841, with no luck.
If anyone has any tips that would be great. Thanks in advance.
Nginx public dir: /var/www/html. Express app location: /var/www/html/myNodeAppRoot
Here's my /etc/nginx/sites-available/api.mysite.com file (symlinked into sites-enabled):
server {
    listen 80;
    # server_name ~^(?<login>[a-z]+)\.api\.mysite\.com$;
    server_name api.mysite.com$;

    location / {
        # root /var/www/html/myNodeAppRoot;
        # proxy_pass http://unix:/tmp/\$login.api.mysite.com.sock:$uri$is_args$args;
        proxy_pass http://unix:/tmp/api.mysite.com.sock:$uri$is_args$args;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}
And here's my default.conf file:
#
# The default server
#
server {
    listen 80 default_server;
    server_name www.mysite.com;

    #charset koi8-r;
    #access_log logs/host.access.log main;

    location / {
        root /var/www/html;
        index index.php index.html index.htm;
    }

    error_page 404 /404.html;
    location = /404.html {
        root /var/www/html;
    }

    # redirect server error pages to the static page /50x.html
    #
    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
        root /var/www/html;
    }

    # proxy the PHP scripts to Apache listening on 127.0.0.1:80
    #
    #location ~ \.php$ {
    #    proxy_pass http://127.0.0.1;
    #}

    # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
    #
    location ~ \.php$ {
        root /var/www/html;
        try_files $uri =404;
        # fastcgi_pass 127.0.0.1:9000;
        fastcgi_pass unix:/tmp/php5-fpm.sock;
        fastcgi_index index.php;
        # fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
        fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
        include fastcgi_params;
    }

    # deny access to .htaccess files, if Apache's document root
    # concurs with nginx's one
    #
    location ~ /\.ht {
        deny all;
    }
}
Thanks for any help!
I run a bunch of Node.js applications on the same server, while nginx serves some static content. Here's my setup:
# the meteor server
server {
    server_name example.com;

    access_log /etc/nginx/logs/example.access;
    error_log /etc/nginx/logs/example.error error;

    location / {
        proxy_pass http://localhost:3030;
        proxy_set_header X-Real-IP $remote_addr;
    }
}
I just repeat this block and change the port for each new Node.js app, then run each Node.js process with a different --port parameter (3030 here).
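The per-app port can be read from the environment or a command-line flag instead of being hard-coded; a minimal sketch (the --port parsing is an illustration, not part of the answer):

const express = require('express');
const app = express();

// Accept either `node app.js --port 3030` or `PORT=3030 node app.js`.
const argIndex = process.argv.indexOf('--port');
const port = argIndex !== -1
  ? Number(process.argv[argIndex + 1])
  : Number(process.env.PORT) || 3030;

app.get('/', (req, res) => res.send(`served from port ${port}`));
app.listen(port, () => console.log(`listening on ${port}`));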
nginx can also be configured to use Unix sockets, which appear as items in the file system, and Node supports them out of the box. This lets you avoid any port juggling behind nginx.
A good tutorial for setting up a Node app with nginx and sockets can be found here.
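On the Node side, listening on a socket path like the one in the config above is just a different argument to listen(); a sketch (the cleanup and permission handling are assumptions about a typical setup):

const express = require('express');
const fs = require('fs');

const app = express();
app.get('/', (req, res) => res.send('hello over a unix socket'));

const socketPath = '/tmp/api.mysite.com.sock';

// Node refuses to listen if a stale socket file is left over from a
// previous run, so remove it first.
if (fs.existsSync(socketPath)) fs.unlinkSync(socketPath);

app.listen(socketPath, () => {
  // The nginx worker user must be able to read/write the socket.
  fs.chmodSync(socketPath, 0o666);
});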
