As I stated in the title, I tried setting headers in Node.js, but some of them just would not stick, or would get overwritten.
Here is the content of my server.js file:
const next = require("next");
const http = require("http");
const url = require("url");
const path = require("path");
const port = process.env.PORT || 3000;
const dev = process.env.NODE_ENV !== "production";
const app = next({ dev });
const handle = app.getRequestHandler();
app.prepare().then(() => {
  http
    .createServer((req, res) => {
      const parsedUrl = url.parse(req.url, true);
      res.setHeader("X-Content-Type-Options", "nosniff");
      res.setHeader("X-Frame-Options", "DENY");
      res.setHeader("X-XSS-Protection", "1; mode=block");
      res.setHeader(
        "Strict-Transport-Security",
        "max-age=31536000; includeSubDomains; preload"
      );
      res.setHeader("Cache-Control", "public, max-age=31557600");
      handle(req, res, parsedUrl);
    })
    .listen(port, () => {
      console.log(`listening on PORT ${port}`);
    });
});
Every header except Cache-Control and X-Powered-By (it's not in the code above, but I tried it) is getting set.
The weird thing is that when I log the response, my headers are logged out:
'x-content-type-options': [ 'X-Content-Type-Options', 'nosniff' ],
'x-frame-options': [ 'X-Frame-Options', 'DENY' ],
'x-xss-protection': [ 'X-XSS-Protection', '1; mode=block' ],
'strict-transport-security': [
'Strict-Transport-Security',
'max-age=31536000; includeSubDomains; preload'
],
'cache-control': [ 'Cache-Control', 'public, max-age=31557600' ]
These are the headers that I see in my browser:
HTTP/1.1 200 OK
X-Content-Type-Options: nosniff
X-Frame-Options: DENY
X-XSS-Protection: 1; mode=block
Strict-Transport-Security: max-age=31536000; includeSubDomains; preload
Cache-Control: no-store, must-revalidate
X-Powered-By: Next.js
ETag: "1373f-5S13UfVtDhxl5s8GCDKvO1iq/oY"
Content-Type: text/html; charset=utf-8
Vary: Accept-Encoding
Content-Encoding: gzip
Date: Thu, 06 Feb 2020 17:13:04 GMT
Connection: keep-alive
Transfer-Encoding: chunked
Any idea what's happening here? Is some default setting overwriting my custom ones?
What you are experiencing is specific to the Next.js framework.
Regarding X-Powered-By, this is expected, as per the Next.js docs:
By default Next.js will add x-powered-by to the response headers. To opt out of it, open next.config.js and disable the poweredByHeader config:
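For example (only the relevant option is shown; the rest of the file depends on your project):
// next.config.js
module.exports = {
  poweredByHeader: false,
};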
Regarding Cache-Control, it looks like it is overwritten by Next.js in development mode, as you can see in the source code:
sendHTML(req: IncomingMessage, res: ServerResponse, html: string) {
  // In dev, we should not cache pages for any reason.
  res.setHeader('Cache-Control', 'no-store, must-revalidate')
  return super.sendHTML(req, res, html)
}
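Since that override only happens in development, one workaround (a sketch, not part of the original question) is to set the custom Cache-Control header only when not running in dev mode; the other headers behave the same either way:
// Sketch: skip the long-lived Cache-Control in development, where Next.js
// replaces it with 'no-store, must-revalidate' anyway.
if (!dev) {
  res.setHeader("Cache-Control", "public, max-age=31557600");
}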
Related
I'm trying to add a Content Security Policy to my app, so I'm using Helmet's CSP. But when I add it and check in the browser / terminal, I see that the Content Security Policy is not getting set, and I'm not able to figure out why.
I have a prod.js module like this:
const helmet = require('helmet');
const compression = require('compression');
const crypto = require("crypto");
module.exports = function (app) {
  app.use(helmet())
  app.use((req, res, next) => {
    res.locals.cspNonce = crypto.randomBytes(16).toString("hex");
    next();
  });
  app.use((req, res, next) => {
    csp({
      useDefaults: true,
      directives: {
        scriptSrc: [
          "'self'",
          'https://js.stripe.com',
          'https://checkout.stripe.com',
          'https://billing.stripe.com',
          'https://www.googletagmanager.com',
          '*.googletagmanager.com',
          (req, res) => `'nonce-${res.locals.cspNonce}'`,
        ],
        styleSrc: ["'unsafe-inline'"],
        connectSrc: ['*', "'self'", 'https://checkout.stripe.com', 'https://billing.stripe.com'],
        frameSrc: ["'self'", 'https://checkout.stripe.com', 'https://billing.stripe.com', 'https://js.stripe.com'],
        imgSrc: ["'self'", 'blob:', 'https://api.wcompany.com/', 'data:'],
      },
    })(req, res, next);
  });
  app.use(compression());
};
and then I have an index.js file like this:
const winston = require('winston');
const express = require('express');
const https = require('https');
const path = require('path');
const fs = require('fs');
const app = express();
require('./startup/routes')(app);
require('./startup/db')();
require('./startup/config')();
require('./startup/validation')();
require('./startup/prod')(app);
const port = process.env.PORT || 3000;
But I can't see the Content-Security-Policy header in the browser. Should I explicitly add it via res.headers to see the CSP in the browser?
Also, in the terminal I checked curl http://localhost:3000 --include:
Access-Control-Allow-Origin: *
Content-Security-Policy: default-src 'none'
X-DNS-Prefetch-Control: off
Expect-CT: max-age=0
X-Frame-Options: SAMEORIGIN
Strict-Transport-Security: max-age=15552000; includeSubDomains
X-Download-Options: noopen
X-Content-Type-Options: nosniff
X-Permitted-Cross-Domain-Policies: none
Referrer-Policy: no-referrer
X-XSS-Protection: 0
Content-Type: text/html; charset=utf-8
Content-Length: 139
Vary: Accept-Encoding
Date: Fri, 25 Feb 2022 15:57:17 GMT
Connection: keep-alive
Keep-Alive: timeout=5
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Error</title>
</head>
<body>
<pre>Cannot GET /</pre>
</body>
</html>
P.S.: I'm quite new to programming, so I appreciate any help.
You are not loading Helmet into your Express application. You need to add something like this:
app.use(helmet())
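If the goal is also to get the custom CSP directives applied, a minimal sketch of a prod.js that mounts Helmet with the CSP options in one place could look like this (assuming helmet v4 or later; the directive values are placeholders to adapt):
const helmet = require('helmet');
const crypto = require('crypto');

module.exports = function (app) {
  // Give every request a nonce that the CSP directive below can reference.
  app.use((req, res, next) => {
    res.locals.cspNonce = crypto.randomBytes(16).toString('hex');
    next();
  });

  // Mount helmet once, passing the CSP options directly.
  app.use(
    helmet({
      contentSecurityPolicy: {
        useDefaults: true,
        directives: {
          scriptSrc: [
            "'self'",
            'https://js.stripe.com',
            (req, res) => `'nonce-${res.locals.cspNonce}'`,
          ],
        },
      },
    })
  );
};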
I'm trying to set an httpOnly cookie from my Node.js API (localhost:3001) to work with my React client app (localhost:3000). Everything I've tried so far results in no cookie being set in my browser. Some key facts about my setup:
The backend is Node, running fastify, fastify-cookie & cors:
// CORS
server.use(
  require('cors')({
    origin: ['https://localhost:3000'],
    optionsSuccessStatus: 200,
    credentials: true
  })
)
// Cookies
server.register(require('fastify-cookie'), {
  secret: process.env.JWT_SECRET
})
// Sending the cookie
reply
  .setCookie('token', token, {
    domain: 'localhost',
    path: '/',
    secure: true,
    sameSite: 'lax',
    httpOnly: true
  })
  .send({ user })
The client is running on https localhost in Chrome, making API calls using fetch.
const fetchUsers = async () => {
  const req = await fetch(`${process.env.USERS_API_BASE}/users`, { credentials: 'include' })
  const res = await req.json()
  console.log(res)
}
Result
No cookie is ever set in my Chrome application inspector, but it is sent to the browser from the server and looks correct:
set-cookie: token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1aWQiOjEsImVtYWlsIjoiaGVsbG9Ac2hhbi5kaWdpdGFsIiwiaWF0IjoxNjIwNDI1ODI0LCJleHAiOjE2MjA0Mjk0MjR9.S8eOQMtSBY85wlenuxjIGYNuk3Ec5cKQ87pAhmCvQ9w.nfRxGzq3IMFimC%2FSJeUH9Xl7bH%2FyXVprwK1NBYfur4k; Domain=localhost; Path=/; HttpOnly; Secure; SameSite=Lax
request.cookies on the server always returns a blank object {}. Any suggestions?
What you are facing is a CORS error, or at least it is categorized as one.
You see, the server seems to think you're making a cross-domain request.
If you log the response headers, this is typically what you would see:
HTTP/1.1 200 OK
Date: Sun, 20 May 2018 20:43:05 GMT
Server: Apache
Set-Cookie: name=value; expires=Sun, 20-May-2018 21:43:05 GMT; Max-Age=3600; path=/; domain=.localHost
Cache-Control: no-cache, private
Access-Control-Allow-Origin: http://localHost:8080
Vary: Origin
X-RateLimit-Limit: 60
X-RateLimit-Remaining: 59
Content-Length: 2
Keep-Alive: timeout=10, max=100
Connection: Keep-Alive
Content-Type: text/html; charset=UTF-8
But when you make the request, you send it something like this:
const res = await axios({ method: 'POST', url: 'http://127.0.0.1:3000/api/v1/users/login', data: { email, password } });
Do you see the problem? 127.0.0.1 != http://localhost:8000, and that mismatch is the cause of your problem.
In short: check the key=value pair of Access-Control-Allow-Origin on your response against the origin of your request. The domain names should match, or the cookie won't be set in the browser.
Here is a GitHub Issue Link for this same problem
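For example, a sketch of the client side (the USERS_API_BASE value here is hypothetical): if the server's CORS origin list says https://localhost:3000, the client should also reach the API by hostname rather than by IP, so the browser sees matching origins, and it should always send credentials:
// Call the API by the same hostname used in the server's CORS config
// (localhost, not 127.0.0.1) and include credentials so the browser
// will store and send the httpOnly cookie.
const USERS_API_BASE = 'https://localhost:3001'; // hypothetical base URL

const fetchUsers = async () => {
  const res = await fetch(`${USERS_API_BASE}/users`, { credentials: 'include' });
  return res.json();
};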
I'm trying to build a Node/Express server that essentially acts as a middleman server and logs any requests that come in, then forwards the request to the destination server, and forwards back any responses that come from the destination server. The goal is to be as "transparent" as possible, making it seem as if there is no middleman server at all.
The problem I'm having is that my express server seems to be dumping in a bunch of unnecessary headers in the response from the destination server.
In my app.js I have some (a lot of) middlewares that are useful for my app in general but seem to inject headers into the response:
app.use(rateLimiter);
app.use(helmet());
app.use(xss());
app.use(mongoSanitize());
app.use(nocache());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
Then I have my middleware endpoint that receives the request, forwards it, and returns the response:
exports.createLink = async (req, res, next) => {
  try {
    const url = 'https://destination-endpoint.com';
    const options = {
      url: url,
    };
    request(options, function (err, remoteResponse, remoteBody) {
      res.writeHead(remoteResponse.statusCode, { ...remoteResponse.headers });
      return res.end(remoteBody);
    });
  } catch (error) {
    console.log(error);
    next(error);
  }
};
If I hit my middleman I get the response:
HTTP/1.1 200 OK
Access-Control-Allow-Origin: http://localhost:3000
Vary: Accept-Encoding
Access-Control-Allow-Credentials: true
X-RateLimit-Limit: 100
X-RateLimit-Remaining: 99
date: Sun, 24 Jan 2021 05:23:07 GMT
X-RateLimit-Reset: 1611465796
Content-Security-Policy: default-src 'self';base-uri 'self';block-all-mixed-content;font-src 'self' https: data:;frame-ancestors 'self';img-src 'self' data:;object-src 'none';script-src 'self';script-src-attr 'none';style-src 'self' https: 'unsafe-inline';upgrade-insecure-requests
X-DNS-Prefetch-Control: off
Expect-CT: max-age=0
X-Frame-Options: SAMEORIGIN
Strict-Transport-Security: max-age=15552000; includeSubDomains
X-Download-Options: noopen
X-Content-Type-Options: nosniff
X-Permitted-Cross-Domain-Policies: none
Referrer-Policy: no-referrer
X-XSS-Protection: 0
Surrogate-Control: no-store
cache-control: no-cache, private
Pragma: no-cache
Expires: 0
server: nginx/1.14.2
content-type: text/plain; charset=UTF-8
transfer-encoding: chunked
connection: close
x-request-id: 864b8443-5fe2-498e-8e88-662035afe6c7
x-token-id: d0cb94e2-9c87-4d6e-b32a-11fcc698ad2c
set-cookie: laravel_session=tBlSCeel0OFIR5pL9C6f02JfXGqoyg3SN6BH6jjG; expires=Sun, 24-Jan-2021 07:23:07 GMT; Max-Age=7200; path=/; httponly
{
"foo": "bar"
}%
While if I hit the endpoint directly I just get this:
HTTP/1.1 200 OK
Server: nginx/1.14.2
Content-Type: text/plain; charset=UTF-8
Transfer-Encoding: chunked
Vary: Accept-Encoding
X-Request-Id: 1f033b57-4a2f-48a1-82b3-41bce6e2d748
X-Token-Id: d0cb94e2-9c87-4d6e-b32a-11fcc698ad2c
Cache-Control: no-cache, private
Date: Sun, 24 Jan 2021 05:24:28 GMT
Set-Cookie: laravel_session=MdOGJ18iDU28XLmNYXf5T2RxSa25KxqFisxzCBzR; expires=Sun, 24-Jan-2021 07:24:28 GMT; Max-Age=7200; path=/; httponly
{
"foo": "bar"
}%
As you can see, there is a lot of extra stuff added to the headers. Some of it may be solvable by removing middlewares, etc., but I think the bigger issue is that I want to return the response from the destination server AS IS, no matter what. I found that I can delete the current headers before sending like this:
request(options, function (err, remoteResponse, remoteBody) {
  const headers = res.getHeaders();
  for (const head in headers) {
    res.removeHeader(head);
  }
  res.writeHead(remoteResponse.statusCode, { ...remoteResponse.headers });
  return res.end(remoteBody);
});
But that seems very heavy-handed; there has to be an easier way to overwrite/set all response headers to exactly what I need. So my overall question is:
How do I return the response from the request to the destination server EXACTLY as is?
Yes, I know you can normally fix these by setting the origin wildcard to '*', but I'm using "credentials: true", so that's thrown my normal solution out the window.
The issue I'm having is that when I try to perform a POST request to Express, it seems to first submit as an OPTIONS, then get redirected as a GET, then redirected again as a POST.
All my "Access-Control-Allow-XXXX" headers appear in the original OPTIONS but then get lost in the redirects.
The error being shown by the browser is:
Access to XMLHttpRequest at 'http://localhost:4200/' (redirected from 'http://localhost:3000/api/save') from origin 'null' has been blocked by CORS policy: Request header field access-control-allow-origin is not allowed by Access-Control-Allow-Headers in preflight response.
Here's my Express CORS setup. The dynamic origin was set because I thought the redirect might have been looking for the API address as a possible origin, and then undefined and null were added because I got annoyed with it:
var app = express();
var port = process.env.PORT || 3000;
whitelist = [UI_BASE_URL, API_BASE_URL, undefined, null];
app.use(
  cors({
    allowedHeaders: [
      'Origin',
      'X-Requested-With',
      'Content-Type',
      'Accept',
      'X-Access-Token',
      'Access-Control-Allow-Origin',
      'Access-Control-Allow-Headers',
      'Access-Control-Allow-Methods'
    ],
    preflightContinue: false,
    credentials: true,
    origin: function(origin, callback) {
      if (whitelist.indexOf(origin) !== -1) {
        callback(null, true);
      } else {
        callback(new Error('Not allowed by CORS'));
      }
    },
    methods: 'GET,HEAD,OPTIONS,PUT,PATCH,POST,DELETE',
  })
);
And here's the service call from Angular:
const httpOptions = {
  headers: new HttpHeaders({
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Methods': 'POST, GET, OPTIONS, PUT',
    'Access-Control-Allow-Headers': 'Origin, X-Requested-With, Content-Type, Accept'
  }),
};

save(myData): Observable<any> {
  return this.http.post(
    this.workSpaceReportURL + 'save',
    {
      data: myData,
      withCredentials: true,
    },
    httpOptions
  );
}
And, because they might be useful, here are the response and request headers from the POST.
RESPONSE
Access-Control-Allow-Credentials: true
Access-Control-Allow-Origin: http://localhost:4200
Cache-Control: no-store, no-cache, must-revalidate, proxy-revalidate
Connection: keep-alive
Content-Length: 43
Content-Type: text/plain; charset=utf-8
Date: Mon, 11 Nov 2019 14:50:31 GMT
Expires: 0
Location: http://localhost:4200
Pragma: no-cache
Set-Cookie: connect.sid=s%3Aj3wYyE_I14o6b_C-L6EDnOxesW0CBVks.WEwBNiQegZ1ufG2d40%2BF4BGfbCWPbOz9FS4Kj%2BC14Pc; Path=/; Expires=Mon, 18 Nov 2019 13:30:31 GMT; HttpOnly
Strict-Transport-Security: max-age=15552000; includeSubDomains
Surrogate-Control: no-store
Vary: Origin, Accept
X-Content-Type-Options: nosniff
X-DNS-Prefetch-Control: off
X-Download-Options: noopen
X-Frame-Options: SAMEORIGIN
X-XSS-Protection: 1; mode=block
REQUEST
Accept: application/json, text/plain, */*
Accept-Encoding: gzip, deflate, br
Accept-Language: en-US,en;q=0.9
Access-Control-Allow-Headers: Origin, X-Requested-With, Content-Type, Accept
Access-Control-Allow-Methods: POST, GET, OPTIONS, PUT
Access-Control-Allow-Origin: *
Connection: keep-alive
Content-Length: 184
Content-Type: application/json
Host: localhost:3000
Origin: http://localhost:4200
Referer: http://localhost:4200/data-controller
Sec-Fetch-Mode: cors
Sec-Fetch-Site: same-site
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36
I've tried a number of different solutions I've come across on SO, but they normally end up being fixed by the usual wildcard origin.
Thanks for any suggestions you can make.
It looks like you used an old way to add CORS headers. Have you tried doing this?
var cors = require('cors')
app.use(cors())
Make sure to run app.use(cors()) before your other app.use calls.
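A minimal ordering sketch (the routes module is a placeholder):
const express = require('express');
const cors = require('cors');

const app = express();

app.use(cors());              // mount CORS first so preflight responses get the headers
app.use(express.json());      // then body parsing and any other middleware
app.use('/api', require('./routes')); // hypothetical routes module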
OK, I think I realised what the solution was. I'd just spent so long staring at it that I couldn't see it.
I'd managed to strip out the Content-Type without noticing, and was passing withCredentials in as part of the body, not the httpOptions (which I think was my original problem).
In case it helps someone else out a few years from now, the httpOptions and the post should've looked like the code below.
I'm now going to go stand in the server room of shame.
const httpOptions = {
  headers: new HttpHeaders({
    'Content-Type': 'application/json'
  }),
  withCredentials: true,
};

save(myData): Observable<any> {
  return this.http.post(
    this.workSpaceReportURL + 'save',
    {
      myData,
    },
    httpOptions
  );
}
I'm missing a Content-Length header on the response from a Node server through which I'm piping a .zip file from another location. I've injected a Content-Length header via the code below, but it still seems that transfer-encoding: chunked is overriding it somehow.
Response Headers
HTTP/1.1 200 OK
access-control-allow-origin: *
connection: close
content-type: application/zip
date: Mon, 14 Jul 2014 03:47:00 GMT
etag: "\"eb939974703e14ee9f578642972ed984\""
last-modified: Sat, 12 Jul 2014 02:15:52 GMT
server: Apache-Coyote/1.1
set-cookie: rememberMe=deleteMe; Path=/; Max-Age=0; Expires=Sun, 13-Jul-2014 03:47:00 GMT
transfer-encoding: chunked
X-Powered-By: Express
Code
var request = require('request');
var express = require('express');
var async = require('async');
var app = express();
app.get('/:bundle_id?', function(req, res) {
  var bundle_id = req.params.bundle_id;
  bundle_id = bundle_id.replace(/\.zip$/, '');
  var url = "https://url....../bundles/" + bundle_id;
  async.waterfall([
    function(callback) {
      request.get(url, function(req, res, data) {
        callback(null, JSON.parse(data).entities[0]['file-metadata']['content-length']);
      });
    }
  ], function(err, contentLength) {
    request.get({
      url: url,
      headers: {
        "Accept": "application/zip"
      }
    }).pipe(res);
    res.oldWriteHead = res.writeHead;
    res.writeHead = function(statusCode, reasonPhrase, headers) {
      res.header('Content-Length', contentLength);
      res.oldWriteHead(statusCode, reasonPhrase, headers);
    }
  });
});
app.listen(9000);
Turns out this was actually a rather simple fix: setting the transfer-encoding header to an empty string in the response solved the problem:
...
res.oldWriteHead = res.writeHead;
res.writeHead = function(statusCode, reasonPhrase, headers) {
  res.header('Content-Length', contentLength);
  res.header('transfer-encoding', ''); // <-- add this line
  res.oldWriteHead(statusCode, reasonPhrase, headers);
}
...
The reason this works: after doing some digging, it appears the Transfer-Encoding header replaces Content-Length (since both can't co-exist). It just so happens that the clients I was using to test were choosing chunked transfer encoding over Content-Length.
If you define a Content-Length, Transfer-Encoding will no longer be set to "chunked".
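As a plain Node core illustration of that rule (separate from the proxy code above): once an explicit Content-Length is set before the body is written, Node sends that header instead of switching the response to chunked encoding:
const http = require('http');

http.createServer((req, res) => {
  const body = 'hello world';
  res.setHeader('Content-Type', 'text/plain');
  // With an explicit Content-Length, Node does not add Transfer-Encoding: chunked.
  res.setHeader('Content-Length', Buffer.byteLength(body));
  res.end(body);
}).listen(9000);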