I have been working with JupyterHub's configurable-http-proxy, adding the options needed for the proxy to handle clients' SSL certificates without having to use the command-line options.
My main goal is to take an incoming client request to the proxy and add the client's certificate information to a header. Once it is in the header, I will use JupyterHub's authenticator to craft a username.
My issue is that when I use the proxy.on('proxyReq', ...) hook that http-proxy provides to set the header, I get this error: [Error: Can't set headers after they are sent.]
I have been looking all over the code to see where a response/request is being written or sent, but I cannot find it.
Here is the ConfigurableProxy function code, I can give you more if needed:
function ConfigurableProxy (options) {
    var that = this;
    this.options = options || {};
    this.trie = new trie.URLTrie();
    this.auth_token = this.options.auth_token;
    this.includePrefix = options.includePrefix === undefined ? true : options.includePrefix;
    this.routes = {};
    this.host_routing = this.options.host_routing;
    this.error_target = options.error_target;
    if (this.error_target && this.error_target.slice(-1) !== '/') {
        this.error_target = this.error_target + '/'; // ensure trailing /
    }
    this.error_path = options.error_path || path.join(__dirname, 'error');

    if (this.options.default_target) {
        this.add_route('/', {
            target: this.options.default_target
        });
    }

    options.ws = true;
    options.secure = true;

    // These are the SSL options
    options.ssl = {
        // Right now the key and cert are hard-coded paths on my computer,
        // but these can be changed.
        key: fs.readFileSync('/Users/grantherman/Desktop/jupyterHubCSProject/ssl/server.key'),
        cert: fs.readFileSync('/Users/grantherman/Desktop/jupyterHubCSProject/ssl/server.crt'),
        requestCert: true,
        // Right now this is set to false, but if we add a CA to these options
        // and set this to true, the proxy will reject all unknown SSL certs
        rejectUnauthorized: false
    };

    var response = [];
    var data = [];
    var proxy = this.proxy = httpProxy.createProxyServer(options);

    proxy.on('proxyReq', function (proxyReq, req, res, options) {
        console.log("proxy request");
        try {
            proxyReq.setHeader('X-Special-Proxy-Header', req.socket.getPeerCertificate());
        } catch (err) {
            console.log(err);
        }
    });

    proxy.on('data', function (chunk, req, res, options) {
        data.push(chunk);
    });

    proxy.on('proxyRes', function (proxyRes, req, res, options) {
        response.push(proxyRes);
    });

    proxy.on('error', function (error, req, res, options) {
        log.add(error);
    });

    proxy.on('close', function (req, socket, head) {
        // view disconnected websocket connections
        console.log('Client disconnected');
    });

    // tornado-style regex routing,
    // because cross-language cargo-culting is always a good idea
    this.api_handlers = [
        [ /^\/api\/routes(\/.*)?$/, {
            get : bound(this, authorized(this.get_routes)),
            post : json_handler(bound(this, authorized(this.post_routes))),
            'delete' : bound(this, authorized(this.delete_routes))
        } ]
    ];
I think this is going to require modifications to configurable-http-proxy itself. The place to add headers is on the original req object, prior to initiating the proxied request, in handle_proxy (shown below).
It would look something like:
ConfigurableProxy.prototype.handle_proxy = function (kind, req, res) {
    ...
    req.headers['X-My-Header'] = 'My-Value';

    // dispatch the actual method
    this.proxy[kind].apply(this.proxy, args);
Adding a hook to CHP for modifying the request at that point, on its way through, should make this doable without users having to modify the CHP source themselves.
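Purely as an illustration of such a hook (the option name requestHook is hypothetical, not part of CHP's actual API), the idea would be for handle_proxy to call out to user code just before dispatching:

// Hypothetical sketch -- `requestHook` is an assumed option name, not a real CHP feature
ConfigurableProxy.prototype.handle_proxy = function (kind, req, res) {
    // ... existing routing logic ...

    // let the embedding application mutate the request before it is proxied
    if (typeof this.options.requestHook === 'function') {
        this.options.requestHook(req, res);
    }

    // dispatch the actual method
    this.proxy[kind].apply(this.proxy, args);
};

// Usage from the embedding application, e.g. to forward the client certificate:
var proxy = new ConfigurableProxy({
    requestHook: function (req, res) {
        req.headers['X-Special-Proxy-Header'] = JSON.stringify(req.socket.getPeerCertificate());
    }
});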
Related
I have a node.js / Polymer 1 website. I am using HTTP-proxy-middleware to route api calls (/api/webapi) to my backend API server.
On one of the pages I have a vaadin-upload (v2.3.0) component that sends files to the api. Everything appears to work fine when running on local host but when I deploy to our test servers I am experiencing issues. Either the upload completes quickly and then sits "processing" for a long time or it stalls.
Using Postman I have managed to send a file to the API directly and to the proxy server. I have also managed to get the upload component to call the API directly. All these cases work correctly, and the output from the API suggests that in all cases the API is receiving/processing data at the same rate. From this I have narrowed it down to an interaction between vaadin-upload and http-proxy-middleware.
Does anyone have experience with this who can help me configure the proxy correctly?
proxy configuration:
const url = require('url');
var hpmproxy = require('http-proxy-middleware');
var config = require('../config');

// Adds user authorization token from passport to request
var addAuthTokenMiddleware = function (req, res, next) {
    if (req.session && req.isAuthenticated()) {
        req.headers['authorization'] = 'Bearer ' + req.user.token;
        next();
    } else {
        res.status(401).end(); // reject unauthenticated requests
    }
};

function isLoggedIn(req, res, next) {
    // if user is authenticated in the session, carry on
    if (req.session && req.isAuthenticated())
        return next();
    res.status(403).end();
}

function restream(proxyReq, req) {
    if (isMultipartRequest(req))
        console.log('Multipart');
    if (!isEmpty(req.body)) {
        console.log("parse");
        var bodyData = JSON.stringify(req.body);
        proxyReq.setHeader('Content-Type', 'application/json');
        proxyReq.setHeader('Content-Length', Buffer.byteLength(bodyData));
        proxyReq.write(bodyData);
    }
    console.log("-->[proxyReq]----", proxyReq.path, proxyReq.getHeader('Content-Type'));
}

function handleResponse(proxyRes, req, res) {
    console.log('---[proxyRes]<---', proxyRes.req.method, proxyRes.req.path, proxyRes.statusCode);
}

function isMultipartRequest(req) {
    let contentTypeHeader = req.headers['content-type'];
    return contentTypeHeader && contentTypeHeader.indexOf('multipart') > -1;
}

function isEmpty(obj) {
    for (var prop in obj) {
        if (obj.hasOwnProperty(prop))
            return false;
    }
    return JSON.stringify(obj) === JSON.stringify({});
}

var options = {
    target: config.webApiHost,
    changeOrigin: true, // needed for virtual hosted sites
    pathRewrite: {
        '^/api/webapi/': config.webApiPath
    },
    secure: !config.selfSigned,
    onProxyRes: handleResponse,
    onProxyReq: restream
    // ,logLevel: 'debug'
};

var hpmApiProxy = hpmproxy(options);

module.exports = function (app, passport, config) {
    app.use('/api/webapi/', isLoggedIn, addAuthTokenMiddleware, hpmApiProxy);
    console.log(' WebAPI Proxy Loaded');
}
I need to modify the request body asynchronously. Something along the lines of this:
proxy.on('proxyReq', function (proxyReq, req, res, options) {
    if (req.body) {
        new Promise(function (resolve) {
            setTimeout(function () { // wait for the db to return
                'use strict';
                req.body.text += 'test';
                let bodyData = JSON.stringify(req.body);
                proxyReq.setHeader('Content-Type', 'application/json');
                proxyReq.setHeader('Content-Length', Buffer.byteLength(bodyData));
                // stream the content
                proxyReq.write(bodyData);
                resolve();
            }, 1);
        });
    }
});
When I run this I get the error saying I cannot modify headers once they have been sent, which makes sense.
How can I halt the sending of the request until I'm ready? I've looked at removing various listeners from proxyReq, without success.
By looking at the source code, it seems like it's not really possible, because the proxyReq event is emitted and then the code simply moves on.
If it instead waited for a promise, it would be possible (if you returned that promise as well).
A minimal fork of this lib could, for example, look like:
// Enable developers to modify the proxyReq before headers are sent
proxyReq.on('socket', function (socket) {
    if (server) { server.emit('proxyReq', proxyReq, req, res, options); }
});

(proxyReq.proxyWait || Promise.resolve())
    .then( ... // rest of the code inside the callback
And then
proxy.on('proxyReq', function (proxyReq, req, res, options) {
    if (req.body) {
        proxyReq.proxyWait = new Promise(function (resolve) {
            setTimeout(function () { ...
But depending on your use case, there might be other solutions as well. For example, consider whether it's really necessary to use this proxy library. You could alternatively use http directly, where you have full control over the events and callbacks, as sketched below.
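As a rough sketch of that alternative (assuming an Express app with a parsed req.body and a backend at localhost:3000; the host, port, and the doSomeAsyncWork helper are made up for illustration), you do the asynchronous work first and only then issue the outgoing request yourself with the built-in http module:

const http = require('http');

app.use('/api', (req, res) => {
    // doSomeAsyncWork is a stand-in for whatever async step (e.g. the db call) you need
    doSomeAsyncWork(req.body).then(body => {
        const bodyData = JSON.stringify(body);

        const upstream = http.request({
            host: 'localhost', // hypothetical backend
            port: 3000,
            path: req.url,
            method: req.method,
            headers: {
                ...req.headers,
                'content-type': 'application/json',
                'content-length': Buffer.byteLength(bodyData),
            },
        }, upstreamRes => {
            // relay status, headers, and body back to the original client
            res.writeHead(upstreamRes.statusCode, upstreamRes.headers);
            upstreamRes.pipe(res);
        });

        upstream.on('error', () => {
            res.statusCode = 502;
            res.end();
        });

        upstream.end(bodyData);
    });
});

Nothing is flushed to the upstream socket until upstream.end() is called, so the "headers already sent" problem does not come up here.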
You can set selfHandleResponse: true inside the HttpProxy.createProxyServer. This then allows (and forces) you to handle the proxyRes manually!
const proxy = HttpProxy.createProxyServer({ selfHandleResponse: true });

proxy.on('proxyRes', async (proxyRes, req, res, options) => {
    if (proxyRes.statusCode === 404) {
        req.logger.debug('Proxy Request Returned 404');
        const something = await doSomething(proxyRes);
        return res.json(something);
    }
    // otherwise stream the original proxy response through unchanged
    proxyRes.pipe(res);
});
I came here looking for the solution to a slightly different problem: Modifying the request headers (not body) before proxying.
I post this here in case it is helpful to others; maybe the code can also be adapted to modify the request body.
const http = require('http');
const httpProxy = require('http-proxy');

var proxy = httpProxy.createProxyServer({});

var server = http.createServer(function (req, res) {
    console.log(`${req.url} - sleeping 1s...`);
    setTimeout(() => {
        console.log(`${req.url} - processing request`);
        req.headers['x-example-req-async'] = '456';
        proxy.web(req, res, {
            target: 'http://127.0.0.1:80'
        });
    }, 1000);
});

server.listen(5050);
I have a small proxy for certain requests in Express. Using the request library, I have fairly concise code:
app.use('/api', function (req, res) {
    var url = rewriteUrl(req.url);
    var newReq = request(url, function (error) {
        if (error) {
            logError(error);
        }
    });
    req.pipe(newReq).pipe(res);
});
My problem is that the response from the API server contains a bunch of unwanted headers that I want to remove. How can I remove the headers from the response of newReq before piping it to res?
mscdex's answer did work for me, but I found a way that I think is slightly cleaner. In my original code, I had this line:
req.pipe(newReq).pipe(res);
I replaced that with these lines:
req.pipe(newReq).on('response', function (res) {
    delete res.headers['user-agent'];
    // ...
}).pipe(res);
With the request module, there currently isn't a way (AFAIK) to have a callback and not buffer the server response. So here is how you might do it with the built-in http.request:
app.use('/api', function (req, res) {
    var url = rewriteUrl(req.url);
    var newReq = http.request(url, function (newRes) {
        var headers = newRes.headers;
        // modify `headers` here ...
        res.writeHead(newRes.statusCode, headers);
        newRes.pipe(res);
    }).on('error', function (err) {
        res.statusCode = 500;
        res.end();
    });
    req.pipe(newReq);
});
It is easy with request.
request("https://example.com/image.png")
.on("response", remoteRes => {
// You can add/remove/modify headers here
remoteRes.headers["content-disposition"] = "attachment; filename=awesome.png";
})
.pipe(res);
There is a more elegant way to modify/remove headers, by setting a pipe filter as follows:
const req = request.get(url);
req.pipefilter = function (response, dest) {
    // remove headers
    for (const h in response.headers) {
        dest.removeHeader(h);
    }
    // or modify
    dest.setHeader('Content-Type', 'text/html');
};
req.pipe(resp);
I'm using node.js and I need to get my external IP address, provided by my ISP.
Is there a way to achieve this without using a service like http://myexternalip.com/raw ?
Thanks.
You can do the same as what they do in Python to get the external IP: connect to some website and read your details from the socket connection:

const net = require('net');

const client = net.connect({ port: 80, host: "google.com" }, () => {
    console.log('MyIP=' + client.localAddress);
    console.log('MyPORT=' + client.localPort);
});

*Unfortunately I cannot find the original Python example anymore for reference.
Update 2019:
Using built-in http library and public API from https://whatismyipaddress.com/api
const http = require('http');

var options = {
    host: 'ipv4bot.whatismyipaddress.com',
    port: 80,
    path: '/'
};

http.get(options, function (res) {
    console.log("status: " + res.statusCode);
    res.on("data", function (chunk) {
        console.log("BODY: " + chunk);
    });
}).on('error', function (e) {
    console.log("error: " + e.message);
});
Tested with Node.js v0.10.48 on Amazon AWS server
--
Update 2021
ipv4bot is closed, here is another public API:
var http = require('http');

http.get({ 'host': 'api.ipify.org', 'port': 80, 'path': '/' }, function (resp) {
    resp.on('data', function (ip) {
        console.log("My public IP address is: " + ip);
    });
});
--
Update 2022
ChatGPT wrote a longer example using ipify with JSON: *Yes, I've tested it.
https://gist.github.com/unitycoder/745a58d562180994a3025afcb84c1753
More info https://www.ipify.org/
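For completeness, a small sketch along those lines using only the built-in https module and ipify's JSON endpoint (api.ipify.org?format=json responds with a JSON object containing an ip field):

const https = require('https');

https.get('https://api.ipify.org?format=json', res => {
    let body = '';
    res.on('data', chunk => { body += chunk; });
    res.on('end', () => {
        // response looks like {"ip":"203.0.113.42"}
        console.log('My public IP address is: ' + JSON.parse(body).ip);
    });
}).on('error', err => {
    console.log('error: ' + err.message);
});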
npm install --save public-ip from here.
Then
publicIp.v4().then(ip => {
console.log("your public ip address", ip);
});
And if you want the local machine ip you can use this.
var ip = require("ip");
var a = ip.address();
console.log("private ip address", a);
Use my externalip package on GitHub
externalip(function (err, ip) {
console.log(ip); // => 8.8.8.8
});
Edit: This was written back in 2013... The site is gone. I'm leaving the example request code for now unless anyone complains but go for the accepted answer.
http://fugal.net/ip.cgi was similar to that one.
or you can
require('http').request({
    hostname: 'fugal.net',
    path: '/ip.cgi',
    agent: false
}, function (res) {
    if (res.statusCode != 200) {
        throw new Error('non-OK status: ' + res.statusCode);
    }
    res.setEncoding('utf-8');
    var ipAddress = '';
    res.on('data', function (chunk) { ipAddress += chunk; });
    res.on('end', function () {
        // ipAddress contains the external IP address
    });
}).on('error', function (err) {
    throw err;
}).end();
Ref: http://www.nodejs.org/api/http.html#http_http_request_options_callback
This should work well without any external dependencies (with the exception of ipify.org):
var https = require('https');

var callback = function (err, ip) {
    if (err) {
        return console.log(err);
    }
    console.log('Our public IP is', ip);
    // do something here with the IP address
};

https.get({
    host: 'api.ipify.org',
}, function (response) {
    var ip = '';
    response.on('data', function (d) {
        ip += d;
    });
    response.on('end', function () {
        if (ip) {
            callback(null, ip);
        } else {
            callback('could not get public ip address :(');
        }
    });
});
You could also use https://httpbin.org
GET https://httpbin.org/ip
Simply use superagent
var superagent = require('superagent');

var getip = function () {
    superagent
        .get('http://ip.cn/')
        .set('User-Agent', 'curl/7.37.1')
        .end(function (err, res) {
            if (err) {
                console.log(err);
            }
            var ip = res.text.match(/\d+\.\d+\.\d+\.\d+/)[0];
            console.log(ip);
            // Here is the result
        });
};
Another little node module is ext-ip. The difference is that you can use different response options, matching your coding style. It's ready to use out of the box...
Promise
let extIP = require('ext-ip')();

extIP.get().then(ip => {
    console.log(ip);
})
.catch(err => {
    console.error(err);
});

Events

let extIP = require('ext-ip')();

extIP.on("ip", ip => {
    console.log(ip);
});

extIP.on("err", err => {
    console.error(err);
});

extIP();

Callback

let extIP = require('ext-ip')();

extIP((err, ip) => {
    if (err) {
        throw err;
    }
    console.log(ip);
});
The simplest answer, based on experience, is that in most cases you can't get your external IP without using an external service, since you'll typically be behind NAT or shielded by a firewall. I say in most cases because there may be situations where you can get it from your router, but that is too case-specific to provide a general answer.
What you want is simply to choose your favourite HTTP client in Node.js and find a maintained server that responds with the IP address in the body. You can also use a package, but you should check whether it is still using a maintained remote server.
While there are plenty of examples already, here is one that first tries IPv6 and then falls back to IPv4. It leverages axios, since that is what I am comfortable with. Also, unless the optional parameter debug is set to true, the result is either a value or undefined.
const axios = require('axios');

// replace these URLs with whatever is good for you
const remoteIPv4Url = 'http://ipv4bot.whatismyipaddress.com/';
const remoteIPv6Url = 'http://ipv6bot.whatismyipaddress.com/';

// Try getting an external IPv4 address.
async function getExternalIPv4(debug = false) {
    try {
        const response = await axios.get(remoteIPv4Url);
        if (response && response.data) {
            return response.data;
        }
    } catch (error) {
        if (debug) {
            console.log(error);
        }
    }
    return undefined;
}

// Try getting an external IPv6 address.
async function getExternalIPv6(debug = false) {
    try {
        const response = await axios.get(remoteIPv6Url);
        if (response && response.data) {
            return response.data;
        }
    } catch (error) {
        if (debug) {
            console.log(error);
        }
    }
    return undefined;
}

async function getExternalIP(debug = false) {
    let address;
    // Try IPv6 and then IPv4
    address = await getExternalIPv6(debug);
    if (!address) {
        address = await getExternalIPv4(debug);
    }
    return address;
}

module.exports = { getExternalIP, getExternalIPv4, getExternalIPv6 };
Feel free to suggest improvements.
You may use the request-ip package:
const requestIp = require('request-ip');
// inside middleware handler
const ipMiddleware = function(req, res, next) {
const clientIp = requestIp.getClientIp(req);
next();
};
My shameless plug: canihazip (disclosure: I'm the author of the module, but not of the main page).
It can be required as a module, exposing a single function that can optionally be passed a callback function, and it returns a promise.
It can also be installed globally and used as a CLI.
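Based on that description (I have not verified the exact call signature, so treat this as an assumption rather than the package's documented API), usage might look roughly like:

const canihazip = require('canihazip');

// promise style (assumed)
canihazip().then(ip => console.log(ip));

// callback style (assumed)
canihazip((err, ip) => {
    if (err) throw err;
    console.log(ip);
});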
You could very easily use an API solution for retrieving the external IP!
I made an IP tracker site for this kind of thing a few days ago!
Here is a snippet of code you could use to get the IP!
async function getIp(cb) {
    let output = null;
    let promise = new Promise(resolve => {
        let http = new XMLHttpRequest();
        http.onreadystatechange = function () {
            if (this.readyState == 4 && this.status == 200) {
                output = this.responseText;
                resolve("done");
            }
        };
        http.open("GET", "https://iptrackerz.herokuapp.com/ip", true);
        http.send();
    });
    await promise;
    if (cb != undefined) {
        cb(JSON.parse(output)["ip"]);
    } else {
        return JSON.parse(output)["ip"];
    }
}
Ok, now you have the function getIp()!
The way I coded it allows for 2 different ways of invoking it!
Here they are.
Asynchronous
async function printIP() {
let ip = await getIp();
document.write("Your IP is " + ip);
};
printIP();
Callback
getIp(ip => {
document.write("Your IP is " + ip);
});
I was looking for a solution that does not rely on others' libraries/resources, and found this to be an acceptable alternative:
just a GET request to an external server (under my control), where I read req.headers['x-forwarded-for'] and serve it back to my client.
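A minimal sketch of what that server side could look like, assuming an Express app sitting behind a reverse proxy that sets x-forwarded-for (the /myip path and the port are made up for illustration):

const express = require('express');
const app = express();

app.get('/myip', (req, res) => {
    // behind a reverse proxy, x-forwarded-for carries the original client address
    // (possibly a comma-separated list); otherwise fall back to the socket address
    const ip = req.headers['x-forwarded-for'] || req.socket.remoteAddress;
    res.send(ip);
});

app.listen(3000);

The Node client then just issues a GET to that endpoint and reads the body.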
Node.js has a lot of great built-in modules you can use without including any external dependencies. You can make this file:
WhatsMyIpAddress.js
const http = require('http');

function WhatsMyIpAddress(callback) {
    const options = {
        host: 'ipv4bot.whatismyipaddress.com',
        port: 80,
        path: '/'
    };
    http.get(options, res => {
        res.setEncoding('utf8');
        res.on("data", chunk => callback(chunk, null));
    }).on('error', err => callback(null, err.message));
}

module.exports = WhatsMyIpAddress;
Then call it in your main.js like this.
main.js
const WhatsMyIpAddress = require('./src/WhatsMyIpAddress');
WhatsMyIpAddress((data,err) => {
console.log('results:', data, err);
});
You can use the nurl library's command ippublic to get this (disclosure: I made nurl):
> npm install nurl-cli -g
> ippublic;
// 50.240.33.6
Regular client-initiated requests to the node server are captured fine in Fiddler. However, requests sent from node to a web service are not captured. It did not help to pass proxy config (127.0.0.1:8888) to the request method. How can I route the request messages through Fiddler?
var http = require('http');
var request = require('request');

request.get(webserviceURL, {
    "auth": { "user": "user", "pass": "pass", sendImmediately: true },
    "proxy": { "host": "127.0.0.1", "port": 8888 }
},
function (error, response) {
    console.log("response received");
});
Request repo: https://github.com/mikeal/request
I just tried to do this myself (using Fiddler and the request library from npm). Here's how I got it working:
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'; // Ignore 'UNABLE_TO_VERIFY_LEAF_SIGNATURE' authorization error

// Issue the request
request(
    {
        method: "GET",
        uri: "https://secure.somewebsite.com/",
        proxy: "http://127.0.0.1:8888" // Note the fully-qualified path to Fiddler proxy. No "https" is required, even for https connections to outside.
    },
    function (err, response, body) {
        console.log("done");
    });
This is with Fiddler2 using the default port and proxy options (and no proxy authentication).
Fiddler works by setting your "Internet Options" (from start menu) "Connections" > "LAN Settings" > "Proxy Server" to its port, thus making all HTTP traffic (clients which obey this setting) go through it.
You should point your node.js client lib to use a proxy; the settings are shown in that options dialog after you start Fiddler.
The proxy option should be a full url, like this:
proxy : "http://127.0.0.1:8888"
To do this on an ad-hoc basis, without changing your code, you can use environment variables.
Request respects:
HTTP_PROXY
HTTPS_PROXY
NO_PROXY
So, to proxy just set these in your console before running your process.
For example, to set up the http and https proxy use:
set HTTP_PROXY="http://127.0.0.1:8888"
set HTTPS_PROXY="http://127.0.0.1:8888"
set NODE_TLS_REJECT_UNAUTHORIZED=0
The latter line stops issues with SSL through the fiddler proxy.
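If you would rather not touch the console environment, a sketch of the same idea from inside the Node process (request picks the proxy settings up from process.env when each request is made):

// set before issuing any requests through the `request` library
process.env.HTTP_PROXY = 'http://127.0.0.1:8888';
process.env.HTTPS_PROXY = 'http://127.0.0.1:8888';
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0'; // avoid SSL errors through the Fiddler proxy

const request = require('request');
request('https://secure.somewebsite.com/', (err, res) => {
    console.log(err || res.statusCode);
});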
I've been wanting the same... an equivalent of the Network tab in Chrome DevTools, only for Node.js. Unfortunately, it doesn't appear as though one exists. I don't have Fiddler on macOS, so this is how I went about stubbing the require('http') methods to log and pass through. Leaving this here in case I need it again or someone else finds it helpful. You can turn it on by attaching a debugger and calling require('filename')() on the file containing this script.
module.exports = () => {
    const http = require('http');
    http._request = http.request;
    global.DO_LOG_AJAX = true;

    const log = str => {
        if (global.DO_LOG_AJAX) {
            console.debug(str);
        }
    };

    const flushLog = (requestLines, responseLines) => {
        if (global.DO_LOG_AJAX) {
            log([
                '----------------Begin Request-----------------------------------',
                ...requestLines,
                '----------------End Request / Begin Response--------------------',
                ...responseLines,
                '----------------End Response------------------------------------',
            ].join('\n'));
        }
    };

    let write;
    let end;

    http.request = (...requestParams) => {
        const req = http._request(...requestParams);
        const { method, path, headers, host, port } = requestParams[0];
        const requestLogLines = [];
        requestLogLines.push(`${method} ${path}`);
        requestLogLines.push(`Host: ${host}:${port}`);
        for (const header of Object.keys(headers)) {
            requestLogLines.push(`${header}: ${headers[header]}`);
        }
        write = write || req.write;
        end = end || req.end;
        req.on('error', err => {
            log({ err });
        });
        req._write = write;
        req._end = end;
        const requestBody = [];
        req.write = (...writeParams) => {
            requestBody.push(writeParams[0].toString());
            return req._write(...writeParams);
        };
        req.end = (...endParams) => {
            if (endParams[0]) {
                requestBody.push(endParams[0].toString());
            }
            requestLogLines.push('');
            requestLogLines.push(requestBody.join(''));
            return req._end(...endParams);
        };
        const responseLogLines = [];
        req.once('response', response => {
            const responseBody = [];
            responseLogLines.push(`${response.statusCode} ${response.statusMessage}`);
            for (const header of Object.keys(response.headers)) {
                responseLogLines.push(`${header}: ${response.headers[header]}`);
            }
            const onData = chunk => {
                responseBody.push(chunk.toString());
            };
            const onClose = err => {
                responseLogLines.push('');
                responseLogLines.push(responseBody.join(''));
                responseLogLines.push('');
                responseLogLines.push(`--- ERROR --- ${err.toString()}`);
                flushLog(requestLogLines, responseLogLines);
                response.removeListener('data', onData);
            };
            const onEnd = () => {
                responseLogLines.push('');
                responseLogLines.push(responseBody.join(''));
                flushLog(requestLogLines, responseLogLines);
                response.removeListener('data', onData);
            };
            response.on('data', onData);
            response.once('close', onClose);
            response.once('end', onEnd);
        });
        return req;
    };
};