I'm using http-proxy to create a simple proxy: localhost:3000?q=${encodeURIComponent(targetURL)} will access targetURL using the proxy.
Here's my working code:
var http = require("http");
var httpProxy = require("http-proxy");
//create a server object:
http
.createServer(function (req, res) {
try {
// Get the `?q=` query param.
const url = req.query.q;
const parsed = new URL(decodeURIComponents(url));
const proxy = httpProxy.createProxyServer();
// Fix URLs and query params forwarding.
// https://stackoverflow.com/questions/55285448/node-http-proxy-how-to-pass-new-query-parameters-to-initial-request
proxy.on("proxyReq", function (proxyReq) {
proxyReq.path = parsed.toString();
});
proxy.on("end", () => res.end());
// Avoid the following error:
// "Error [ERR_TLS_CERT_ALTNAME_INVALID]: Hostname/IP does not match certificate's altnames"
req.headers["host"] = parsed.host || undefined;
proxy.web(req, res, { target: url }, (err) => {
throw err;
});
} catch (err) {
res.write(JSON.stringify({ error: err.message }));
res.end();
}
})
.listen(8080); //the server object listens on port 8080
When I visit localhost:3000?q=https://google.com, everything works. However, if I click a link on the proxied website, the route changes on my hostname directly, not inside the query param.
So:
I go to localhost:3000?q=https://google.com
I click on "Images', which should bring me to localhost:3000?q=https://google.com/images
instead, it brings me to localhost:3000/images?q=https://google.com, which 404s
How do I solve navigation in the target website?
Related
I'm trying to get data in JSON format. I just copied an old project and changed the database IP address, username, port, password, and database name.
When I try to access the data through this address: localhost:3000/&id=13
The browser just doesn't load it.
When I enter the address with the port but without the /, I see the error message from this line (the Bulgarian text means "Invalid request. Try again!"):
return res.status(500).json({ error: "Грешна заявка. Опитай отново !"})
The same code pointed at another database returns the data in JSON format.
I have checked ten times that the username, password, port, and database name are correct, and they are fine.
The code:
// Create express app
var express = require("express")
var app = express()
var mysql = require('mysql')
var cors = require('cors')
app.use(cors())

// Server port
var HTTP_PORT = 3000

// MySQL connection pool
var pool = mysql.createPool({
  connectionLimit: 10,
  host: '192.168.0.1',
  user: 'user',
  port: '3388',
  password: 'password',
  database: 'databasename'
});

var ardaforecast = '';

app.route('/')
  .get(function (req, res) {
    // omitted
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Cache-Control', 'private, no-cache, no-store, must-revalidate');
    //const date = req.query.date;
    const id = req.query.id;
    // NOTE: interpolating `id` directly is open to SQL injection; a `?` placeholder would be safer.
    pool.query(`CALL Get_Alert_levels_Station(${id})`, function (error, result) {
      if (error)
        return res.status(500).json({ error: "Грешна заявка. Опитай отново !" })
      ardaforecast = result;
      res.json({ ardaforecast })
    });
  });

// Start server
app.listen(HTTP_PORT, () => {
  console.log("Server running on port %PORT%".replace("%PORT%", HTTP_PORT))
});

pool.on('error', function (err) {
  console.log(err.code); // e.g. 'ER_BAD_DB_ERROR'
});

// Fallback 404 handler
app.use(function (req, res) {
  res.status(404).end();
})
Can I get an example of how to fix this, or how to find out where the problem is?
You can use this tool to see what your URL contains: https://www.freeformatter.com/url-parser-query-string-splitter.html
In your example, the problem is that you're using & (ampersand), which separates multiple query parameters. Since you have just one, your URL is not properly structured and does not contain any parameters at all.
You should use ? (question mark) to denote the first one:
localhost:3000/?id=13
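To see the difference, here is a minimal sketch (plain Express, no database) that just echoes req.query back:
var express = require('express');
var app = express();

// Echo the parsed query string so you can see exactly what Express receives.
app.get('/', function (req, res) {
  res.json(req.query);
});

app.listen(3000);
// GET http://localhost:3000/?id=13  ->  { "id": "13" }
// GET http://localhost:3000/&id=13  ->  404, because the path is literally "/&id=13" and matches no route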
p.s. Good luck ;)
After following a tutorial on Node.js I tried to get the details of a request like so:
const http = require('http');
const url = require('url');

http
  .createServer((req, res) => {
    let parsedUrl = url.parse(req.url, true);
    res.write('----------> ');
    res.write(parsedUrl.search);
    res.write(parsedUrl.pathname);
    res.write(' <----------');
    res.end();
  })
  .listen(3000, () => console.log('Listening on port 3000.'));
This works fine, but I get a warning that url.parse() is deprecated and that I should use the new URL() API instead. The problem is that with url.parse() I can pass req.url as the parameter, whereas with new URL() I have to pass a full URL string, so I can't use req.url to get the request details. Or is there something I'm missing?
http
  .createServer((req, res) => {
    let myUrl = new URL(req.url.toString());
    res.write(myUrl.href);
    res.end();
  })
  .listen(3000, () => console.log('Listening on port 3000.'));
If I curl this URL (curl http://localhost:3000/test?hello=world), I get this error: TypeError [ERR_INVALID_URL]: Invalid URL: /test?hello=world
import http from "node:http"
import { URL } from "node:url"
const PORT = 3000
const server = http.createServer((req, res) => {
const url = new URL(`http://${req.headers.host}${req.url}`)
return res.end(url.href)
})
server.listen(PORT, () => `Server is running on port ${PORT}`)
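Once you have a WHATWG URL instance, the path and query string are available on it directly. A small sketch building on the answer above (queryServer and port 3001 are arbitrary names chosen for illustration):
// For curl http://localhost:3001/test?hello=world :
//   url.pathname                  -> "/test"
//   url.searchParams.get("hello") -> "world"
const queryServer = http.createServer((req, res) => {
  const url = new URL(`http://${req.headers.host}${req.url}`)
  res.end(url.searchParams.get("hello") ?? "no value")
})
queryServer.listen(3001)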
I want to pull a URL from the DB and use it as the proxied URL. However the setup I've come up with initializes a new BrowserSync server for each URL, using incrementing port numbers.
Is there a way to accomplish this without initializing a new BrowserSync server every time?
Or should I be using another approach?
var bs = require("browser-sync");
var express = require("express");
var router = express.Router();
var app = express();
router.get("/", function(req, res){
var proxyUrl = getUrl() //get url from db (www.example.com)
bs.create("bs1").init({
notify: false,
open: false,
ui: false,
port: 10000,
proxy: proxyUrl
});
res.send();
});
app.use(router);
app.listen(8080, function(){
console.log('listening on *:8080');
});
The above is fine(ish) but is it good practice to be initializing a new server for every URL (potentially thousands)?
And is it safe to be exposing a new port number to every user of the system? (Can I mask this with a subdomain?)
Update
My end goal is to use a unique subdomain to refer to each proxy url.
For example:
sub1.mysite.com proxies www.example.com,
sub2.mysite.com proxies www.example2.com
Browser-sync will not work here, as its proxy is tied to the server setup.
I use the following packages:
express
express-http-proxy
vhost (express vhost)
const port = 8080;

var app = require('express')();
var proxy = require('express-http-proxy');
var url = require('url');
var vhost = require('vhost');

app.listen(port);

/* Assuming getUrl() will return an array of sites */
// var sites = getUrl();

// DO NOT put '/' at the end of site
var sites = [
  'http://www.bing.com',
  'http://samanthagooden.com',
  'http://www.courtleigh.com'
];

var i = 0;
sites.forEach(site => {
  i++;
  var subDomain = 'sub' + i + '.mysite.com';
  app.use(vhost(subDomain, proxy(site, {
    forwardPath: (req, res) => url.parse(req.url).path,
    intercept: (rsp, data, req, res, callback) => {
      if (res._headers['content-type']) {
        var contentType = res._headers['content-type'];
        if (
          contentType.indexOf('text') !== -1 ||
          contentType.indexOf('javascript') !== -1
        ) {
          // Replace link if content-type = text or javascript
          var reg = new RegExp(site, 'g');
          res.send(data.toString().replace(reg, ''));
        } else {
          res.send(data);
        }
      } else {
        res.send(data);
      }
    }
  })));
  console.log(subDomain + ':' + port + ' proxy: ' + site);
});
The above example will create the following proxies:
sub1.mysite.com:8080 proxy: www.bing.com
sub2.mysite.com:8080 proxy: samanthagooden.com
sub3.mysite.com:8080 proxy: www.courtleigh.com
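If DNS for the sub*.mysite.com names isn't set up yet, you can still exercise the vhost routing locally with curl by overriding the Host header (hostnames as in the example above):
curl -H "Host: sub1.mysite.com" http://localhost:8080/
curl -H "Host: sub2.mysite.com" http://localhost:8080/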
Maybe I'm misunderstanding what you are trying to do, but Browsersync and Express seem a bit overkill in this case; why not just use node-http-proxy with the native http module?
var http = require('http')
var httpProxy = require('http-proxy')

var options = ...

var proxy = httpProxy.createProxyServer(options)

var server = http.createServer(function (req, res) {
  var proxyUrl = getUrl()
  proxy.web(req, res, { target: proxyUrl })
})

server.listen(8080, function () {
  console.log('listening on *:8080')
})
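To tie this to the subdomain goal from the update, getUrl() could pick the target from the Host header. A minimal sketch, assuming you pass req into getUrl() and use a hypothetical in-memory map (in practice this lookup would hit the database):
// Hypothetical lookup table; in practice load this from the DB.
var targetsBySubdomain = {
  sub1: 'http://www.example.com',
  sub2: 'http://www.example2.com'
}

function getUrl(req) {
  // "sub1.mysite.com" -> "sub1"
  var subdomain = (req.headers.host || '').split('.')[0]
  return targetsBySubdomain[subdomain]
}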
In my opinion, using a proxy per client is not a good way to build a SaaS service: each client's proxy would end up as its own process on a new port. My solution is to run a single Node server listening on localhost and map *.domain.com to that server.
If you are using an individual database for each client:
In the Node logic, read the subdomain (CNAME) from the request host and use that to pick which database to connect to.
The final controller code would be:
var express = require('express');
var router = express.Router();
var MongoClient = require('mongodb').MongoClient;

/* GET home page. */
router.get('/', function (req, res, next) {
  // e.g. "client1" for a request to client1.domain.com
  var client = req.subdomains[0];
  console.log(client);

  // Connect to the per-client database.
  MongoClient.connect('mongodb://localhost:27017/' + client, function (err, db) {
    if (err) {
      throw err;
    }
    db.collection('app1').find().toArray(function (err, result) {
      if (err) {
        throw err;
      }
      console.log('data');
      console.log(result);
    });
  });

  // Note: this renders immediately; the query result above is only logged.
  res.render('index', { title: 'Express' });
});

module.exports = router;
If you get more clients in the future, you can scale this out with the Node cluster module or a standard load-balanced cluster in front of the web service.
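For reference, a minimal sketch of the Node cluster module (one worker per CPU core; './app' is a hypothetical module that starts the Express server above):
var cluster = require('cluster');
var os = require('os');

if (cluster.isMaster) {
  // Fork one worker per CPU core.
  os.cpus().forEach(function () {
    cluster.fork();
  });
  cluster.on('exit', function (worker) {
    console.log('worker ' + worker.process.pid + ' died, restarting');
    cluster.fork();
  });
} else {
  // Each worker runs the same app; they share the listening port.
  require('./app'); // hypothetical entry point that calls app.listen(...)
}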
I'm trying to create an HTTP/S MitM forwarding proxy using Node.js.
The way I'm tackling this project is by reusing the solution found in the ./lib/proxy.js file of the NPM Proxy Cache project created by @runk, after he raised the issue on the node-http-proxy project's issue tracker.
My Proxy() class looks like this:
var request = require('request')
, https = require('https')
, http = require('http')
, net = require('net')
, url = require('url')
, os = require('os')
, fs = require('fs');
var SOCKET_PATH = os.tmpdir() + 'mitm.sock';
console.log('[SOCKET PATH] ' + SOCKET_PATH);
function Proxy (config) {
  config = config || {};

  if (fs.existsSync(SOCKET_PATH)) {
    fs.unlinkSync(SOCKET_PATH);
  }

  var options = {
    key: fs.readFileSync('./certs/dummy.key', 'utf8'),
    cert: fs.readFileSync('./certs/dummy.crt', 'utf8')
  };

  // HTTPS Server
  https.createServer(options, this.handler).listen(config.port + 1, this.hostname, function (e) {
    if (e) {
      console.log('[HTTPS] Server listen() error !');
      throw e;
    }
  });

  // HTTP Server
  var server = http.createServer(this.handler);
  server.listen(config.port, this.hostname, function (e) {
    if (e) {
      console.log('[HTTP] Server listen() error !');
      throw e;
    }
  });

  // Intercept CONNECT requests for HTTPS handshake
  server.addListener('connect', this.httpsHandler);
}

Proxy.prototype.handler = function (req, res) {
  var schema = !!req.client.pair ? 'https' : 'http'
    , path = url.parse(req.url).path;

  var dest = schema + '://' + req.headers['host'] + path;

  console.log('(1) - [' + schema.toUpperCase() + '] ' + req.method + ' ' + req.url);

  var params = {
    rejectUnauthorized: false,
    url: dest
  };

  if (req.method.toUpperCase() !== 'GET') {
    return console.log('[HTTP] Request is not HTTP GET.');
  }

  var onResponse = function (e, response) {
    if (e == null && response.statusCode === 200) {
      return r.pipe(res);
    }
    var body = 'Status ' + response.statusCode + ' returned';
    if (e) {
      body = e.toString();
    }
    res.end(body);
  };

  var r = request(params);
  r.on('response', onResponse.bind(null, null));
  r.on('error', onResponse.bind(null));
};

Proxy.prototype.httpsHandler = function (request, socketRequest, bodyHead) {
  var httpVersion = request['httpVersion']
    , url = request['url'];

  console.log('(2) - [HTTPS] ' + request['method'] + ' ' + request['url']);

  var proxySocket = new net.Socket();

  // ProxySocket event handlers
  proxySocket.connect(SOCKET_PATH, function () {
    proxySocket.write(bodyHead);
    proxySocket.write('HTTP/' + httpVersion + ' 200 Connection established\r\n\r\n');
  });

  proxySocket.on('data', function (chunk) {
    console.log('ProxySocket - "data"');
    socketRequest.write(chunk);
  });

  proxySocket.on('end', function () {
    console.log('ProxySocket - "end"');
    socketRequest.end();
  });

  proxySocket.on('error', function (e) {
    console.log('ProxySocket - "error"');
    console.log(e);
    console.log(e.stack);
    socketRequest.write('HTTP/' + httpVersion + ' 500 Connection error\r\n\r\n');
    socketRequest.end();
  });

  // SocketRequest event handlers
  socketRequest.on('data', function (chunk) {
    console.log('SocketRequest - "data"');
    proxySocket.write(chunk);
  });

  socketRequest.on('end', function () {
    console.log('SocketRequest - "end"');
    proxySocket.end();
  });

  socketRequest.on('error', function (e) {
    console.log('socketRequest - "error"');
    console.log(e);
    console.log(e.stack);
    proxySocket.end();
  });
};
module.exports = Proxy;
And my index.js file that starts my program looks like this:
var Proxy = require('./lib/proxy');
var proxy = new Proxy({
  hostname: '127.0.0.1',
  port: 8000
});
Here's my directory / file structure:
/my_project
  /certs
    dummy.crt   // Copied from the NPM Proxy Cache project
    dummy.csr   // Copied from the NPM Proxy Cache project
    dummy.key   // Copied from the NPM Proxy Cache project
  /lib
    proxy.js
  index.js
I'm testing my program by setting (in Mac OS X Mavericks) an HTTP and HTTPS proxy with IP address 127.0.0.1 and port 8000.
When browsing an HTTP only website everything works fine, but if I browse an HTTPS website I get the following error:
{[Error: connect ENOENT] code: 'ENOENT', errno: 'ENOENT', syscall: 'connect'}
Error: connect ENOENT
at errnoException (net.js:904:11)
at Object.afterConnect [as oncomplete] (net.js:895:19)
Any ideas where this issue could come from and how to fix it?
Thank you very much in advance!
(If you want to test my code, the NPM module request is the only dependency needed to run the code.)
EDIT: The certs can be downloaded from here : https://github.com/runk/npm-proxy-cache/tree/master/cert.
I'm the author of npm-proxy-cache. In fact, I've created another project called thin (https://www.npmjs.org/package/thin), and I hope npm-proxy-cache will eventually use it. Although it's still very rough, it's usable and it does what you need.
E.g.
proxy code
var Thin = require('thin')

var proxy = new Thin;

// `req` and `res` params are `http.ClientRequest` and `http.ServerResponse` accordingly
// be sure to check http://nodejs.org/api/http.html for more details
proxy.use(function (req, res, next) {
  console.log('Proxying:', req.url);
  next();
});

// you can add different layers of "middleware" similar to "connect",
// but with few exclusions
proxy.use(function (req, res, next) {
  if (req.url === '/foobar')
    return res.end('intercepted');
  next();
});

proxy.listen(8081, 'localhost', function (err) {
  // .. error handling code ..
});
server code
var express = require('express'); // v3.4
var app = express();

app.use(express.urlencoded({ limit: '10mb' }));

app.get('/test', function (req, res) {
  console.log(req.protocol, 'get req.query', req.query);
  res.end('get: hello world');
});

app.post('/test', function (req, res) {
  console.log(req.protocol, 'post req.query', req.query);
  console.log(req.protocol, 'post req.body', req.body);
  res.end('post: hello world');
});

app.listen(3000);

var fs = require('fs');
var https = require('https');

https.createServer({
  key: fs.readFileSync('./cert/dummy.key'), // your mitm server keys
  cert: fs.readFileSync('./cert/dummy.crt')
}, app).listen(3001);
You need to start the proxy and the server in two terminal sessions, then:
curl -d "foo=baz" -k -x https://localhost:8081 https://localhost:3001/test?foo=bar
curl -d "foo=baz" -x http://localhost:8081 http://localhost:3000/test?foo=bar
After that you should be able to see the following output from the server:
https post req.query { foo: 'bar' }
https post req.body { foo: 'baz' }
http post req.query { foo: 'bar' }
http post req.body { foo: 'baz' }
A small example for the interceptor:
curl -d "foo=baz" -k -x https://localhost:8081 https://localhost:3001/foobar
It should return intercepted
Hope that helps :)
I created a proxy server in node.js using the node-http-proxy module.
Looks like this:
var http = require('http'),
    httpProxy = require('http-proxy'),
    io = require("socket.io").listen(5555);

var proxy = new httpProxy.RoutingProxy();

http.createServer(function (req, res) {
  proxy.proxyRequest(req, res, {
    host: 'localhost',
    port: 1338
  });
}).listen(9000);
So, before sending the response back to the client, I need to get the body from the server I'm proxying and analyze it.
But I can't find an event or attribute where I can get this data. I tried:
proxy.on('end', function (data) {
  console.log('end');
});
But I can't figure out how to get the MIME body from it.
If all you want to do is examine the response (read-only), you could use:
proxy.on('proxyRes', function (proxyRes, req, res) {
  proxyRes.on('data', function (dataBuffer) {
    var data = dataBuffer.toString('utf8');
    console.log("This is the data from target server : " + data);
  });
});
But note that the 'proxyRes' event is emitted after the response is sent.
Reference:
https://github.com/nodejitsu/node-http-proxy/issues/382
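If you need the complete body rather than individual chunks, a minimal sketch that buffers the chunks on the same proxyRes stream (still read-only):
proxy.on('proxyRes', function (proxyRes, req, res) {
  var chunks = [];
  proxyRes.on('data', function (chunk) {
    chunks.push(chunk);
  });
  proxyRes.on('end', function () {
    // Full response body from the target server.
    var body = Buffer.concat(chunks).toString('utf8');
    console.log('Body from target:', body);
  });
});
Note that if the target responds with compressed content (e.g. gzip), you would have to decompress the buffer before treating it as text.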
I found the answer:
I override the response's write function to capture the body:
var http = require('http'),
    httpProxy = require('http-proxy'),
    io = require("socket.io").listen(5555);

var proxy = new httpProxy.RoutingProxy();

http.createServer(function (req, res) {
  console.log(req.url);

  res.oldWrite = res.write;
  res.write = function (data) {
    /* add logic for your data here */
    console.log(data.toString('UTF8'));
    res.oldWrite(data);
  }

  proxy.proxyRequest(req, res, {
    host: 'localhost',
    port: 1338
  });
}).listen(9000);
This code will log every request URL and the response body coming back from this endpoint.
Based on @Psycho's answer, the following code can be used to modify the headers as well:
var server = http.createServer(function (req, res) {
  res.oldWrite = res.write;
  res.write = function (data) {
    console.log(data.toString('UTF8'));
    res.oldWrite(data);
  }

  res.oldSetHeader = res.setHeader;
  res.setHeader = function (k, v) {
    console.log(k, v);
    res.oldSetHeader(k, v);
  }

  // proxyPool and key come from the author's own setup (one proxy instance per target).
  proxy.web(req, res, { target: proxyPool[key] });
});
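For instance, to actually change a header instead of only logging it, the same setHeader override can rewrite the value before delegating (a sketch; the header name is just an example):
res.oldSetHeader = res.setHeader;
res.setHeader = function (k, v) {
  // Example: force no caching on every proxied response.
  if (k.toLowerCase() === 'cache-control') {
    v = 'no-store';
  }
  res.oldSetHeader(k, v);
};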