I am using Node.js v0.10.28 on Ubuntu 12.04 LTS,
and am trying to make a request to an HTTPS-enabled site from behind a web proxy, using this test code:
var http = require("http");
var options = {
  host: "mycompany.proxy.host",
  port: 8088,
  //host: "https://login.launchpad.net",
  //port: 443,
  //path: "/",
  headers: {
    Host: "login.launchpad.net"
  }
};

http.get(options, function (res) {
  console.log(res);
  res.pipe(process.stdout);
}).on('error', function (e) {
  console.log(e);
});
but I am not sure what the correct option parameters are to make a successful HTTPS GET through the proxy.
You should be using the Node https module instead of the http module for making HTTPS requests. Otherwise the server will most likely reset your connection, and an ECONNRESET error will be caught by the error handler.
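As a minimal, untested sketch (host, port, and path taken from the commented-out lines in the question), the same GET made directly with the https module looks like this; if the request has to go through the corporate proxy, a CONNECT tunnel is needed instead, as the tunneling examples further below show:

var https = require("https");

https.get({
  host: "login.launchpad.net",
  port: 443,
  path: "/"
}, function (res) {
  console.log(res.statusCode);
  res.pipe(process.stdout);
}).on('error', function (e) {
  console.log(e);
});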
Related
I created a local HTTP proxy server for testing, which works fine with my Firefox and Chrome browsers. However, when I tried the following code, the server simply replied with a 400 error.
const http = require("http");
const req = http.request({
  host: "localhost",
  port: 8001,
  method: "CONNECT",
  path: "sfnjs.com:443"
});
req.end();

req.on("error", err => {
  console.log(err);
}).on('connect', (res, socket, head) => {
  // Make a request over an HTTP tunnel
  socket.write('GET / HTTP/1.1\r\n' +
               'Host: sfnjs.com:443\r\n' +
               'Connection: close\r\n' +
               '\r\n');
  socket.on('data', (chunk) => {
    console.log(chunk.toString());
  });
});
And the server responded
HTTP/1.1 400 Bad Request
Server: nginx/1.16.1
Date: Mon, 22 Jun 2020 03:00:49 GMT
Content-Type: text/html
Content-Length: 255
Connection: close
<html>
<head><title>400 The plain HTTP request was sent to HTTPS port</title></head>
<body>
<center><h1>400 Bad Request</h1></center>
<center>The plain HTTP request was sent to HTTPS port</center>
<hr><center>nginx/1.16.1</center>
</body>
</html>
And I wasn't able to use the https module to connect to the proxy server, since the proxy server only accepts plain HTTP messages. If I try to do so, for example:
const https = require("https");
const req = https.request({
  host: "localhost",
  port: 8001,
  method: "CONNECT",
  path: "sfnjs.com:443"
});
The proxy server would simply close the connection, and an error like this would be thrown:
Error: Client network socket disconnected before secure TLS connection was established
at connResetException (internal/errors.js:570:14)
at TLSSocket.onConnectEnd (_tls_wrap.js:1361:19)
at Object.onceWrapper (events.js:312:28)
at TLSSocket.emit (events.js:228:7)
at endReadableNT (_stream_readable.js:1185:12)
at processTicksAndRejections (internal/process/task_queues.js:81:21) {
code: 'ECONNRESET',
path: null,
host: 'localhost',
port: 8001,
localAddress: undefined
}
However, if I configure this proxy in Firefox or Chrome and visit the target website, it works very well. So how does the browser do this, and how can I implement it in a Node.js application?
And I wasn't able to use the https module to connect to the proxy server, since the proxy server only accepts plain HTTP messages
It sounds like you are using the https module in the wrong place. You should be using the http module to connect to the proxy, and then the https module where you are now using your own custom code: socket.write('GET / HTTP/1.1\r\n' ...
req.on("error", err => {
console.log(err);
}).on('connect', (res, socket, head) => {
// Make a request over an HTTP tunnel
const req2 = https.request({ // HTTPS
host: 'sfnjs.com:443',
method: 'GET',
createConnection: () => { return socket }, // tunnel
path: '/'
}, res2 => {
res2.on('data', (chunk) => {
console.log(chunk.toString());
});
});
req2.end()
});
Note that the code above is untested but should illustrate the general idea.
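One small check worth adding (a sketch, not part of the original answer): make sure the proxy actually accepted the tunnel before reusing the socket, since a proxy that refuses the CONNECT replies with a non-200 status:

req.on('connect', (res, socket, head) => {
  // res is the proxy's reply to the CONNECT request
  if (res.statusCode !== 200) {
    console.log('Tunnel failed, proxy replied with status', res.statusCode);
    socket.destroy();
    return;
  }
  // ...continue with the https.request over the tunnel socket as shown above
});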
I'm doing research on proxies in Node.js, and I came across something that blew my mind. In one of the examples of an http.request connection, the source code showed this as the options object:
const options = {
  port: 1337,
  host: '127.0.0.1',
  method: 'CONNECT',
  path: 'www.google.com:80'
};
This was part of a much bigger example that implemented a whole tunneling system. Can someone explain how the options above work? The whole code is below:
const http = require('http');
const net = require('net');
const { URL } = require('url');

// Create an HTTP tunneling proxy
const proxy = http.createServer((req, res) => {
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  res.end('okay');
});
proxy.on('connect', (req, clientSocket, head) => {
  // Connect to an origin server
  const { port, hostname } = new URL(`http://${req.url}`);
  const serverSocket = net.connect(port || 80, hostname, () => {
    clientSocket.write('HTTP/1.1 200 Connection Established\r\n' +
                       'Proxy-agent: Node.js-Proxy\r\n' +
                       '\r\n');
    serverSocket.write(head);
    serverSocket.pipe(clientSocket);
    clientSocket.pipe(serverSocket);
  });
});

// Now that proxy is running
proxy.listen(1337, '127.0.0.1', () => {
  // Make a request to a tunneling proxy
  const options = {
    port: 1337,
    host: '127.0.0.1',
    method: 'CONNECT',
    path: 'www.google.com:80'
  };

  const req = http.request(options);
  req.end();

  req.on('connect', (res, socket, head) => {
    console.log('got connected!');

    // Make a request over an HTTP tunnel
    socket.write('GET / HTTP/1.1\r\n' +
                 'Host: www.google.com:80\r\n' +
                 'Connection: close\r\n' +
                 '\r\n');
    socket.on('data', (chunk) => {
      console.log(chunk.toString());
    });
    socket.on('end', () => {
      proxy.close();
    });
  });
});
Source: https://nodejs.org/api/http.html#http_event_connect
You probably have never used a network that requires you to configure an HTTP proxy. Most networks these days configure their firewall to allow HTTP traffic directly, which means most people have never needed an HTTP proxy to access the web.
A long, long time ago, when I first started using the internet (around 1994), a lot of networks didn't allow transparent internet access. Your PC did not have any connection to the outside world, but sysadmins would install an HTTP proxy that you could connect to. Your PC would only have access to the LAN (which the proxy was part of), and only the HTTP proxy would have access to the internet.
Here's an example of how you'd configure Windows to use an HTTP proxy:
If you configure your PC as above, then when you connect to www.google.com your browser would connect to the host proxy.example.com on port 8080 and then request it to fetch data from www.google.com.
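For a plain http:// URL, the request the browser sends to the proxy simply carries the full URL in the request line, for example:

GET http://www.google.com/ HTTP/1.1
Host: www.google.com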
As for why the option is called path: it's because the target is sent in the "path" part of the request.
For example, a normal GET request for getting this page looks something like this:
GET /questions/60498963 HTTP/1.1
Host: stackoverflow.com
And the string after GET and before protocol version is normally called the path:
    .---------- this is normally called
    |           the "path"
    v
GET /questions/60498963 HTTP/1.1
Host: stackoverflow.com
When making a tunneling request through a proxy, the request line looks like this instead:
CONNECT stackoverflow.com:443 HTTP/1.1
So the target host and port are sent to the proxy in the part of the request line that is normally used for the file path.
Note that all this has nothing to do with Node.js. This is just basic networking (no programming languages involved).
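For completeness, the exchange those options produce looks roughly like this on the wire (the 200 reply is exactly the line the example proxy above writes back):

CONNECT www.google.com:80 HTTP/1.1

HTTP/1.1 200 Connection Established
Proxy-agent: Node.js-Proxy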
I get the error Uncaught Error: write EPROTO when attempting to use my API through the Node.js request module on port 443. It works if I change the port to a non-443 port, and requests to the app on port 443 work through the browser and curl.
Why would I get EPROTO and how might I fix it to get my automated tests working again?
When asked for code: it's just an HTTPS server with a simple request:
var https = require('https');
var fs = require('fs');

var server = https.createServer({
  secureProtocol: 'TLSv1_2_method',
  key: fs.readFileSync('key'),
  cert: fs.readFileSync('cert'),
  ca: [
    fs.readFileSync('othercert')
  ]
}, app);
And a simple request:
var request = require('request');
var basepath = "https://localhost";
var req_opts = {
  method: "POST",
  uri: basepath + '/myroute',
  body: {},
  rejectUnauthorized: false,
  json: true
};

request(req_opts, function (err, response, body) {
  if (err) {
    console.log(err);
    throw err;
  }
  console.log(body);
});
This only happens on my local Windows machine, not on the remote Ubuntu server. Both have Node version 4.2.3.
From searching, it looks like the request might not be able to negotiate a cipher, but then why does it succeed on ports other than 443?
Skype was also listening locally on port 443, which is why this was occurring.
I need to send HTTPS requests from my client through an intranet proxy to a server.
I tried both https and request+global-tunnel, and neither solution seems to work.
Similar code using plain 'http' works. Are there other settings I missed?
The code failed with an error:
REQUEST:
problem with request: tunneling socket could not be established, cause=socket hang up
HTTPS:
events.js:72
throw er; // Unhandled 'error' event
^
Error: socket hang up
at SecurePair.error (tls.js:1011:23)
at EncryptedStream.CryptoStream._done (tls.js:703:22)
at CleartextStream.read [as _read] (tls.js:499:24)
The code is a simple https test:
var http = require("https");
var options = {
  host: "proxy.myplace.com",
  port: 912,
  path: "https://www.google.com",
  headers: {
    Host: "www.google.com"
  }
};

http.get(options, function (res) {
  console.log(res);
  res.pipe(process.stdout);
});
You probably want to establish a TLS-encrypted connection between your Node app and the target destination, through a proxy.
In order to do this you need to send a CONNECT request with the target destination host name and port. The proxy will create a TCP connection to the target host and then simply forward packets between you and the target destination.
I highly recommend using the request client. This package simplifies making and handling HTTP(S) requests.
Example code using request client:
var request = require('request');
request({
  url: 'https://www.google.com',
  proxy: 'http://97.77.104.22:3128'
}, function (error, response, body) {
  if (error) {
    console.log(error);
  } else {
    console.log(response);
  }
});
Example code using no external dependencies:
var http = require('http'),
    tls = require('tls');

var req = http.request({
  host: '97.77.104.22',
  port: 3128,
  method: 'CONNECT',
  path: 'twitter.com:443'
});

req.on('connect', function (res, socket, head) {
  var tlsConnection = tls.connect({
    host: 'twitter.com',
    socket: socket
  }, function () {
    tlsConnection.write('GET / HTTP/1.1\r\nHost: twitter.com\r\n\r\n');
  });

  tlsConnection.on('data', function (data) {
    console.log(data.toString());
  });
});

req.end();
At a high level, I am trying to use QuotaGuard Static from a Node.js application on Heroku to talk to an IP-restricted API. The API has its own Node.js client implementation, but under the covers it's just an HTTP[S] API. The library uses superagent and superagent-proxy under the covers to do the actual HTTP[S] requests.
In node 0.10, all worked fine. In node 0.12, I see errors like:
Error: write EPROTO 140735203734288:error:140770FC:SSL routines:SSL23_GET_SERVER_HELLO:unknown protocol:../deps/openssl/openssl/ssl/s23_clnt.c:782:
at exports._errnoException (util.js:746:11)
at WriteWrap.afterWrite (net.js:775:14)
In io.js 2.02 I see:
Error: write EPROTO at Object.exports._errnoException (util.js:844:11)
I tried globally using SSLv3 as shown in this answer, but it seemed to have no effect.
The proxy url is specified as an http URL with port 9293. This answer suggested using port 443, but since the proxy provider is external to me, I cannot change it.
How might I get the proxied request to work in node 0.12?
Tim here from QuotaGuard. This seems to be an issue manifesting itself in the https-proxy-agent used by superagent-proxy for HTTPS requests, causing the request to be made to the secure endpoint on the wrong port.
This is a simple example that should connect to Google on port 443.
var url = require('url');
var https = require('https');
var HttpsProxyAgent = require('https-proxy-agent');
// HTTP/HTTPS proxy to connect to
var proxy = process.env.QUOTAGUARDSTATIC_URL;
console.log('using proxy server %j', proxy);
// HTTPS endpoint for the proxy to connect to
var endpoint = process.argv[2] || 'https://www.google.com/';
console.log('attempting to GET %j', endpoint);
var opts = url.parse(endpoint);
// create an instance of the `HttpsProxyAgent` class with the proxy server information
var agent = new HttpsProxyAgent(proxy);
opts.agent = agent;
https.get(opts, function (res) {
  console.log('"response" event!', res.headers);
  res.pipe(process.stdout);
});
The actual request is being made on port 80 so Google is rejecting it. Here are the HTTP headers:
["Proxy-Authorization: Basic Xgat28sa78saBZZ \r\n", "Host: www.google.com:80\r\n", "Connection: close\r\n"]
The same example on a patched version correctly connects to port 443 and works:
https://github.com/TooTallNate/node-https-proxy-agent/compare/master...timrwilliams:master
I suspect something has changed upstream which is causing the wrong port to be passed to https-proxy-agent, but this type of problem is more appropriately discussed in the GitHub issues.
A quick fix would be switching to use the request library instead:
var request = require('request');
var options = {
  proxy: process.env.QUOTAGUARDSTATIC_URL,
  url: 'https://www.google.com/',
  headers: {
    'User-Agent': 'node.js'
  }
};

function callback(error, response, body) {
  if (!error && response.statusCode == 200) {
    console.log(body);
  }
}

request(options, callback);