Connect to websocket if cloudflare protection is enabled - node.js

I connect to a websocket of some website using node.js and the ws module. That usually works fine, but recently they have been updating something, and then they enable Cloudflare protection...
So i get 503 server response when trying to connect
function connect_to_ws(failures) {
if (already_connected) return;
if (failures == undefined) failures = 0;
log('Connecting to "' + ws_server + '"...');
var ws = new WebSocket(ws_server, [], {
'headers': {
'Cookie': cookie
}
});
ws.on('open', function() {
already_connected = true;
log('Connected');
});
...

Related

node.js http connection works with AWS instance, doesn't work with LoadBalancer

I'm using node.js and the http object to connect to a webserver with a special application server. Works fine for an server at amazon, no problems.
Now we have a Classic Load Balancer running. It works - I can enter the URL of the ELB in my browser and I see the answer of our server.
But when I try to open the same URL using my node.js script I always get a timeout.
Sample code:
try {
console.log( "start test" );
var http = require("http");
// Plain-HTTP request options. NOTE(review): 'Encoding' is not a standard
// HTTP header -- presumably consumed by the special application server; confirm.
var options = { host: "test.server.name",
port: "80",
path: "/dp/",
headers: {
'Encoding' : 'BINARY'
}
};
console.log( "start get " + options.host );
var req = http.get( options , function( res ) {
console.log( "in callback" );
res.on('data', function(data) {
console.log( "callback get data " + data );
});
res.on('end', function() {
console.log( "callback end" );
});
// 'error' on the response only covers failures while reading the body.
res.on('error', function(e) {
console.log( "callback error" );
});
} );
// BUG FIX: connection-level failures (DNS, ECONNREFUSED, timeouts -- exactly
// the symptom described above) are emitted on the *request* object returned by
// http.get, not on `res`. Without this handler they are uncaught exceptions;
// the surrounding try/catch cannot catch them because they are asynchronous.
req.on('error', function(e) {
console.log( "request error " + e.message );
});
} catch( err ) {
console.log( "error " + err.message );
}
Code works directly with the server.
Doesn't work when using the loadbalancer.
And I have no idea what to do - any ideas?
I got it. Don't know why - but the problem was in the proxy we're using.
I found this:
How can I use an http proxy with node.js http.Client?
and by using our proxy it worked...
I found it - it was the proxy.
I set the proxy using this:
How can I use an http proxy with node.js http.Client?
and this:
https://www.vanamco.com/2014/06/24/proxy-requests-in-node-js/
Works now!

nodejs tcp proxy authentication

I am using a nodejs module called tcp-proxy to pipe the requests from client to proxy server in order to proxy the requests.
I want this proxy to work only when the user provides the username and password, so I modified my code to make the client send proxy authentication header before proceeding.
Here is my javascript code:
var tcpProxy = require('tcp-proxy');
// Forward every incoming connection to the configured upstream target.
var server = tcpProxy.createServer({
target: {
host: 'HOST', // BUG FIX: this comma was missing -- the original was a syntax error
port: 'PORT'
}
});
server.listen(8000);
server.on('connection', (socket) => {
socket.on('data', (data) => {
// Reject the request with 407 unless valid proxy credentials are present.
if(!authorized(data, socket)) {
// BUG FIX: HTTP requires CRLF ("\r\n") line terminators between the
// status line and each header; bare "\n" is not a valid header separator.
socket.write('HTTP/1.1 407 Proxy Authentication Required\r\nProxy-Authenticate: Basic realm=""\r\nConnection: keep-alive');
socket.write('\r\n\r\n');
socket.end();
}
});
});
// Checks whether a connection has supplied valid Basic proxy credentials.
// @param {Buffer} data   - raw bytes of the client's request head
// @param {object} socket - the client socket; `socket.auth` is set to true on
//                          success so subsequent packets skip re-parsing
// @returns {boolean} true if already authorized or credentials match
function authorized(data, socket) {
// Fast path: this socket already authenticated on an earlier packet.
if(socket.auth){
return true;
}
// Parse the request head once instead of re-splitting on every iteration.
var lines = data.toString().split('\r\n');
// Only the first 11 lines are inspected (preserves original behavior).
for(var i=0; i<=10; i++) {
var line = lines[i];
if(!line) {
continue;
}
var sep = line.indexOf(':');
if(sep === -1 || line.slice(0, sep) !== "Proxy-Authorization") {
continue;
}
// Header value looks like "Basic <base64(user:pass)>".
var token = line.slice(sep + 1).trim().split(' ')[1];
// Buffer.from replaces the deprecated `new Buffer(...)` constructor;
// `|| ''` keeps a malformed header from throwing instead of returning false.
var creds = Buffer.from(token || '', 'base64').toString().split(':');
var username = creds[0];
var password = creds[1];
if(username === "USERNAME" && password === "PASSWORD") {
socket.auth = true;
return true;
} else {
return false;
}
}
return false;
}
Everything is working fine in Chrome and Mozilla Firefox, but in Safari and some other client applications it doesn't work as expected.
I've tried to debug and found that when making an https request from the Safari browser (with the proxy set in Wi-Fi settings on Mac) the Proxy-Authorization header is missing from the request, and the same happens with some other client applications that have the option to use a proxy server.
Since my proxy server is supposed to drop the request if it doesn't find the header it does that for Safari.
Why does this happen? Any solution to get it working on all browsers and applications?

"getaddrinfo ENOTFOUND" error on heroku

I am trying to deploy a webRTC signaling server on Heroku. It works on my local machine but gave:
Error: getaddrinfo ENOTFOUND global.xirsys.net global.xirsys.net
when deployed on Heroku.
I am using simpleWebRTC + signalmaster + XirSys
The official doc from xirSys shows:
// Node Get ICE STUN and TURN list
var https = require("https");
var options = {
host: "global.xirsys.net",
path: "/_turn/Acumany",
method: "PUT",
headers: {
// Buffer.from replaces the deprecated `new Buffer(...)` constructor.
"Authorization": "Basic " + Buffer.from("acumany:4b6aea04-6152-11e7-9d16-3fa9b82ffd4f").toString("base64")
}
};
var httpreq = https.request(options, function(httpres) {
var str = "";
httpres.on("data", function(data){ str += data; });
httpres.on("error", function(e){ console.log("error: ",e); });
httpres.on("end", function(){
console.log("ICE List: ", str);
});
});
// BUG FIX: DNS/connection failures (e.g. "getaddrinfo ENOTFOUND" -- the exact
// error in this question) are emitted on the request object, not the response;
// without this handler they crash the process as uncaught exceptions.
httpreq.on("error", function(e){ console.log("request error: ", e); });
httpreq.end();
And I used axios:
// Fetch the ICE server list and split it into STUN vs TURN entries.
// BUG FIX: basic-auth credentials in a URL are separated from the host with
// '@', not '#'. A '#' starts the URL fragment, so the original URL had no
// valid host and the request could never reach global.xirsys.net.
axios.put("https://acumany:4b6aea04-6152-11e7-9d16-3fa9b82ffd4f@global.xirsys.net/_turn/Acumany")
.then((res) => {
var result = res.data;
var iceServers = result.v.iceServers;
var turnservers = [],
stunservers = [];
// Partition by scheme: "stun:" entries vs everything else (TURN).
iceServers.forEach(function (server) {
if(server.url.indexOf("stun:") != -1){
stunservers.push(server);
}else{
turnservers.push(server);
}
});
console.log("emitting server info => ", stunservers, turnservers);
client.emit('stunservers', stunservers || []);
client.emit('turnservers', turnservers);
})
.catch(function (err) {
console.log("axios error => ", err);
});
My guess is that heroku switch http/https in its own load balancer.
How can I change this code to make it work? (either with https or axios)
global.xirsys.net is a dynamic domain which geographically routes your request. Maybe something about that is incompatible with Heroku. Try a static xirsys endpoint instead. Like ss.xirsys.com for Singapore or us.xirsys.com for US east coast.
You can see a list of endpoints here: https://us.xirsys.com:9000/api-intro

HTTP2 / SPDY Push-Stream Verification: How to Test?

I am implementing HTTP2/SPDY push resourcing using the node module spdy. I have followed indutny's doc and have been doing test runs implementing his example into my server.js.
The problem is twofold: I am not getting any errors in the log(s), nor am I seeing the alert in the browser. I don't see any change in the Developer Console either. If I set a bogus push URL, I get no response/errors, etc. I believe, in theory, the Priority should change from medium to High(?). See screenshot.
Is there another way for me to test if the push is being made to the browser? Or, do I have something wrong in my script (Please check for inconsistencies)? Also, what to throw in stream.on('error', function() {});?
Testing in Chrome (on a ChromeBook), nodejs v5.1.0, npm v3.3.12 - H2 enabled is verified in Chrome.
server.js:
// Resolve the per-environment config directory from NODE_ENV
// (e.g. NODE_ENV=production -> '../env/production').
var environment = '../env/' + process.env.NODE_ENV
// Process User config
// NOTE(review): 'fs' is required again as `fs` further down -- fS and fs are
// duplicates of the same module; consider consolidating.
, fS = require('fs')
// Raw user config, read synchronously at startup.
, jsonFile = fS.readFileSync(environment + '/config.json')
, jsonString, hostIp, hostPort, cacheExp, cps;
// Parse the config. The `var` declarations inside the try are hoisted to
// module scope, which is why SERV_HOST etc. are visible to the code below.
try {
jsonString = JSON.parse(jsonFile);
var SERV_HOST = jsonString['hostIp']
, SERV_PORT = jsonString['hostPort']
, CACHE_EXP = jsonString['cacheExp']
, CPS = jsonString['cps']
, xPowerBy = 'OceanPress'
, xFrameOptions = 'DENY'
, xXSSProtection = '1; mode=block'
, xContentTypeOption = 'nosniff';
} catch (err) {
// NOTE(review): on parse failure the script continues with SERV_HOST,
// SERV_PORT, etc. left undefined -- confirm this is intended.
console.log('There is an error parsing the json file : ' + err);
}
// Load modules
var fs = require('fs')
, watch = require('staticsmith-watch')
// buffet: static-file server rooted at this environment's _public directory.
, buffet = require('buffet')({root: environment + '/_public'})
, spdy = require('spdy')
// spdy options
, options = {
// TLS key/cert, read synchronously at startup.
key: fs.readFileSync(environment + '/keys/key.pem')
, cert: fs.readFileSync(environment + '/keys/cert.pem')
// SPDY-specific options
, spdy: {
// Protocol preference order offered during TLS ALPN/NPN negotiation.
protocols: [ 'h2','spdy/3.1', 'spdy/3', 'spdy/2','http/1.1', 'http/1.0' ]
, plain: false
, maxStreams: 200
, connection: {
windowSize: 1024 * 1024
, autoSpdy31: false
}
}
// Set ip and port
, host: SERV_HOST
, port: SERV_PORT
}
// Security header options
// Header name/value pairs applied to every response by the request handler.
, security = [
{ name: 'X-Powered-By',
option: xPowerBy }
, { name: 'x-frame-options',
option: xFrameOptions }
, { name: 'X-XSS-Protection',
option: xXSSProtection }
, { name: 'X-Content-Type-Options',
option: xContentTypeOption }
, { name: 'Cache-Control',
option: CACHE_EXP }
, { name: 'Content-Security-Policy',
option: CPS }
, { name: 'server',
option: 'Who knows' }
];
if (process.env.NODE_ENV == 'production') {
spdy.createServer(options, function(req, res) {
// Add Content Security Rules
for(var i = 0; i < security.length; i++) {
res.setHeader(security[i].name, security[i].option);
}
// #see https://www.npmjs.com/package/buffet
// NOTE(review): buffet's third argument behaves like middleware `next` -- it
// only runs when the URL does NOT match a file on disk, so the push below
// never executes for pages buffet actually serves (see the answer further down).
buffet(req, res, function (err, result) {
// Push JavaScript asset (main.js) to the client
// NOTE(review): res.push exists only on HTTP/2 / SPDY connections; an
// HTTP/1.x request reaching this line would throw an uncaught exception.
var stream = res.push('/js/main.js', {
req: {'accept': '*/*'},
res: {'content-type': 'application/javascript'}
});
stream.on('acknowledge', function() {
console.log("Stream ACK");
});
stream.on('error', function() {
console.error("stream ERR");
});
// Body of the pushed resource.
stream.end('alert("hello from push stream!");');
// write main response body and terminate stream
res.end('<script src="/js/main.js"></script>');
// There was an error serving the file? Throw it!
// NOTE(review): res.end above has already flushed headers, so this
// writeHead cannot succeed when err is set -- confirm intended ordering.
if (err) {
console.error("Error serving " + req.url + " - " + err.message);
// Respond to the client
res.writeHead(err.status, err.headers);
}
});
}).listen(options.port, options.host);
console.log("serving at https://" + options.host + ":" + options.port);
console.log("On Node v" + process.versions.node);
// NOTE(review): process.versions has no 'npm' key -- this likely prints
// "undefined"; verify against the Node docs.
console.log("On npm v" + process.versions.npm);
// Watch the static tree and trigger livereload on changes.
watch({
pattern: '**/*',
livereload: true,
});
}
UPDATE:
I have also added:
stream.on('acknowledge', function() {
console.log('stream ACK');
});
There is no console log written - It's like the function is dead.
Dev Console with push-stream (main.js):
There are a few problems here.
The buffet callback is only invoked when the requested URL does not match a file on disk. Just like express middleware, it's essentially a next function. Thus, you're never actually pushing anything.
The first argument to res.push is a URL, not a filesystem path.
res.push will not exist on ≤ HTTP/1.1 connections; you need to make sure it's there or you'll throw an uncaught exception (and crash).
Here's a reduced working example.
// Minimal HTTP/2 push example: serve "/" and push /main.js alongside it.
spdy.createServer({
key: fs.readFileSync('./s.key'),
cert: fs.readFileSync('./s.crt')
}, function(req, res) {
if (req.url == '/') {
// Content-Length (42) matches the exact byte length of the body below.
res.writeHead(200, { 'Content-Length': 42 });
res.end('<h1>Hi</h1><script src="main.js"></script>');
// res.push only exists on HTTP/2 / SPDY connections -- guard before using.
if (res.push) {
// Push JavaScript asset (main.js) to the client
var stream = res.push('/main.js', {
req: {'accept': '**/*'},
res: {'content-type': 'application/javascript'}
});
// BUG FIX: the handler must declare the `err` parameter -- the original
// referenced an undefined `err`, so the error handler itself threw a
// ReferenceError instead of logging the stream error.
stream.on('error', function(err) {
console.error(err);
});
stream.end('alert("hello from push stream!");');
}
} else {
res.writeHead(404);
res.end();
}
}).listen(777);
As far as actually verifying in Chrome that things are being pushed, open a new tab and type chrome://net-internals/#http2. Click the ID of the HTTP/2 session with your server, then click the session in the left-hand pane. Mixed in with the initial request, you'll see something like:
t= 3483 [st= 19] HTTP2_SESSION_RECV_PUSH_PROMISE
--> :method: GET
:path: /main.js
:scheme: https
:authority: localhost:777
--> id = 3
--> promised_stream_id = 4
t= 3483 [st= 19] HTTP2_SESSION_RECV_HEADERS
--> fin = false
--> :status: 200
--> stream_id = 4
t= 3483 [st= 19] HTTP2_SESSION_RECV_DATA
--> fin = true
--> size = 0
--> stream_id = 4
t= 3546 [st= 82] HTTP2_STREAM_ADOPTED_PUSH_STREAM
--> stream_id = 4
--> url = "https://localhost:777/main.js"
(I did not see the Priority of main.js change in the dev tools -- it was still Medium.)
Within the Chrome inspector, I discovered it is quite easily recognized when a resource has been pushed by the server.
First: Within the network view/tab, the resource(s) in question will show virtually no request sent and 'waiting(TTFB)' in the waterfall (See image below).
The theme.min.css & theme.min.js resources are pushed:
Second: After clicking on the pushed the resource(s), opening the "Headers" pane and inspecting the "Request Headers" panel at the bottom, check for Provisional headers are shown. If the warning is shown for the resource, then it was pushed. See this SO answer to why you will see this warning.
Headers Inspector:
If you need a little more detailed information about the pushed resource(s), using the chrome://net-internals/#http2 method as stated in the second part of #josh3736's answer would work too. But if you need a quick way to verify that the resource(s) have been pushed and accepted by the client, viewing the waterfall will show this.

How to capture http messages from Request Node library with Fiddler

Regular client initiated requests to the node server are captured fine in Fiddler. However, requests sent from node to a web service are not captured. It did not help to pass in config for proxy (127.0.0.1:8888) to the request method. How can I route the request messages through Fiddler?
var http = require('http');
var request = require('request');
// BUG FIXES: (1) `"pass" = "pass"` was a syntax error -- object properties are
// assigned with ':'. (2) The `request` library expects `proxy` to be a full
// proxy URL string, not a {host, port} object, so the original proxy config
// was never applied (which is why Fiddler saw nothing).
request.get(webserviceURL, {
"auth": { "user": "user", "pass": "pass", "sendImmediately": true },
"proxy": "http://127.0.0.1:8888"
},
function (error, response) { console.log( "response received" );
});
Request repo: https://github.com/mikeal/request
I just tried to do this myself (using Fiddler and the request library from npm). Here's how I got it working:
// Route an HTTPS request through the local Fiddler proxy.
// NOTE(review): disabling NODE_TLS_REJECT_UNAUTHORIZED turns off TLS
// certificate validation for the whole process -- acceptable only for
// local debugging behind Fiddler, never in production.
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'; // Ignore 'UNABLE_TO_VERIFY_LEAF_SIGNATURE' authorization error
// Build the request options up front rather than inline.
var fiddlerRequestOptions = {
method: "GET",
uri: "https://secure.somewebsite.com/",
proxy: "http://127.0.0.1:8888" // Note the fully-qualified path to Fiddler proxy. No "https" is required, even for https connections to outside.
};
// Issue the request
request(fiddlerRequestOptions, function(err, response, body) {
console.log("done");
});
This is with Fiddler2 using the default port and proxy options (and no proxy authentication).
Fiddler works by setting your "Internet Options" (from start menu) "Connections" > "LAN Settings" > "Proxy Server" to its port, thus making all HTTP traffic (clients which obey this setting) go through it.
You should point your node.js client lib to use a proxy, the settings are written in that options dialog after you start Fiddler.
The proxy option should be a full url, like this:
proxy : "http://127.0.0.1:8888"
To do this on an ad-hoc basis, without changing your code, you can use environment variables.
Request respects:
HTTP_PROXY
HTTPS_PROXY
NO_PROXY
So, to proxy just set these in your console before running your process.
For example, to setup http and https proxy use:
set HTTP_PROXY="http://127.0.0.1:8888"
set HTTPS_PROXY="http://127.0.0.1:8888"
set NODE_TLS_REJECT_UNAUTHORIZED=0
The latter line stops issues with SSL through the fiddler proxy.
I've been wanting the same... an equivalent of the Network tab in chrome DevTools, only for Nodejs. Unfortunately, it doesn't appear as though one exists. I don't have Fiddler on macos, so this is how I went about stubbing the require('http') methods to log and pass though. Leaving this here in case I need it again or someone else finds it helpful. You can turn it on by attaching a debugger and require('filename')() the file containing this script.
module.exports = () => {
const http = require('http');
http._request = http.request;
global.DO_LOG_AJAX = true;
const log = str => {
if (global.DO_LOG_AJAX) {
console.debug(str);
}
};
const flushLog = (requestLines, responseLines) => {
if (global.DO_LOG_AJAX) {
log([
'----------------Begin Request-----------------------------------',
...requestLines,
'----------------End Request / Begin Response--------------------',
...responseLines,
'----------------End Reponse-------------------------------------',
].join('\n'));
}
};
let write;
let end;
http.request = (...requestParams) => {
const req = http._request(...requestParams);
const { method, path, headers, host, port } = requestParams[0];
const requestLogLines = [];
requestLogLines.push(`${method} ${path}`);
requestLogLines.push(`Host: ${host}:${port}`);
for (const header of Object.keys(headers)) {
requestLogLines.push(`${header}: ${headers[header]}`);
}
write = write || req.write;
end = end || req.end;
req.on('error', err => {
log({ err });
});
req._write = write;
req._end = end;
const requestBody = [];
req.write = (...writeParams) => {
requestBody.push(writeParams[0].toString());
return req._write(...writeParams);
};
req.end = (...endParams) => {
if (endParams[0]) {
requestBody.push(endParams[0].toString());
}
requestLogLines.push('');
requestLogLines.push(requestBody.join(''));
return req._end(...endParams);
};
const responseLogLines = [];
req.once('response', response => {
const responseBody = [];
responseLogLines.push(`${response.statusCode} ${response.statusMessage}`);
for (const header of Object.keys(response.headers)) {
responseLogLines.push(`${header}: ${response.headers[header]}`);
}
const onData = chunk => {
responseBody.push(chunk.toString());
};
const onClose = err => {
responseLogLines.push('');
responseLogLines.push(responseBody.join(''));
responseLogLines.push('');
responseLogLines.push(`--- ERROR --- ${err.toString()}`);
flushLog(requestLogLines, responseLogLines);
req.removeListener('data', onData);
};
const onEnd = () => {
responseLogLines.push('');
responseLogLines.push(responseBody.join(''));
flushLog(requestLogLines, responseLogLines);
req.removeListener('data', onData);
};
response.on('data', onData);
response.once('close', onClose);
response.once('end', onEnd);
});
return req;
};
};

Resources