I am attempting to send a POST request to a server I have no control over. The server receives the POST but then sends a response every time an external event happens.
So by "subscribing" with one POST I should be able to receive multiple responses until the connection is dropped.
I have attempted to do this using sockets, but it seems the socket sends as TCP which the server does not recognise - it needs to be HTTP (I checked this using Wireshark as they have an APITool with which to test the connection).
This is my code to receive the data:
var net = require("net");
var client = new net.Socket();
client.connect(
8080,
"hostname.com",
function() {
console.log("Connected");
client.write(`POST /Subscribe HTTP/1.1
Authorization: Basic authcode
Host: hostname.com:8080
Content-Length: 1078
Connection: keep-alive
Keep-Alive: 300
<?xml version="1.0" encoding="UTF-8"?>
<config version="1.7">
... (some XML here)
</config>
`);
console.log("Finished Write");
}
);
client.on("data", function(data) {
console.log("Received " + data.length + " bytes\n" + data);
});
client.on("close", function() {
console.log("Connection closed");
});
client.on("error", function(err) {
console.log("Connection error" + err);
});
client.on("end", function(err) {
console.log("Connection end" + err);
});
Is there a way to force the socket to send as HTTP, or is there a way to receive a "stream" from a POST request over http?
Thank you.
EDIT 1: Adding the raw HTTP code whePOST /SetSubscribe HTTP/1.1
Host: hostname.com:8080
Keep-Alive: 300
Authorization: Basic authkey
Content-Type: application/xml
Content-Length: 1076
<?xml version="1.0" encoding="UTF-8"?>
... XML
</config>
And this gives the response:
<?xml version="1.0" encoding="UTF-8" ?>
<config version="1.7" xmlns="http://www.ipc.com/ver10">
... XML
</config>
With the following headers:
Content-Type: application-xml
Content-Length: 397
Connection: keep-alive
When I watch this in WireShark I see multiple responses coming back but I don't know how to "grab" them in code to use them. Postman doesn't show each of these new responses coming back though.
Related
I have heard that Connection:Keep-Alive header will tell the server to keep the connection between client and server for a while to prevent the effort for each time client establish a request to server. I tried to add that to the request's header but it didn't work out as expected. The socket connection still close on each request.
Could you please help to explain why that happened? Did I missing something about Connection:Keep-Alive or did I implement it the wrong way?
Client:
const http = require("http");
const options = {
port: 4000,
headers: {
connection: "keep-alive",
},
};
function request() {
console.log(`making a request`);
const req = http.request(options, (res) => {
console.log(`STATUS: ${res.statusCode}`);
console.log(`HEADERS: ${JSON.stringify(res.headers)}`);
res.setEncoding("utf8");
res.on("data", (chunk) => {
console.log(`BODY: ${chunk}`);
});
res.on("end", () => {
console.log("No more data in response.");
});
});
req.on("error", (e) => {
console.error(`problem with request: ${e.message}`);
});
req.end();
}
setInterval(() => {
request();
}, 3000);
Server:
const http = require("http");
const server = http.createServer((req, res) => {
setTimeout(() => {
res.end();
}, 500);
});
server.on("connection", function (socket) {
socket.id = Date.now();
console.log(
"A new connection was made by a client." + ` SOCKET ${socket.id}`
);
socket.on("end", function () {
console.log(
`SOCKET ${socket.id} END: other end of the socket sends a FIN packet`
);
});
socket.on("timeout", function () {
console.log(`SOCKET ${socket.id} TIMEOUT`);
});
socket.on("error", function (error) {
console.log(`SOCKET ${socket.id} ERROR: ` + JSON.stringify(error));
});
socket.on("close", function (had_error) {
console.log(`SOCKET ${socket.id} CLOSED. IT WAS ERROR: ` + had_error);
});
});
server.on("clientError", (err, socket) => {
socket.end("HTTP/1.1 400 Bad Request\r\n\r\n");
});
server.listen(4000);
And this is what I've got:
Client:
making a request
STATUS: 200
HEADERS: {"date":"Thu, 09 Apr 2020 13:05:44 GMT","connection":"keep-alive","content-length":"81"}
No more data in response.
making a request
STATUS: 200
HEADERS: {"date":"Thu, 09 Apr 2020 13:05:47 GMT","connection":"keep-alive","content-length":"81"}
No more data in response.
making a request
STATUS: 200
HEADERS: {"date":"Thu, 09 Apr 2020 13:05:50 GMT","connection":"keep-alive","content-length":"81"}
No more data in response.
and Server:
A new connection was made by a client. SOCKET 1586437543111
{ connection: 'keep-alive', host: 'localhost:1234' }
SOCKET 1586437543111 END: other end of the socket sends a FIN packet
SOCKET 1586437543111 CLOSED. IT WAS ERROR: false
A new connection was made by a client. SOCKET 1586437546091
{ connection: 'keep-alive', host: 'localhost:1234' }
SOCKET 1586437546091 END: other end of the socket sends a FIN packet
SOCKET 1586437546091 CLOSED. IT WAS ERROR: false
A new connection was made by a client. SOCKET 1586437549095
{ connection: 'keep-alive', host: 'localhost:1234' }
SOCKET 1586437549095 END: other end of the socket sends a FIN packet
SOCKET 1586437549095 CLOSED. IT WAS ERROR: false
NodeJS version: 10.15.0
There is one more thing makes me even more confuse is that when I use telnet localhost 1234 to make a request:
GET / HTTP/1.1
Connection: Keep-Alive
Host: localhost
Then the connection is not close, and no new connection being created as expected. Is that because telnet receive Connection: Keep-Alive and keep the connection by itself?
I have heard that Connection:Keep-Alive header will tell the server to keep the connection between client and server for a while to prevent the effort for each time client establish a request to server.
This is wrong. The client does not demand anything from the server, it just suggests something to the server.
Connection: keep-alive just tells the server that the client would be willing to keep the connection open in order to send more requests. And implicitly suggests that it would be nice if the server would do the same in order to reuse the existing connection for more requests. The server then can decide by its own if it keeps the connection open after the response was send or closes it immediately or closes it after some inactivity or whatever.
Of course, it is not enough that the client just sends the HTTP header (which is implicit with HTTP/1.1 anyway so no need to send it). It must actually keep the TCP connection open in order to send more requests on the same TCP connection. Your current implementation will not do this as can be seen from the servers log, i.e. the client is closing the connection first:
SOCKET 1586437549095 END: other end of the socket sends a FIN packet
In order to have real keep-alive on the client side you would use an agent which keeps the connection open through multiple http.request. See HTTP keep-alive in node.js for more.
I think websockets are a better solution than tradition http/https for your use case. The connection between client and server will stay alive on the websocket, until the client side is closed, or if you force close the connection.
Node.js websocket package
const WebSocket = require('ws');
const wss = new WebSocket.Server({ port: 8080 });
wss.on('connection', function connection(ws) {
ws.on('message', function incoming(message) {
console.log('received: %s', message);
});
ws.send('something');
});
Client side websockets
I am writing a client program to connect to a website over HTTP/HTTPS. The program first tries to connect to the server using HTTPS. However, after receiving a response status code of 301, I tried handling the request with HTTP whenever there is a 301 by making a new request to the HTTP server. As is commonly done to consume data, I added a listener callback on the 'data' event of the http.get() using the on method of http.clientRequest. However, there is no data in the console output. I am suspecting this is due to the following CSP header that I have been receiving with requests:
Message Headers:
content-type: text/html; charset=UTF-8
location: http://www.whoscored.com/
server: Microsoft-IIS/8.0
strict-transport-security: max-age=16070400
content-security-policy: frame-ancestors *.whoscored.com; upgrade-insecure-requests;
x-content-security-policy: frame-ancestors *.whoscored.com; upgrade-insecure-requests;
date: Sun, 29 Oct 2017 02:44:33 GMT
connection: close
content-length: 148
Logging the data:
The code is provided below:
Https.get(options, (res: Http.IncomingMessage): void => {
logger.log('HTTPS Client for ScrapeX');
logger.log('-------------------------');
logger.logHeaders(res.headers);
switch(res.statusCode) {
case 200:
(() => {
//
logger.log('The connection was established successfully');
})();
break;
case 301:
(() => {
// fallback to http
let buf = '';
httpClient((res1) => {
logger.log('HTTP Client');
logger.log('----------------');
logger.logHeaders(res1.headers);
}, n_options)
.on('error', (err) => {
logger.log('Error: ' + err.message);
logger.printStack(err);
})
.on('data', (chunk: string): void => {
buf += chunk;
})
.on('close', () => {
logger.log('Logging the data: ');
logger.log(buf);
});
})();
break;
}
})
.on('error', (err) => {
logger.log(err.message);
logger.log(err.stack);
})
.on('close', () => {
logger.log('Connection closed');
});
You can't connect to that server with HTTP, that's your problem; it's using both upgrade-insecure-requests and more importantly strict-transport-security. Any browser that respects strict-transport-security will simply refuse to connect over unsecured HTTP.
Not sure what to tell you beyond in this case you simply can't retry in HTTP, or at least, it'll always give either an error or just redirect to HTTPS.
The CSP shouldn't really be an issue, that's for loading other resources while on that page, it doesn't block people from downloading the page in the first place.
I'm trying to send a SSE text/event-stream response from an express.js end point. My route handler looks like:
function openSSE(req, res) {
res.writeHead(200, {
'Content-Type': 'text/event-stream; charset=UTF-8',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive',
'Transfer-Encoding': 'chunked'
});
// support the polyfill
if (req.headers['x-requested-with'] == 'XMLHttpRequest') {
res.xhr = null;
}
res.write(':' + Array(2049).join('\t') + '\n'); //2kb padding for IE
res.write('id: '+ lastID +'\n');
res.write('retry: 2000\n');
res.write('data: cool connection\n\n');
console.log("connection added");
connections.push(res);
}
Later I then call:
function sendSSE(res, message){
res.write(message);
if (res.hasOwnProperty('xhr')) {
clearTimeout(res.xhr);
res.xhr = setTimeout(function () {
res.end();
removeConnection(res);
}, 250);
}
}
My browser makes the and holds the request:
None of the response gets pushed to the browser. None of my events are fired. If I kill the express.js server. The response is suddenly drained and every event hits the browser at once.
If I update my code to add res.end() after the res.write(message) line It flushes the stream correctly however it then fallsback to event polling and dosen't stream the response.
I've tried adding padding to the head of the response like
res.write(':' + Array(2049).join('\t') + '\n');
as I've seen from other SO post that can trigger a browser to drain the response.
I suspect this is an issue with express.js because I had been previously using this code with nodes native http server and it was working correctly. So I'm wondering if there is some way to bypass express's wrapping of the response object.
This is the code I have working in my project.
Server side:
router.get('/listen', function (req, res) {
res.header('transfer-encoding', 'chunked');
res.set('Content-Type', 'text/json');
var callback = function (data) {
console.log('data');
res.write(JSON.stringify(data));
};
//Event listener which calls calback.
dbdriver.listener.on(name, callback);
res.socket.on('end', function () {
//Removes the listener on socket end
dbdriver.listener.removeListener(name, callback);
});
});
Client side:
xhr = new XMLHttpRequest();
xhr.open("GET", '/listen', true);
xhr.onprogress = function () {
//responseText contains ALL the data received
console.log("PROGRESS:", xhr.responseText)
};
xhr.send();
I was struggling with this one too, so after some browsing and reading I solved this issue by setting an extra header to the response object:
res.writeHead(200, {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
"Content-Encoding": "none"
});
Long story short, when the EventSource is negotiating with the server, it is sending an Accept-Encoding: gzip, deflate, br header which is making express to respond with an Content-Encoding: gzip header. So there are two solutions for this issue, the first is to add a Content-Encoding: none header to the response and the second is to (gzip) compress your response.
I'm sending the following post with FireFox rest client:
POST /WSPublic.asmx HTTP/1.1
Content-Type: application/soap+xml; charset=utf-8
Content-Length: 462
Host: host
Connection: close
<?xml version="1.0" encoding="utf-8"?>
<soap12:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap12="http://www.w3.org/2003/05/soap-envelope"><soap12:Body><ContadoresTR xmlns="https://xxxxx"><Usuario>user</Usuario><Contrasena>password</Contrasena><NInstalacion>inst</NInstalacion><VersionFicheroEsperado>1</VersionFicheroEsperado></ContadoresTR></soap12:Body></soap12:Envelope>
It works, and I receive the response from the server.
Looking in Wireshark conversations, I see that firstly the RESTClient sends the following stream:
...........fe....0..S.... ..L0g....!...z.P. _....N.im3....8.q.'...6.9.....p>. .+./.
...........3.2.9./.5.
.......c........
ges.leako.com......
.................#..3t.....!...h2-14.spdy/3.1.spdy/3.http/1.1.........
Then I try to do the same using node.js. The HTTP client sends the same post and I don't receive response from the server. Looking at the Wireshark conversations, I see that the first HTTP Client stream the format is different from the RESTClient of firefox.
POST /WSPublic.asmx HTTP/1.1
Content-Type: application/soap+xml; charset=utf-8
Content-Length: 462
Host: host
Connection: close
<?xml version="1.0" encoding="utf-8"?>
<soap12:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap12="http://www.w3.org/2003/05/soap-envelope"><soap12:Body><ContadoresTR xmlns="https://xxxxxx"><Usuario>user</Usuario><Contrasena>password</Contrasena><NInstalacion>inst</NInstalacion><VersionFicheroEsperado>1</VersionFicheroEsperado></ContadoresTR></soap12:Body></soap12:Envelope>
Here the node.js script:
var http = require("http")
var body = '<?xml version="1.0" encoding="utf-8"?>'
+'<soap12:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'
+'xmlns:xsd="http://www.w3.org/2001/XMLSchema"'
+'xmlns:soap12="http://www.w3.org/2003/05/soap-envelope">'
+'<soap12:Body>'
+'<ContadoresTR xmlns="https://ges.leako.com/WSPublic">'
+'<Usuario>xxx</Usuario>'
+'<Contrasena>xxx</Contrasena>'
+'<NInstalacion>xxx</NInstalacion>'
+'<VersionFicheroEsperado>x</VersionFicheroEsperado>'
+'</ContadoresTR>'
+'</soap12:Body>'
+'</soap12:Envelope>';
var postRequest = {
host: "ges.leako.com",
path: "/WSPublic.asmx",
port: 443,
method: "POST",
headers: {
'Content-Type': 'application/soap+xml; charset=utf-8',
'Content-Length': Buffer.byteLength(body),
'Connection': 'keep-alive'
}
};
var buffer = "";
var req = http.request( postRequest, function( res ) {
console.log( res.statusCode );
var buffer = "";
res.on( "data", function( data ) { buffer = buffer + data; } );
res.on( "end", function( data ) { console.log( buffer ); } );
});
req.write( body );
req.end();
What I'm doing wrong? Why Firefox send the POST format different than the node.js HTTP Client, and which is that format?
Thanks in advance.
Apart from the 'keep-alive' 'Connection' header and the 'Content-type' one, everything seems fine as mentioned by #mithunsatheesh in this other question how to post XML data in node.js http.request but I guess you were already aware of that since the code is pretty similar in both cases :)
i´m trying to get websites / xml objects from the huawei umts stick internal web gui.
I´ve written a wrapper in c using libcurl which works fine. Now i want to port this to node using request.
When i call a url with curl from console like this:
curl -G "http://192.168.1.1/api/monitoring/status"
I get normal result:
<?xml version="1.0" encoding="UTF-8"?>
<response>
<ConnectionStatus>901</ConnectionStatus>
<SignalStrength>99</SignalStrength>
<SignalIcon>5</SignalIcon>
<CurrentNetworkType>4</CurrentNetworkType>
<CurrentServiceDomain>3</CurrentServiceDomain>
<RoamingStatus>0</RoamingStatus>
<BatteryStatus></BatteryStatus>
<BatteryLevel></BatteryLevel>
<simlockStatus></simlockStatus>
</response>
When i try to call this site with request:
var request = require("request");
var options = {
url: "http://192.168.1.1/api/monitoring/status"
};
request.get(options, function(error, response, body) {
if(error)
{
console.log(error);
}
console.log("statusCode: " + response.statusCode);
console.log("statusText: " + response.statusText);
console.log(body);
});
I got an 400 Error:
statusCode: 400
statusText: undefined
<HTML><HEAD>
<TITLE>IPWEBS - 400 Bad Request</TITLE>
</HEAD>
<BODY><H2>400 Bad Request</H2>
<P>The request generated an error response.</P>
</BODY>
</HTML>
Any idea on this subject?
It could be that curl is sending a header that the server requires. Try changing your options to:
var options = {
url: "http://192.168.1.1/api/monitoring/status",
headers: {
'User-Agent': 'curl/7.30.0',
'Accept': '*/*'
}
}