I'm prototyping an app using the native MongoDB REST API, where Node returns about 400K of JSON. I use the following to make the request to Mongo's native API and return the result:
http.request(options, function(req) {
    req.on('data', function(data) {
        console.log(data, data.rows);
        response.send(200, data);
    });
})
.on('error', function(error) {
    console.log('error\t', error);
    response.send(500, error);
})
.end();
When I hit http://localhost:8001/api/testdata via curl, the response is proper (both what is output to Node's console from the console.log and what curl receives). But when I hit it via AJAX in my app, the stream is…interrupted; even the data output to Node's console (Terminal) is odd: it has multiple EOFs, and the Network > Response for the call in Chrome's dev tools ends at the first EOF.
One other strange thing: data looks like:
{
"offset": 0,
"rows": [ … ]
}
but in neither Node nor the client side (Angular) can I reference data.rows (it returns undefined), and typeof data returns [object Object].
EDIT The request headers for both curl and angular (as reported by Node) are:
req.headers: {
'x-action': '',
'x-ns': 'test.headends',
'content-type': 'text/plain;charset=utf-8',
connection: 'close',
'content-length': '419585'
}
EDIT I checked the response headers in both Angular and curl directly (instead of from Node), and there's a disagreement (curl and Angular report the same output when hitting the API directly instead of through Node):
access-control-allow-headers: "Origin, X-Requested-With, Content-Type, Accept"
access-control-allow-methods: "OPTIONS,GET,POST,PUT,DELETE"
access-control-allow-origin: "*"
connection: "keep-alive"
content-length: "65401" // <---------------- too small!
content-type: "application/octet-stream"
// ^-- if i force "application/json"
// with response.json() instead of response.send() in Node,
// the client displays octets (and it takes 8s instead of 0s)
date: "Mon, 15 Jul 2013 18:36:50 GMT"
etag: ""-207110537""
x-powered-by: "Express"
Node's http.request() returns data in chunks for streaming (it would be nice if the docs stated this explicitly). Thus it's necessary to write each chunk to the body of Express's response, listen for the end of the http request (which is not really documented), and then call response.end() to actually finish the response.
var req = http.request(options, function(res)
{
res.on( 'data', function(chunk) { response.write(chunk); } );
res.on( 'end', function() { response.end(); } );
}
);
req.on('error', function(error) { … });
req.end();
Where response is Express's response to the initial client request (curl or Angular's AJAX call).
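Equivalently (a sketch on my part, not the original answer's code), you can let Node shuffle the chunks for you by piping the upstream response straight into Express's response:
var req = http.request(options, function(res) {
    // pipe() forwards each chunk and ends `response` when the upstream response ends
    res.pipe(response);
});
req.on('error', function(error) { response.send(500, error); });
req.end();
The same piping approach works for any readable stream, as the next snippet shows with a database stream: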
resp.set('Content-Type', 'application/json');
const stream = db.Country.findAllWithStream();
// pipe the stream into the response once; { end: false } lets us end it explicitly below
stream.pipe(resp, { end: false });
stream.on('end', () => {
    console.log('\n\nEND!!!!!');
    resp.end();
});
I am using the code below to start a server that accepts HTTP requests, forwards them to an HTTPS API, and then relays the response.
I have a problem in that the API gives a 'Content-Encoding: gzip' response, which seems to be causing me trouble.
If I don't relay the gzip response header, the C# code I'm testing with gets a response that is just random symbols (the compressed data, I assume), as does Postman. When I do include it, as per my example below, I get StatusCode 0: "The magic number in GZip header is not correct. Make sure you are passing in a GZip stream."
I've tried passing the response headers back as:
res.writeHead(cres.statusCode, cres.headers);
But that just seems to result in the jumbled output again. How can I fix this?
const http = require('http')
const https = require('https')
const port = 55555
const requestHandler = (req, res) => {
console.log(req.url)
var options = {
host: 'my.api.com',
path: '/7f308b16-d165-4062-b00f-76970783442e'+req.url,
method: 'GET',
headers: req.headers
};
var json = '';
var creq = https.request(options, function(cres) {
// set encoding
cres.setEncoding('utf8');
// wait for data
cres.on('data', function(chunk){
console.log('data: '+chunk);
json += chunk;
});
cres.on('close', function(){
console.log('close: '+cres.statusCode);
res.writeHead(cres.statusCode);
res.end();
});
cres.on('end', function(){
console.log('end: '+json.toString());
console.log(cres.headers)
res.writeHead(cres.statusCode, {'Content-Encoding': 'gzip', 'Content-Type':'text/json; charset=utf-8', 'Cache-Control':'private' });
res.end(json);
});
}).on('error', function(e) {
// we got an error, return 500 error to client and log error
console.log(e.message);
res.writeHead(500);
res.end();
});
creq.end();
}
const server = http.createServer(requestHandler)
server.listen(port, (err) => {
if (err) {
return console.log('something bad happened', err)
}
console.log(`server is listening on ${port}`)
})
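Not the original poster's fix, but for illustration, a minimal sketch of relaying the upstream response untouched: skip setEncoding() entirely and pipe the raw (still-compressed) bytes through, forwarding the upstream status and headers so the Content-Encoding: gzip header matches the body that is actually sent.
const http = require('http')
const https = require('https')

const server = http.createServer((req, res) => {
  const creq = https.request({
    host: 'my.api.com',                              // placeholder host from the question
    path: req.url,
    method: 'GET',
    headers: { ...req.headers, host: 'my.api.com' }  // override host for the upstream
  }, (cres) => {
    // forward status and headers as-is, then stream the compressed body without decoding it
    res.writeHead(cres.statusCode, cres.headers);
    cres.pipe(res);
  });
  creq.on('error', (e) => {
    console.log(e.message);
    res.writeHead(500);
    res.end();
  });
  creq.end();
});

server.listen(55555)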
I have a node web server which listens on
this.app.use('/register-receiver', (req, res) => {
const receiverId = req.query.id;
res.status(200).set({
connection: 'keep-alive',
'Cache-Control': 'no-cache',
'Content-Type': 'application/json',
});
this.receivers[receiverId] = res;
});
It periodically sends a JSON payload to clients connected to /register-receiver with
this.receivers[receiverId].write(JSON.stringify({
// some big json
}))
I have another node program which acts as a client that makes a connection to the server on startup.
const options = {
    agent: false,
    host: <my host>,
    defaultPort: <my port>,
    path: `/register-receiver?id=${activeConfig.receiver.id}`,
    headers: {
        connection: 'keep-alive',
        'Content-Type': 'application/json',
    }
};
http.get(options, res => {
res.setEncoding('utf8');
res.on('data', data => {
try {
const response = JSON.parse(data);
// do stuff with response
} catch(e) {
console.log(`error ${e} with ${data}`);
}
});
res.on('end', () => console.log(`connection finished`) )
})
The server needs to periodically send JSON payloads to the client. The client should receive these JSONs and do something with them. However, the problem is that large JSON writes are chunked, so the client receives the JSON in pieces. This breaks JSON.parse(data), and the client then doesn't know how to process the server's payloads. I can't rely on res.on('end') to detect the completion of a chunked write because this is a keep-alive request that should stay open forever.
I want to avoid designing my own protocol for combining JSON chunks because of its complexity. It's not as simple as concatenating the strings, since I could end up with interleaved JSON chunks if the server sends two large JSON payloads at the same time.
Is it possible to force the server to write the entire JSON as one chunk in the stream? How can I set up my client so that it establishes a "forever" connection with my server and listens for complete JSON payloads?
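Purely as an illustration (this is not from the thread), one common way to frame messages over a long-lived response is newline-delimited JSON: the server terminates every payload with '\n', and the client buffers chunks and only parses complete lines. Because each payload is written with a single res.write() call, its bytes are not interleaved with those of another payload. A rough sketch:
// server: one newline-terminated JSON document per message
this.receivers[receiverId].write(JSON.stringify({ /* some big json */ }) + '\n');

// client: accumulate chunks, parse only complete lines
let buffer = '';
http.get(options, res => {
  res.setEncoding('utf8');
  res.on('data', data => {
    buffer += data;
    const lines = buffer.split('\n');
    buffer = lines.pop();               // keep the trailing partial line for later
    for (const line of lines) {
      if (!line.trim()) continue;
      try {
        const response = JSON.parse(line);
        // do stuff with response
      } catch (e) {
        console.log(`error ${e} with ${line}`);
      }
    }
  });
});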
I am trying to set up an express js server that will be hosting a mongodb database. Everything is pretty standard: I have some routes open that will take in data from the client and then store that in the database.
Here is my query string:
let url = "http://xxx.xxx.xx.xxx:3000/update/data=" + JSON.stringify(params);
What I have noticed is that if params doesn't contain much information, it works fine. However, if params contains a lot of information, then the client throws this error:
Failed to load resource: The network connection was lost.
Http failure response for (unknown url): 0 Unknown Error
(This same error is happening in both Safari and Chrome.)
For example, if params is as below:
{
"accountId": "12345678910",
"data": [
1, 2, 3, 4
]
}
then there is no issue. However, if params.data is a huge array with a ton of information in it instead of just [1, 2, 3, 4], then the error is thrown.
Also, my Express server never even seems to receive the request. No logs; nothing. What I would expect is just a normal response and result; however, it seems like the client is giving up on sending something that large. Perhaps it has something to do with sending it as one big string?
You put your data in your URL, but URLs have limited length.
You need to use POST and put your data in the HTTP request body.
You haven't shown us how you use that URL, so it's hard to make suggestions about altering your code, but using http.request() is the way to go. Something like this might work:
const http = require('http');

const payload = JSON.stringify(params);
const url = 'http://xxx.xxx.xx.xxx:3000/update/';
const options = {
method: 'POST', // <--- tell it to POST
headers: {
'Content-Type': 'application/json', // <--- tell it you're posting JSON
'Content-Length': Buffer.byteLength(payload) // <--- tell it how much data you're posting.
}
};
const req = http.request(url, options, (res) => {
/* handle stuff coming back from request here */
console.log(`STATUS: ${res.statusCode}`);
console.log(`HEADERS: ${JSON.stringify(res.headers)}`);
res.setEncoding('utf8');
let chunks=[];
res.on('data', (chunk) => {
chunks.push(chunk);
console.log(`BODY: ${chunk}`);
});
res.on('end', () => {
const resultingData = chunks.join(''); // join with no separator, not the default ','
console.log('No more data in response.');
});
});
req.on('error', (e) => {
console.error(`problem with request: ${e.message}`);
});
// write data to request body
req.write(payload);
req.end();
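On the Express side (not shown in the question, so this is an assumption about the setup), the server also needs a JSON body parser with a limit large enough for the payload, and a POST route matching the URL above:
const express = require('express');
const app = express();

// default limit is ~100kb, which a huge data array would exceed
app.use(express.json({ limit: '10mb' }));

app.post('/update', (req, res) => {
  const { accountId, data } = req.body;   // the fields shown in the question
  // ...store in MongoDB...
  res.json({ ok: true });
});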
I am trying to act as a proxy between a client and an IP Camera using a NodeJS server. When I request a real-time stream from the camera, it responds with
HTTP/1.0 200 OK
Content-Type: Application/octet-stream
followed by a continuous stream of data. If I open the camera stream in Chrome it initiates a never-ending download, and curling it likewise produces a continuous response.
Node appears to be buffering the response from the camera and parsing it through its HTTP parser each time. This works fine the first time, as it has the correct headers, but upon the second buffer of data it errors with
HPE_INVALID_HEADER_TOKEN
Can someone please help explain why this is happening? It's a continuous stream of data, so why is it trying to parse HTTP headers on the second buffer? I am not sure whether there is an option I am missing or whether my camera is simply not following the HTTP specification properly.
Edit: Example Code
const options = {
family: 4,
headers: {
Authorization: 'Basic ' + base64EncodedAuth,
},
host: '192.168.1.131',
method: 'GET',
path: '/cgi-bin/realmonitor.cgi?action=getStream&channel=1&subtype=0',
port: 80,
protocol:'http:',
};
const req = http.request(options, (res) => {
console.log(`STATUS: ${res.statusCode}`);
console.log(`HEADERS: ${JSON.stringify(res.headers)}`);
res.on('data', (chunk) => {
console.log(`BODY: ${chunk}`);
});
res.on('end', () => {
console.log('No more data in response.');
});
});
req.on('error', (e) => {
console.log(`problem with request: ${e.message}`);
});
req.end();
The only callback that is hit is the 'error' one.
I further examined the curl log from the camera and noticed that everything was being marked as:
<= Recv header
The camera never sends the blank line (CRLF) required by the HTTP specification to signal that all the headers have been sent. That is why the parser kept trying to interpret the stream as headers and, quite rightly, throwing an error.
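If the camera itself can't be fixed, one possible workaround (my own sketch, not part of the original answer) is to bypass Node's HTTP parser entirely and read the camera's output over a raw TCP socket with the net module, treating everything after the malformed header block as the stream:
const net = require('net');

// talk to the camera without Node's HTTP parser
const socket = net.connect(80, '192.168.1.131', () => {
  socket.write(
    'GET /cgi-bin/realmonitor.cgi?action=getStream&channel=1&subtype=0 HTTP/1.0\r\n' +
    'Authorization: Basic ' + base64EncodedAuth + '\r\n' +
    '\r\n'
  );
});

let headerSkipped = false;
socket.on('data', (chunk) => {
  if (!headerSkipped) {
    // crude: assumes the camera's header block arrives in the first chunk
    headerSkipped = true;
    return;
  }
  console.log(`BODY: ${chunk.length} bytes`);
});
socket.on('error', (e) => console.log(`problem with request: ${e.message}`));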
I'm trying to send an SSE text/event-stream response from an express.js endpoint. My route handler looks like:
function openSSE(req, res) {
res.writeHead(200, {
'Content-Type': 'text/event-stream; charset=UTF-8',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive',
'Transfer-Encoding': 'chunked'
});
// support the polyfill
if (req.headers['x-requested-with'] == 'XMLHttpRequest') {
res.xhr = null;
}
res.write(':' + Array(2049).join('\t') + '\n'); //2kb padding for IE
res.write('id: '+ lastID +'\n');
res.write('retry: 2000\n');
res.write('data: cool connection\n\n');
console.log("connection added");
connections.push(res);
}
Later I then call:
function sendSSE(res, message){
res.write(message);
if (res.hasOwnProperty('xhr')) {
clearTimeout(res.xhr);
res.xhr = setTimeout(function () {
res.end();
removeConnection(res);
}, 250);
}
}
My browser makes and holds the request.
None of the response gets pushed to the browser, and none of my events are fired. If I kill the express.js server, the response is suddenly drained and every event hits the browser at once.
If I update my code to add res.end() after the res.write(message) line, it flushes the stream correctly; however, it then falls back to event polling and doesn't stream the response.
I've tried adding padding to the head of the response like
res.write(':' + Array(2049).join('\t') + '\n');
as I've seen from other SO posts that this can trigger a browser to drain the response.
I suspect this is an issue with express.js, because I had previously been using this code with Node's native http server and it was working correctly. So I'm wondering if there is some way to bypass Express's wrapping of the response object.
This is the code I have working in my project.
Server side:
router.get('/listen', function (req, res) {
res.header('transfer-encoding', 'chunked');
res.set('Content-Type', 'text/json');
var callback = function (data) {
console.log('data');
res.write(JSON.stringify(data));
};
//Event listener which calls calback.
dbdriver.listener.on(name, callback);
res.socket.on('end', function () {
//Removes the listener on socket end
dbdriver.listener.removeListener(name, callback);
});
});
Client side:
xhr = new XMLHttpRequest();
xhr.open("GET", '/listen', true);
xhr.onprogress = function () {
//responseText contains ALL the data received
console.log("PROGRESS:", xhr.responseText)
};
xhr.send();
I was struggling with this one too, so after some browsing and reading I solved the issue by setting an extra header on the response object:
res.writeHead(200, {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
"Content-Encoding": "none"
});
Long story short, when the EventSource negotiates with the server, it sends an Accept-Encoding: gzip, deflate, br header, which makes Express respond with a Content-Encoding: gzip header. So there are two solutions for this issue: the first is to add a Content-Encoding: none header to the response, and the second is to actually gzip-compress your response.
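For the second option, a rough sketch (my illustration, not from the original answer) of compressing the event stream yourself with zlib, flushing after each event so the browser receives it immediately instead of it sitting in the compressor's buffer:
const zlib = require('zlib');

function openCompressedSSE(req, res) {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Content-Encoding': 'gzip'      // now the header matches the body we actually send
  });

  const gzip = zlib.createGzip();
  gzip.pipe(res);

  gzip.write('retry: 2000\n');
  gzip.write('data: cool connection\n\n');
  gzip.flush();                     // push buffered compressed bytes out to the client
}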