I have an endpoint in a Node app which is used to download images:
var express = require('express');
var request = require('request');
var app = express();

var images = {
    'car': 'http://someUrlToImage.jpg',
    'boat': 'http://someUrlToImage.jpg',
    'train': 'http://someUrlToImage.jpg'
};

app.get('/api/download/:id', function(req, res) {
    var id = req.params.id;
    res.setHeader("content-disposition", "attachment; filename=image.jpg");
    request.get(images[id]).pipe(res);
});
Now this code works fine, but after a few hours of the app running, the endpoint just hangs.
I am monitoring the memory usage of the app, which remains consistent, and other endpoints that just return some JSON respond as normal, so it is not as if the event loop is somehow being blocked. Is there a gotcha of some kind that I am missing when using the request module to pipe a response? Or is there a better solution to achieve this?
I am also using the Express module.
You should add an error listener on your request, because errors are not propagated through pipes. That way, if your request has an error, the handler will close the connection and you'll get the reason.
request
    .get(...)
    .on('error', function(err) {
        console.log(err);
        res.end();
    })
    .pipe(res);
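Applied to the endpoint above, a minimal sketch might look like this (assuming the same images map and Express app from the question):

app.get('/api/download/:id', function(req, res) {
    var id = req.params.id;
    res.setHeader("content-disposition", "attachment; filename=image.jpg");
    request
        .get(images[id])
        .on('error', function(err) {
            // Without an error handler, a failed upstream request can
            // leave the client connection hanging open indefinitely.
            console.log(err);
            res.end();
        })
        .pipe(res);
});

If upstream errors go unhandled for hours, hung responses can pile up, which could explain the symptom described above.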
I am using the Firebase database for storing data. If I update data the first time it works fine, but if I update a second time it gives an error.
The code for updating data is:
router.post("/saveSetting", function(req, res) {
var fbref = firebase.database().ref("setting");
fbref.set(req.body).then(function(data) {
res.end('ok');
});
})
/*get data from firebase*/
router.get('/setting', function(req, res) {
var fbref = firebase.database().ref("setting");
fbref.once("value").then(function(data) {
res.send(data.val());
})
});
Terminal screenshot (not shown).
Firebase is built entirely on sockets. If you are listening using on(), then you have to deregister your listener after the event has fired, or you can simply use a once() listener.
This issue arises when you try to send a second response to the client for the same request.
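As a rough sketch (assuming the same setting ref as in the question), the two options look like this:

var fbref = firebase.database().ref("setting");

// Option 1: once() fires a single time and cleans up after itself.
fbref.once("value").then(function(snapshot) {
    console.log(snapshot.val());
});

// Option 2: if you use on(), keep a reference to the callback
// so you can deregister it later with off().
function onValue(snapshot) {
    console.log(snapshot.val());
}
fbref.on("value", onValue);
// ...later, when you are done listening:
fbref.off("value", onValue);

With on(), the callback fires again on every write, so a handler that calls res.send() will attempt a second response the next time the data changes.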
I've been trying to stream binary data (PDF, images, other resources) directly from a request to a remote server but have had no luck so far. To be clear, I don't want to write the document to any filesystem. The client (browser) will make a request to my node process which will subsequently make a GET request to a remote server and directly stream that data back to the client.
var request = require('request');

app.get('/message/:id', function(req, res) {
    // db call for specific id, etc.
    var options = {
        url: 'https://example.com/document.pdf',
        encoding: null // return the body as a Buffer instead of a string
    };

    // First try - unsuccessful
    request(options).pipe(res);

    // Second try - unsuccessful (the two attempts were tried separately)
    request(options, function(err, response, body) {
        var binaryData = body.toString('binary');
        res.header('content-type', 'application/pdf');
        res.send(binaryData);
    });
});
Logging both data and binaryData with console.log shows that the proper data is there, but the PDF that gets downloaded is corrupt. I can't figure out why.
Wow, never mind. It turns out Postman (the Chrome app) was hijacking the request and response somehow. The // First try example in my code excerpt works properly in the browser.
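For reference, a minimal sketch of the working proxy (assuming the same remote URL) only needs the pipe, optionally with an explicit content type:

var request = require('request');

app.get('/message/:id', function(req, res) {
    res.header('content-type', 'application/pdf');
    // pipe() streams the remote document straight through to the
    // client without buffering it in memory or touching the filesystem.
    request({ url: 'https://example.com/document.pdf', encoding: null }).pipe(res);
});

Piping also forwards the remote server's headers by default, so the explicit content-type is often unnecessary.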
I am unable to find the issue in the following script. What I want to achieve is a Node log server that listens for POST requests with a log title and log detail as query parameters, writes them to a file, and then returns the log as JSON on a GET request.
Problem:
It sometimes shows a loader indefinitely and sometimes returns the required log.
Note:
The process spawning is done to update the browser during logging; if someone has a better solution, please suggest it.
Post Call:
http://127.0.0.1:8081/log?title="test"&detail="test detail"
Code:
var express = require("express");
var spawn = require('child_process').spawn;
var fs = require("fs");

var srv = express();
var outputFilename = '/tmp/my.json';

// Merge route params, body fields, and query-string fields into one object.
function getParamsObject(context) {
    var params = {};
    for (var propt_params in context.params) {
        params[propt_params] = context.params[propt_params];
    }
    for (var propt_body in context.body) {
        params[propt_body] = context.body[propt_body];
    }
    for (var propt_query in context.query) {
        params[propt_query] = context.query[propt_query];
    }
    return params;
}

srv.get("/", function(req, res) {
    res.send("Hello World From Index\n");
});

srv.get("/Main", function(req, res) {
    res.send("Hello World From Main\n");
});

srv.get("/ReadFile", function(req, res) {
    fs.readFile("example_one.txt", function(err, data) {
        if (err) throw err;
        res.send(data.toString());
    });
});

srv.get("/ReadFileJSON", function(req, res) {
    fs.readFile("example_one.txt", function(err, data) {
        if (err) throw err;
        res.setHeader("content-type", "application/json");
        // Note: Parser is not defined anywhere in this script; it would
        // need to be required from whichever parsing module is intended.
        res.send(new Parser().parse(data.toString()));
    });
});

srv.post("/log", function(req, res) {
    var input = getParamsObject(req);
    if (input.detail) {
        var myData = {
            Date: (new Date()).toString(),
            Title: input.title,
            Detail: input.detail
        };
        fs.writeFile(outputFilename, JSON.stringify(myData, null, 4), function(err) {
            if (err) {
                console.log(err);
            }
        });
    }
    res.setHeader("content-type", "application/json");
    res.send({message: "Saved"});
});

srv.get("/log", function(req, res) {
    var child = spawn('tail', ['-f', outputFilename]);
    child.stdout.pipe(res);
    // 'close' fires on the request when the client disconnects; a writable
    // response never emits 'end', so res.on('end', ...) would never fire.
    req.on('close', function() {
        child.kill();
    });
});

srv.listen(8081);
console.log('Server running on port 8081.');
To clarify the question...
You want some requests to write to a log file.
You want to effectively do a log tail over HTTP, and are currently doing that by spawning tail in a child process.
This isn't all that effective.
Problem: It sometimes shows a loader indefinitely and sometimes returns the required log.
Web browsers buffer data. You're sending the data, sure, but the browser isn't going to display it until a minimum buffer size is reached. And then, there are rules for what will display when all the markup (or just text in this case) hasn't loaded yet. Basically, you can't stream a response to the client and reliably expect the client to do anything with it until it is done streaming. Since you're tailing a log, that puts you in a bad predicament.
What you must do is find a different way to send that data to the client. This is a good candidate for web sockets. You can create a persistent connection between the client and the server and then handle the data immediately rather than worrying about a client buffer. Since you are using Node.js already, I suggest looking into Socket.IO as it provides a quick way to get up and running with web sockets, and long-polling JSON (among others) as a fallback in case web sockets aren't available on the current browser.
Next, there is no need to spawn another process to read a file the way tail does. As Trott has pointed out, there is an NPM package for doing exactly what you need: https://github.com/lucagrulla/node-tail. Just set up an event handler for the line event, and then fire a line event on the web socket so that your JavaScript client receives it and displays it to the user immediately.
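A rough sketch of that wiring (assuming socket.io and node-tail are installed, and the same /tmp/my.json log file as in the question):

var app = require('express')();
var http = require('http').createServer(app);
var io = require('socket.io')(http);
var Tail = require('tail').Tail;

var tail = new Tail('/tmp/my.json');

// Each time a line is appended to the log file, push it to
// every connected client over the web socket immediately.
tail.on('line', function(line) {
    io.emit('line', line);
});

http.listen(8081);

On the client, a matching socket.on('line', ...) handler can append each line to the page as it arrives, with no buffering problem.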
There are a couple of things that seem to stand out as unnecessary complications that may be the source of your problem.
First, the spawn seems unnecessary. It appears you want to open a file for reading and get updated any time something gets added to the file. You can do this in Node with fs.watch(), fs.watchFile(), or the node-tail module. This may be more robust than using spawn() to create a child process.
Second (and less likely to be the source of the problem, I think), you seem to be using query string parameters on a POST request. While not invalid, this is unusual. Usually, if you are using the POST method, you send the data as part of the body of the request; if you are using the GET method, data is sent as a query string. If you are not using the body to send data, switch to GET.
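For example, a minimal sketch of the fs.watchFile() approach (assuming the same /tmp/my.json path) avoids the child process entirely:

var fs = require('fs');

// fs.watchFile polls the file's stats and invokes the listener
// whenever they change.
fs.watchFile('/tmp/my.json', function(curr, prev) {
    if (curr.mtime.getTime() !== prev.mtime.getTime()) {
        fs.readFile('/tmp/my.json', 'utf8', function(err, data) {
            if (err) return console.log(err);
            // Hand the updated contents to whatever transport you
            // choose (a web socket, long polling, etc.).
            console.log('log updated:', data);
        });
    }
});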
I'm really new to node.js so please bear with me if I'm making an obvious mistake.
To understand node.js, I'm trying to create a webserver that basically:
1) appends "hello world" to the page every time the root URL (localhost:8000/) is hit.
2) lets the user go to another URL (localhost:8000/getChatData) to display all the data built up from the root URL (localhost:8000/) being triggered.
Problem I'm experiencing:
1) I'm having an issue with displaying that data on the rendered page. I have a timer that should call get_data() every second and update the screen with the data variable that stores the appended output. Specifically, the line response.simpleText(200, data); below isn't working correctly.
The file
// Load the node-router library by creationix
var server = require('C:\\Personal\\ChatPrototype\\node\\node-router').getServer();

var data = null;

// Configure our HTTP server to respond with Hello World on the root request
server.get("/", function(request, response) {
    if (data != null) {
        data = data + "hello world\n";
    } else {
        data = "hello world\n";
    }
    response.writeHead(200, {'Content-Type': 'text/plain'});
    console.log(data);
    response.simpleText(200, data);
    response.end();
});

// Respond with the chat data accumulated so far
server.get("/getChatData", function(request, response) {
    setInterval(function() { get_data(response); }, 1000);
});

function get_data(response) {
    if (data != null) {
        response.writeHead(200, {'Content-Type': 'text/plain'});
        response.simpleText(200, data);
        console.log("data:" + data);
        response.end();
    } else {
        console.log("no data");
    }
}

// Listen on port 8000 on localhost
server.listen(8000, "localhost");
If there is a better way to do this, please let me know. The goal is basically to have a way for the server to update a variable when a URL is called, and to have another HTML page report/display the updated data dynamically every second.
Thanks,
D
The client-server model works by a client sending a request to the server and the server in return sending a response. The server cannot send the client a response that the client hasn't asked for; the client initiates the request. Therefore you cannot have the server changing the response object on an interval.
The client will not see those changes. The way something like this is usually handled is through AJAX: the initial response from the server includes JavaScript code that makes the client issue requests to the server on an interval.
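As a rough sketch (hypothetical page markup, assuming the /getChatData endpoint returns plain text), the client-side polling might look like:

// Client-side script served with the initial page; assumes an
// element with id="chat" exists in the markup.
setInterval(function() {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', '/getChatData');
    xhr.onload = function() {
        // Replace the displayed contents with the latest data.
        document.getElementById('chat').textContent = xhr.responseText;
    };
    xhr.send();
}, 1000);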
setInterval accepts a function without parameters, which makes sense since it will be executed later in time. Any values you need inside that function must still be available at that point. In your case, the response object you are trying to pass is a local instance whose scope is only inside server.get's callback (where you set up the setInterval).
There are several ways you can resolve this issue. You can keep a copy of the response instance in the outer scope where get_data lives, or you can move get_data entirely inside the callback and remove the setInterval. The first solution is not recommended: if getChatData is called several times within a second, only the last copy of the response will prevail.
But my suggestion would be to keep the data in a database and return it when getChatData is called.
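A minimal sketch of responding once per request (keeping the same node-router handlers as the question):

server.get("/getChatData", function(request, response) {
    // Respond once with whatever has accumulated so far, instead
    // of trying to write to the response on a timer.
    response.writeHead(200, {'Content-Type': 'text/plain'});
    response.end(data != null ? data : "no data\n");
});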
I have been working on the answer from this question, located here: How to make a socket a stream?, to connect an https response to S3 after imagemagick. As per loganfsmyth's recommendation I commented out the req.end(image) line; however, when I attempt to upload a file, the server simply times out. I experience similar behaviour when I uncomment the req.end(image) line, with the exception that the image successfully uploads to S3. Can someone clarify for me which way is correct? Also, if it is right to uncomment the req.end(image) line, what is the best way to send a response to the browser to prevent it from timing out?
https.get(JSON.parse(queryResponse).data.url, function(res) {
    graphicsmagick(res)
        .resize('50', '50')
        .stream(function(err, stdout, stderr) {
            // ws and output appear to be left over from an earlier
            // attempt that wrote to the filesystem; neither is used below.
            var ws = fs.createWriteStream(output);
            var i = [];
            stdout.on('data', function(data) {
                i.push(data);
            });
            stdout.on('close', function() {
                var image = Buffer.concat(i);
                var req = S3Client.put("new-file-name", {
                    'Content-Length': image.length,
                    'Content-Type': res.headers['content-type']
                });
                req.on('response', function(res) { // prepare 'response' callback from S3
                    if (200 == res.statusCode)
                        console.log('it worked');
                });
                //req.end(image) // send the content of the file and an end
            });
        });
});
Basically, the page was being requested twice because of the favicon, which caused the image to be overwritten. See: node.js page refresh calling resources twice?
In the question you link to, the user was using putStream, so calling req.end() is incorrect. However, in your case you are using put directly, so you need to call req.end(). Otherwise, with it commented out, you never actually use the image value except for its length, so you never send the image data.
It is hard to tell without seeing the server handler that actually runs this code, but you need to (optionally) return some response and then .end() the actual connection to the browser too, or it will sit there waiting.
So if you have something like this:
http.createServer(function(req, browserResponse) {
    // Other code.

    // Here 'req' is the S3 request created in the other code above
    // (as in the question), not the incoming browser request.
    req.on('response', function(s3res) { // prepare 'response' callback from S3
        if (200 == s3res.statusCode) console.log('it worked');
        // Close the response. You can also pass it data to send to the browser.
        browserResponse.end();
    });

    // Other code.
});