Why does node-webkit crash when downloading a big file? - node.js

I'm using node-webkit 0.7.5 (the only version so far that lets me display video correctly).
I'm trying to download a big video file (200+ MB) and sometimes, though not always, node-webkit crashes when the file finishes downloading. WriteStream.end() is called, and that is when the crash happens.
My code:
var fs = require('fs');
var request = require("request");

var req = request(self.requestOptions(), function (err) {
    if (err) {
        //handle error
    }
});

req.on("response", function (response) {
    writeFile(response);
});

function writeFile(response) {
    var file = fs.createWriteStream(filename);
    var self = this;

    response.on('data', function (data) {
        file.write(data);
    });

    response.on('end', function () {
        log.debug("response end");
        file.end(); //crash happens after calling this
    });

    response.on('error', function (err) {
        //handle error, not called in this situation
    });

    file.on('finish', function () {
        log.debug("file finish");
    });
}
In general this approach works well, but with some files and situations node-webkit just decides to call it quits.
Am I doing something obviously wrong? I've searched the net for possible clues or solutions, but I haven't found anything.
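For what it's worth, here is a pipe()-based sketch of the same download, which handles backpressure and closes the file for me instead of calling file.write()/file.end() by hand (filename, self.requestOptions() and log are the same names as in my code above):
var fs = require('fs');
var request = require('request');

var file = fs.createWriteStream(filename);
request(self.requestOptions())
    .on('error', function (err) {
        //handle error
    })
    .pipe(file);

file.on('finish', function () {
    log.debug("file finish");
});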

Related

Passing the result of a function to server.js in Node JS

I am a newbie in Node.js and trying to get my head around functional programming.
I have the following code in a file called findinfo.js, and I'm trying to pass the result to the main server.js:
const fs = require('fs');
const values = ["yes", "no", "?", "unknown", "partial"];
var cInfo = [];

function getFile (cb) {
    fs.readFile('./scripts/blahblah.json', 'utf-8', function (err, jfile) {
        if (err) {
            throw new Error(err);
        }
        console.log("Function is executing...");
        JSON.parse(jfile);
        console.log('Parsing file done');
        cb(jfile);
    });
}
Then I'm trying to call this function from server.js,
var findinfo = require('./findinfo');
console.log(getFile());
which as expected crashes the program.
So what changes should I make in order to make it work?
You need to export getFile so it can be imported using require.
const fs = require('fs');
const values = ["yes", "no", "?", "unknown", "partial"];
var cInfo = [];

function getFile (cb) {
    fs.readFile('./scripts/blahblah.json', 'utf-8', function (err, jfile) {
        if (err) {
            // throw new Error(err); // don't throw inside an async callback
            return cb(err);
        }
        console.log("Function is executing...");
        JSON.parse(jfile); // note: the parsed value is discarded here; jfile stays a raw string
        console.log('Parsing file done');
        cb(null, jfile);
    });
}

module.exports = getFile;
server.js
var getFile = require('./findinfo');

getFile(function (err, file) {
    console.log(err, file);
});
Since getFile is an asynchronous function, you have to wait until it finishes, that is, until cb is called, before you can console.log the result.
Using throw in an asynchronous callback is not recommended either, since it will crash the server; it's better to pass the error to the callback.
You should take a look at this question to learn more about handling asynchronous code:
How do I return the response from an asynchronous call?
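If you prefer promises over callbacks, here is a minimal sketch of the same idea (an alternative, not part of the answer above; it also resolves with the parsed object instead of the raw string):
const fs = require('fs');

function getFile() {
    return new Promise(function (resolve, reject) {
        fs.readFile('./scripts/blahblah.json', 'utf-8', function (err, jfile) {
            if (err) {
                return reject(err);
            }
            resolve(JSON.parse(jfile)); // resolve with the parsed object
        });
    });
}

module.exports = getFile;

// server.js:
// require('./findinfo')().then(console.log).catch(console.error);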

Node.js in aws callback variable

Hi, I'm trying to use a callback to make a variable available outside the function, but something is wrong; I think my callback is not as correct as I believed:
function latitude(callback) {
    var mylat;
    const https = require('https');

    https.get('https://url_of_my_json', (resp) => {
        let data = '';

        // A chunk of data has been received.
        resp.on('data', (chunk) => {
            data += chunk;
        });

        // The whole response has been received. Print out the result.
        resp.on('end', () => {
            mylat = JSON.parse(data).results[0].geometry.location.lat;
            callback(mylat);
        });
    }).on("error", (err) => {
        console.log("Error: " + err.message);
    });
}

var mylat = latitude(); // variable I want to use in the rest of the code
Thanks
The syntax of the callback is incorrect.
Here is an example, kept as simple as possible, for a better understanding of callbacks. Just save it as newfile.js and run node newfile to try it.
function myNew(next) {
    console.log("I'm the one who initiates the callback");
    next("nope", "success");
}

myNew(function (err, res) {
    console.log("I got back from callback", err, res);
});
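Applied to the latitude function from your question, the value has to be consumed inside the callback rather than assigned from the return value (a sketch; the URL in your code is a placeholder):
latitude(function (mylat) {
    // mylat only exists here, once the HTTPS response has ended
    console.log("latitude:", mylat);
    // ...put the rest of the code that needs mylat here
});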
happy coding :)

Nodejs: How to send a readable stream to the browser

If I query the Box REST API and get back a readable stream, what is the best way to handle it? How do you send it to the browser? (DISCLAIMER: I'm new to streams and buffers, so some of this code is pretty theoretical.)
Can you pass the readStream in the response and let the browser handle it? Or do you have to stream the chunks into a buffer and then send the buffer?
export function getFileStream(req, res) {
    const fileId = req.params.fileId;
    console.log('fileId', fileId);

    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            console.log('error', err);
            return res.status(500).send(err);
        }
        res.type('application/octet-stream');
        console.log('stream', stream);
        return res.status(200).send(stream);
    });
}
Will ^^ work, or do you need to do something like:
export function downloadFile(req, res) {
    const fileId = req.params.fileId;
    console.log('fileId', fileId);

    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            console.log('error', err);
            return res.status(500).send(err);
        }
        const buffers = [];
        console.log('stream', stream);

        stream.on('data', (chunk) => {
            buffers.push(chunk);
        })
        .on('end', function () {
            const finalBuffer = Buffer.concat(buffers);
            return res.status(200).send(finalBuffer);
        });
    });
}
The first example would work if you changed your theoretical line to:
- return res.status(200).send(stream);
+ res.writeHead(200, {header: here})
+ stream.pipe(res);
That's one of the nicest things about Node streams. The other case would work too (in essence), but it would accumulate the whole file in memory unnecessarily.
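A fleshed-out version of that diff might look like this (the Content-Type header here is an assumption; set whatever matches your file):
export function getFileStream(req, res) {
    const fileId = req.params.fileId;

    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            return res.status(500).send(err);
        }
        // Write the headers, then pipe the stream straight into the response;
        // pipe() handles backpressure and ends the response when the stream ends.
        res.writeHead(200, { 'Content-Type': 'application/octet-stream' });
        stream.pipe(res);
    });
}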
If you'd like to check a working example, here's one I wrote based on scramjet, express and browserify:
https://github.com/MichalCz/scramjet/blob/master/samples/browser/browser.js
There the streams go from the server to the browser. With minor modifications it will fit your problem.

Deleting file in node.js not working

I am using Node.js with Express.
I am trying to delete a file after sending it to the client with Express.
function deleteFile(file) {
    fs.unlink(file, function (err) {
        if (err) {
            logger.error(err);
        }
    });
}

app.get("/deleteFileAfterDownload", function (req, res) {
    var fileName = "a.pdf";
    var stream = fs.createReadStream(fileName);
    var streamClosed = false;

    req.on('end', function () {
        if (!streamClosed) {
            stream.emit('close');
            // I tried stream.destroy() but that is also not working
        }
    });

    stream.on('close', function () {
        streamClosed = true;
        deleteFile(fileName);
    });

    req.on('data', function () {});

    stream.pipe(res);
});
But the file is not getting deleted. It seems the process is still holding the file, because as soon as I end the process, the file does get deleted.
Can anybody tell me why?
If I am doing it wrong, please show me a better way.
Please add a log in deleteFile to make sure it is actually called.
Try simplifying it:
var fileName = "a.pdf"
var stream = fs.createReadStream(fileName);
stream.pipe(res);
res.once("finish", function () {
deleteFile(fileName);
});
The previous example only deletes the file if the download finished. If you want to delete the file unconditionally, try the following:
var fileName = "a.pdf";
var stream = fs.createReadStream(fileName);
stream.pipe(res).once("close", function () {
stream.close();
deleteFile(fileName);
});
stream.close() is important here, because the stream is not closed automatically if the pipe is aborted.
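If you want to cover both normal completion and an aborted download without deleting twice, a small guard works (a sketch combining the two snippets above):
var fileName = "a.pdf";
var stream = fs.createReadStream(fileName);
var deleted = false;

function cleanup() {
    if (!deleted) {
        deleted = true;
        stream.close();       // release the file handle even if the pipe aborted
        deleteFile(fileName); // now unlink can succeed
    }
}

stream.pipe(res);
res.once("finish", cleanup); // download completed
res.once("close", cleanup);  // client aborted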

How to write an image in Node after request() has returned

This feels like an obvious question but it's perplexing me: I want a Node function that downloads a resource at a URI. I need it to work for several different content types without the user needing to specify which type it is.
I know how to pipe request to fs.createWriteStream when you know it's going to be an image, but not how to handle it when you've already invoked the callback from request. Here's where I am:
var request = require('request'),
    fs = require('graceful-fs');

function cacheURI(uri, cache_path, cb) {
    request(uri, function (err, resp, body) {
        var content_type = resp.headers['content-type'].toLowerCase().split("; ")[0],
            type = content_type.split("/")[0],
            sub_type = content_type.split("/")[1];

        if (sub_type == "json") {
            body = JSON.parse(body);
        }

        if (type == "image") {
            // this is where the trouble starts
            var ws = fs.createWriteStream(cache_path);
            ws.write(body);
            ws.on('close', function () {
                console.log('image done');
                console.log(resp.socket.bytesRead);
                ws.end();
                cb();
            });
        } else {
            // this works fine for text resources
            fs.writeFile(cache_path, body, cb);
        }
    });
}
This answer to a previous question suggests the following:
request.get({url: 'https://someurl/somefile.torrent', encoding: 'binary'}, function (err, response, body) {
    fs.writeFile("/tmp/test.torrent", body, 'binary', function (err) {
        if (err)
            console.log(err);
        else
            console.log("The file was saved!");
    });
});
But I can't pass "binary" to request if I don't yet know the type of response I'll get.
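One thing I could try: request also accepts encoding: null, which makes body a Buffer regardless of content type, so the same write path would work for text and binary alike. A sketch of cacheURI rewritten that way, assuming the cached file can always be written verbatim:
var request = require('request'),
    fs = require('graceful-fs');

function cacheURI(uri, cache_path, cb) {
    request({uri: uri, encoding: null}, function (err, resp, body) {
        if (err) {
            return cb(err);
        }
        // body is a Buffer here; use body.toString('utf-8') if you need text
        fs.writeFile(cache_path, body, cb);
    });
}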
UPDATE
Per the suggested answer, changing "close" to "finish" in the event handler does fire the callback:
if (opts.image) {
    var ws = fs.createWriteStream(opts.path);
    ws.on('finish', function () {
        console.log('image done');
        console.log(resp.socket.bytesRead);
    });
    //tried as buffer as well
    //ws.write(new Buffer(body));
    ws.write(body);
    ws.end();
}
This does write the image file, but not correctly; the output is corrupted.
As suggested here, try using the finish event (if you have Node >= v0.10):
ws.on('finish', function () {
    console.log('image done');
    console.log(resp.socket.bytesRead);
    ws.end();
    cb();
});
