Downloading heavy images (>2GB) from a server using NodeJS - node.js

I am using AngularJS and NodeJS for my project. I store my images on AWS EFS storage attached to an AWS EC2 instance. Everything worked fine while I was downloading small images (less than 1 GB), but when I try to download big images (greater than 1 GB) it throws an error.
Here is my Back-end code,
getTiff: function(req, res) {
    res.set('Content-Type', 'application/octet-stream')
    var name = req.param('filename').split('.')[0]
    var id = req.param('id')
    var filePath = '/mys3storage/' + id + '/' + name +
        '/3_dsm_ortho/2_mosaic/' + name + '_transparent_mosaic_group1.tiff'
    // Reads the entire file into memory before sending it
    var files = fs.readFileSync(filePath)
    res.send(files)
},
error: Sending 500 ("Server Error") response:
RangeError: Invalid typed array length
at new Uint8Array (native)
at createBuffer (buffer.js:23:15)
at allocate (buffer.js:98:12)
at new Buffer (buffer.js:53:12)
at Object.fs.readFileSync (fs.js:419:16)
at Object.module.exports.getAutocad.res.getContourLines.res.getContourPdf.res.getOrthoM (G:\desktop\unifliBackend\api\controllers\WebController.js:153:20)
at wrapper (G:\desktop\unifliBackend\node_modules\@sailshq\lodash\lib\index.js:3250:19)
at routeTargetFnWrapper (G:\desktop\unifliBackend\node_modules\sails\lib\router\bind.js:181:5)
at callbacks (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:164:37)
at param (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:138:11)
at param (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:135:11)
at param (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:135:11)
at pass (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:145:5)
at nextRoute (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:100:7)
at callbacks (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:167:11)
at alwaysAllow (G:\desktop\unifliBackend\node_modules\sails\lib\hooks\policies\index.js:224:11)
at routeTargetFnWrapper (G:\desktop\unifliBackend\node_modules\sails\lib\router\bind.js:181:5)
at callbacks (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:164:37)
at param (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:138:11)
at param (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:135:11)
at param (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:135:11)
at pass (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:145:5)
at nextRoute (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:100:7)
at callbacks (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:167:11)
at G:\desktop\unifliBackend\node_modules\sails\lib\router\bind.js:232:18
at callbacks (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:164:37)
at param (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:138:11)
at pass (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:145:5)
at nextRoute (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:100:7)
at callbacks (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:167:11)
at G:\desktop\unifliBackend\node_modules\sails\lib\router\bind.js:232:18
at callbacks (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:164:37)
at param (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:138:11)
at pass (G:\desktop\unifliBackend\node_modules\@sailshq\express\lib\router\index.js:145:5)
I have also tried createReadStream(), but then I can only download one file at a time; on the second file the browser gives me a network failed error. Please help!
getTiff: function(req, res) {
    res.set('Content-Type', 'application/octet-stream')
    var name = req.param('filename').split('.')[0]
    var id = req.param('id')
    var filePath = '/mys3storage/' + id + '/' + name +
        '/3_dsm_ortho/2_mosaic/' + name + '_transparent_mosaic_group1.tiff'
    // Stream the file instead of buffering it all in memory
    fs.createReadStream(filePath).pipe(res)
}
This is the stream code I have used. It works fine when I download one file at a time, but if I try to download two files at once, the second one stops and the browser shows "network failed".
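For reference, a more defensive variant of that streaming handler would look something like this (a sketch only: the fs.stat call, Content-Length, Content-Disposition, and stream error handling are illustrative additions, not part of my original code; res.notFound() is Sails' built-in response):

getTiff: function(req, res) {
    var name = req.param('filename').split('.')[0]
    var id = req.param('id')
    var filePath = '/mys3storage/' + id + '/' + name +
        '/3_dsm_ortho/2_mosaic/' + name + '_transparent_mosaic_group1.tiff'
    // Stat first so the browser gets a Content-Length and can show progress
    fs.stat(filePath, function(err, stats) {
        if (err) { return res.notFound() }
        res.set('Content-Type', 'application/octet-stream')
        res.set('Content-Length', String(stats.size))
        res.set('Content-Disposition', 'attachment; filename="' + name + '.tiff"')
        var stream = fs.createReadStream(filePath)
        // If the read fails mid-transfer, end the response instead of hanging
        stream.on('error', function() { res.end() })
        stream.pipe(res)
    })
}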
Here is my browser-side code:
$scope.downloadOrthoMission = function(idForDownload, missionIdForDownload) {
    window.open(adminurl + 'getTiff/' + idForDownload + '/' + missionIdForDownload + '.tif', '_self')
}

I just referred to this answer:
Nginx node.js express download big files stop at 1.08GB
It works absolutely fine for me.

Related

Cannot read property 'toString' of undefined node.js filesystem

I have this script here. Upon loading the page, it should first load a header HTML boilerplate, then some dynamic paragraphs, and end the request with a footer HTML boilerplate. Both boilerplates come from external files.
After testing, I am receiving:
TypeError: Cannot read property 'toString' of undefined
at ReadFileContext.callback (/user_code/index.js:20:23)
at FSReqWrap.readFileAfterOpen [as oncomplete] (fs.js:367:13)
My understanding was that I needed .toString() to ensure that what was loaded from the external files was buffered as strings.
exports.request = functions.https.onRequest((req, res) => {
    fs.readFile('/public/templates/header-template.html', function(_err, data) {
        res.write(data.toString());
        console.log('Header Loaded');
    });
    res.writeHead(200, {'Content-Type': 'text/html'});
    var q = url.parse(req.url, true).query;
    var txt = '<p>Thanks <b>' + q.firstname + ' ' + q.lastname + '</b>. An email has been sent to <b>' + q.email + '</b> with details about your request.';
    res.write(txt);
    fs.readFile('/public/templates/footer-template.html', function(_err, data) {
        res.end(data.toString());
        console.log('Footer Loaded');
    });
});
Any idea what I am doing wrong?
You should pass the encoding to toString() to convert from a Buffer:
data.toString('utf8');
Additionally, it seems your variable data is not defined, so perhaps your file doesn't exist. Capture the error and see what's wrong:
if (_err) {
    console.error(_err);
}
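Putting both points together, here is a sketch of the handler with the reads sequenced and errors captured (the template paths relative to __dirname are an assumption; adjust them to wherever the files are actually deployed):

const fs = require('fs');
const path = require('path');
const url = require('url');

exports.request = functions.https.onRequest((req, res) => {
    // A leading '/' points at the filesystem root, where /public/...
    // almost certainly doesn't exist; resolve relative to this file instead
    const headerPath = path.join(__dirname, 'public/templates/header-template.html');
    const footerPath = path.join(__dirname, 'public/templates/footer-template.html');

    fs.readFile(headerPath, 'utf8', (err, header) => {
        if (err) {
            console.error(err);
            return res.status(500).end();
        }
        res.writeHead(200, {'Content-Type': 'text/html'});
        res.write(header);

        const q = url.parse(req.url, true).query;
        res.write('<p>Thanks <b>' + q.firstname + ' ' + q.lastname + '</b>. An email has been sent to <b>' + q.email + '</b> with details about your request.');

        fs.readFile(footerPath, 'utf8', (err2, footer) => {
            if (err2) {
                console.error(err2);
                return res.end();
            }
            res.end(footer);
        });
    });
});

Note that passing 'utf8' to readFile makes the callback receive a string directly, so no toString() call is needed at all.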

I want to pipe a readable css file to the http response

I have an issue with piping a readable stream to the HTTP response.
Behind the scenes there are regular request and response streams coming from the generic http createServer. I check whether req.url ends in .css, and if so I create a readable stream of that file. I see the CSS contents in console.log, with the CSS code I expect. Then I try to pipe the readable CSS file stream to the response, but in Chrome the response body is blank when I inspect it, although it is a 200 response. Any thoughts at first glance? I've tried different variations of where I have code commented out.
router.addRoute("[a-aA-z0-9]{1,50}.css$", function(matches){
var cssFile = matches[0];
var pathToCss = process.cwd() + "/" + cssFile;
// takes care of os diffs regarding path delimiters and such
pathToCss = path.normalize(pathToCss);
console.log(matches);
console.log("PATH TO CSS");
console.log(pathToCss)
var readable = fs.createReadStream(pathToCss);
var write = function(chunk){
this.queue(chunk.toString());
console.log(chunk.toString());
}
var end = function(){
this.queue(null);
}
var thru = through(write,end);
//req.on("end",function(){
res.pipe(readable.pipe(thru)).pipe(res);
//res.end();
//});
});
You need to pipe your readable stream into your through-stream, and then pipe that into the response:
readable.pipe(thru).pipe(res);
Edit: for preparing your CSS path, just use path.join instead of concatenating the path and normalizing it:
var pathToCss = path.join(process.cwd(), cssFile);
I separated this CSS route out from my normal HTML-producing routes. The problem was that my normal routes in my router object returned strings, like res.end(compiled_html_str), and the CSS file readable stream was going through that same routing function. I fixed it by isolating this route from my router.
var cssMatch = req.url.match(/.+\/(.+\.css$)/);
if (cssMatch) {
    res.writeHead(200, {"Content-Type": "text/css"});
    var cssFile = cssMatch[1];
    var pathToCss = process.cwd() + "/" + cssFile;
    // takes care of os diffs regarding path delimiters and such
    pathToCss = path.normalize(pathToCss);
    console.log(cssMatch);
    console.log("PATH TO CSS");
    console.log(pathToCss);
    var readable = fs.createReadStream(pathToCss);
    var cssStr = "";
    // buffer the whole file, then end the response with it
    readable.on("data", function(chunk) {
        cssStr += chunk.toString();
    });
    readable.on("end", function() {
        res.end(cssStr);
    });
}
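If the chunks don't need transforming, the same route can stay fully streaming instead of buffering the whole file into a string (a sketch; note that writeHead takes the status code as its first argument):

var cssMatch = req.url.match(/.+\/(.+\.css$)/);
if (cssMatch) {
    var pathToCss = path.join(process.cwd(), cssMatch[1]);
    res.writeHead(200, {"Content-Type": "text/css"});
    var readable = fs.createReadStream(pathToCss);
    // Surface read errors instead of leaving the response hanging
    readable.on("error", function(err) {
        console.error(err);
        res.end();
    });
    // pipe() ends the response when the file is done
    readable.pipe(res);
}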

Unable to read a saved file on Heroku

I am using NodeJS on Heroku.
I read a file from another server and save it into my application's /temp directory.
Next, I read the same file to pass it on to my client.
The code that saves the file and then subsequently reads it is:
http.request(options, function(pdfResponse) {
    var filename = Math.random().toString(36).slice(2) + '.pdf',
        filepath = nodePath.join(process.cwd(), 'temp/' + filename);
    pdfResponse.on('end', function() {
        fs.readFile(filepath, function(err, contents) {
            //Stuff to do after reading
        });
    });
    //Read the response and save it directly into a file
    pdfResponse.pipe(fs.createWriteStream(filepath));
});
This works well on my localhost.
However, when deployed to heroku, I get the following error:
events.js:72
throw er; // Unhandled 'error' event
Error: ENOENT, open '/app/temp/nvks0626yjf0qkt9.pdf'
Process exited with status 8
State changed from up to crashed
I am using process.cwd() to ensure that the path is built correctly, but even that did not help. As per the Heroku documentation, I am free to create files in the application's directory, which I am doing. But I can't figure out why reading the file fails...
The error you describe there is consistent with /app/temp/ not existing. You need to create it before you start writing to it. The idea is:
var fs = require("fs");
var path = require("path");
var temp_dir = path.join(process.cwd(), 'temp/');
if (!fs.existsSync(temp_dir))
fs.mkdirSync(temp_dir);
I've used the sync versions of the calls for illustration purposes only. This code should be part of your app's startup code (instead of being called for each request); how you structure it depends on your specific application.
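If you'd rather avoid the sync calls even at startup, fs.mkdir with recursive: true (available since Node 10.12) succeeds even when the directory already exists, so the existsSync check goes away entirely (a sketch):

var fs = require("fs");
var path = require("path");

// With recursive: true this is a no-op if temp/ is already there
fs.mkdir(path.join(process.cwd(), "temp"), { recursive: true }, function(err) {
    if (err) throw err;
});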

Streaming file upload to a rest file server using node js without temporary files

fileupload = (req, res, uploadEndpoint) ->
  req.connection.setTimeout 10000
  poster = request.post(uploadEndpoint, (err, response, body) ->
    console.log err + ":" + response.statusCode + ":" + body
    jsonbody = JSON.parse(body)
    console.log "jsonbody: " + JSON.stringify(jsonbody)
    console.log "Error ofcourse" if jsonbody.error isnt `undefined`
  )
  req.pipe(poster).pipe res
This is a CoffeeScript snippet. I'm using it to upload a file to another REST-based file server. It does not work when the file is big; it gives me the error below. Express.js is used without bodyParser, although app.use express.json() and app.use express.urlencoded() are in place. I don't want to use temp files, to avoid the extra expensive I/O in between. It looks like a piping issue. Any insight?
{"error":"multipart: Part Read: read tcp 192.168.1.1:49688: i/o timeout"}
Here is a middleware I wrote for doing this very job (including handling zipped files!). It uses multiparty, so you'll have to npm install that if you want to use it.
multiparty = require("multiparty")
zlib = require("zlib")

stream_file_upload = (req, res, next) ->
  # Create the multiparty form
  form = new multiparty.Form()
  needed_parts = 0
  succeeded_parts = 0
  form.on "part", (part) ->
    needed_parts += 1
    if part.filename
      # Handle unzipping
      unzipper = null
      if /\.zip$/.exec(part.filename)
        unzipper = zlib.createUnzip()
      else if /\.gz$/.exec(part.filename)
        unzipper = zlib.createGunzip()
      if unzipper
        part.pipe(unzipper)
      req.file = unzipper or part
      next() if needed_parts == succeeded_parts += 1
    else
      # Need to wait for these fields to be parsed before calling next
      val = ""
      part.on "data", (data) ->
        val += data
      part.on "end", () ->
        req.body[part.name] = val
        next() if needed_parts == succeeded_parts += 1
  form.parse req
The middleware adds a req.file variable which is a stream.
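For reference, wiring the middleware into an Express app might look like this (a plain JavaScript sketch; the route and the file-server URL are hypothetical, and the request module mirrors the question's usage):

var express = require("express");
var request = require("request");

var app = express();

// stream_file_upload parses the multipart body and sets req.file to a
// readable stream (already decompressed if the upload was .zip/.gz)
app.post("/upload", stream_file_upload, function(req, res) {
    var poster = request.post("http://fileserver.example/files");
    // Forward the file stream to the file server and relay its response,
    // without ever touching the local disk
    req.file.pipe(poster).pipe(res);
});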

Meteor/Node writeFile crashes server

I have the following code:
Meteor.methods({
    saveFile: function(blob, name, path, encoding) {
        var path = cleanPath(path), fs = __meteor_bootstrap__.require('fs'),
            name = cleanName(name || 'file'), encoding = encoding || 'binary',
            chroot = Meteor.chroot || 'public';
        // Clean up the path. Remove any initial and final '/' (we prefix them),
        // any sort of attempt to go to the parent directory '..', and any empty
        // directories in between '/////' (which may happen after removing '..')
        path = chroot + (path ? '/' + path + '/' : '/');

        // TODO Add file existence checks, etc...
        fs.writeFile(path + name, blob, encoding, function(err) {
            if (err) {
                throw (new Meteor.Error(500, 'Failed to save file.', err));
            } else {
                console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
            }
        });

        function cleanPath(str) {
            if (str) {
                return str.replace(/\.\./g, '').replace(/\/+/g, '').
                    replace(/^\/+/, '').replace(/\/+$/, '');
            }
        }

        function cleanName(str) {
            return str.replace(/\.\./g, '').replace(/\//g, '');
        }
    }
});
I took this from the following project:
https://gist.github.com/dariocravero/3922137
The code works fine and saves the file, but it repeats the call several times, and each time it causes Meteor to reset (I am using the Windows version, 0.5.4). The browser's F12 console fills up with failed requests, and the Meteor console loops over the startup code each time the 503 happens, repeating the console logs in the saveFile function.
Furthermore, in the target directory the image thumbnail keeps displaying, then shows as broken, then as a valid thumbnail again, as if the fs is writing it multiple times.
Here is the code that calls the function:
"click .savePhoto":function(e, template){
e.preventDefault();
var MAX_WIDTH = 400;
var MAX_HEIGHT = 300;
var id = e.srcElement.id;
var item = Session.get("employeeItem");
var file = template.find('input[name='+id+']').files[0];
// $(template).append("Loading...");
var dataURL = '/.bgimages/'+file.name;
Meteor.saveFile(file, file.name, "/.bgimages/", function(){
if(id=="goodPhoto"){
EmployeeCollection.update(item._id, { $set: { good_photo: dataURL }});
}else{
EmployeeCollection.update(item._id, { $set: { bad_photo: dataURL }});
}
// Update an image on the page with the data
$(template.find('img.'+id)).delay(1000).attr('src', dataURL);
});
},
What's causing the server to reset?
My guess would be that since Meteor has a built-in automatic directory scan that watches for file changes (in order to relaunch the application on the newest code base), the file you are creating is what actually causes the server reset.
Meteor doesn't scan directories beginning with a dot (so-called "hidden" directories, such as .git), so you could use this behaviour to your advantage by writing your files to a .directory of your own.
You should also consider using writeFileSync, since Meteor methods are intended to run synchronously (inside node fibers), contrary to the usual Node style of asynchronous calls. In this code it's no big deal, but, for example, you couldn't use any Meteor mechanics inside the writeFile callback:
asynchronousCall(function(error, result) {
    if (error) {
        // handle error
    } else {
        // do something with result
        Collection.update(id, result); // error! Meteor code must run inside a fiber
    }
});

var result = synchronousCall();
Collection.update(id, result); // good to go!
Of course there is a way to turn any asynchronous call into a synchronous one using fibers/future, but that's beyond the scope of this question; I recommend the EventedMind episode on node Futures to understand this specific area.
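For completeness, here is a minimal sketch of that fibers/future pattern (assuming the fibers package Meteor bundles; future.wait() blocks only the current fiber, not the whole server):

var Future = Npm.require('fibers/future');

// Wrap fs.writeFile so a Meteor method can call it synchronously
function writeFileInFiber(filepath, blob, encoding) {
    var future = new Future();
    fs.writeFile(filepath, blob, encoding, function(err) {
        if (err) future.throw(err);
        else future.return();
    });
    return future.wait();
}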
