Couldn't serve a file after deleting with unlink() [duplicate] - node.js

I'm using Node.js and I need to delete a file after the user downloads it. Is there any way to call a callback after the pipe finishes, or any event for that purpose?
exports.downloadZipFile = function(req, res){
    var fileName = req.params['fileName'];
    res.attachment(fileName);
    fs.createReadStream(fileName).pipe(res);
    //delete file after download
};

You can call fs.unlink() on the finish event of res.
Or you could use the end event of the file stream:
var file = fs.createReadStream(fileName);
file.on('end', function() {
    fs.unlink(fileName, function() {
        // file deleted
    });
});
file.pipe(res);
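For the first option, here is a minimal sketch of the same handler that unlinks the file once res emits finish (i.e. after the response has been flushed):
exports.downloadZipFile = function(req, res){
    var fileName = req.params['fileName'];
    res.attachment(fileName);
    res.on('finish', function() {
        // the response has been flushed, so the file is safe to remove
        fs.unlink(fileName, function(err) {
            if (err) console.log(err);
        });
    });
    fs.createReadStream(fileName).pipe(res);
};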

Related

Why is it necessary to invoke close after finish in a Node.js stream?

Here is the source:
var http = require('http');
var fs = require('fs');
var download = function(url, dest, cb) {
    var file = fs.createWriteStream(dest);
    var request = http.get(url, function(response) {
        response.pipe(file);
        file.on('finish', function() {
            file.close(cb); // close() is async, call cb after close completes.
        });
    }).on('error', function(err) { // Handle errors
        fs.unlink(dest); // Delete the file async. (But we don't check the result)
        if (cb) cb(err.message);
    });
};
Why is file.close(cb) called on the finish event? Doesn't finish close the stream? It looks strange.
Before Node.js version 5.5.0 it was the application's responsibility to close the file descriptor and make sure there was no file descriptor leak.
In v5.5.0 the autoClose option was introduced for createWriteStream, with a default value of true, which means that on error or finish the file descriptor is closed automatically.
The source you're looking at predates v5.5.0 (2016-01-21).
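For comparison, here is a sketch (not part of the original source) of how the same helper might look on Node >= 5.5.0, relying on autoClose and the stream's close event instead of an explicit file.close():
var http = require('http');
var fs = require('fs');
var download = function(url, dest, cb) {
    var file = fs.createWriteStream(dest); // autoClose defaults to true on >= 5.5.0
    http.get(url, function(response) {
        response.pipe(file);
        // 'close' fires once autoClose has released the descriptor
        file.on('close', function() {
            if (cb) cb();
        });
    }).on('error', function(err) {      // handle request errors, as in the original
        fs.unlink(dest, function() {}); // best-effort cleanup, result not checked
        if (cb) cb(err.message);
    });
};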

Correct way to remove temp file after response has been sent

I have a simple Express 4 based API that outputs an MP3 file that was generated based on some provided parameters. However, I seem to be unable to delete this temporary MP3 file after the response has been sent out.
What I have:
app.get('/endpoint', function(request, response) {
    // Distill parameters from request and create tempFileMp3
    var stat = Fs.statSync(tempFileMp3);
    response.writeHead(status, {
        'Content-Type': 'audio/mpeg',
        'Content-Length': stat.size
    });
    var stream = Fs.createReadStream(tempFileMp3);
    stream.pipe(response);
});
At first I was hoping that I would be able to delete the tempFileMp3 in a stream event, either the end or finish events:
stream
    .on('end', function() {
        Fs.unlinkSync(tempFileMp3);
    })
    .on('close', function() {
        Fs.unlinkSync(tempFileMp3);
    });
But neither the end nor close events are fired.
How can I delete the temporary MP3 file after the response has been sent?
Try capturing the response finish event:
res.on('finish', function() {
    // remove temp files
});
res.on('error', function() {
    // remove temp files
});
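Putting that together with the handler above, a minimal sketch (status hard-coded to 200 here, and tempFileMp3 generated as before):
app.get('/endpoint', function(request, response) {
    // Distill parameters from request and create tempFileMp3 (as above)
    var stat = Fs.statSync(tempFileMp3);
    response.writeHead(200, {
        'Content-Type': 'audio/mpeg',
        'Content-Length': stat.size
    });
    response.on('finish', function() {
        // the response has been fully handed off, so the temp file can go
        Fs.unlink(tempFileMp3, function() {});
    });
    Fs.createReadStream(tempFileMp3).pipe(response);
});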

Node Js FS not writing whole file

I want to upload files from the client to the server and then store them in a folder corresponding to the user that uploaded that particular file. The code seems to be OK, but the problem seems to be fs-related: fs only writes 15 bytes of the file. In fact it only writes 15 bytes for every file type I have tried so far (images and videos)! This is my code so far, can you please help me? Thank you!
var multipart = require('connect-multiparty');
var multipartMiddleware = multipart();
app.post('/upload', multipartMiddleware, function(req, res){
    var file = req.files.thumbnail; // that's what I've named the html input element
    var stream = fs.createWriteStream(file.path);
    fs.writeFile('./users/'+req.user._id+'/'+file.originalFilename, stream);
    res.redirect('/');
});
The problem is in these lines:
var stream = fs.createWriteStream(file.path);
fs.writeFile('./users/'+req.user._id+'/'+file.originalFilename, stream);
stream is not the file content of 'file.path'; instead, it is a stream through which you can write to 'file.path'.
For example:
var stream = fs.createWriteStream(file.path);
stream.write('some content');
stream.end();
When you use it to write to a file with fs.writeFile, its object representation is what gets written, which is '[object Object]' (15 bytes).
What you should be doing is to read from the other file, and hold the content in a String or Buffer, not in a stream:
var stream = fs.readFileSync(file.path);
And then write into the destination file. Of course, remember to redirect only on completion, if you are using the async model of file write. There is a writeFileSync() API by the way, through which you can write synchronously as well.
Hope this helps.
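An alternative that avoids holding the whole file in memory is to stream the uploaded temp file into its destination; a rough sketch, reusing the question's variable names:
app.post('/upload', multipartMiddleware, function(req, res){
    var file = req.files.thumbnail;
    var source = fs.createReadStream(file.path);
    var dest = fs.createWriteStream('./users/' + req.user._id + '/' + file.originalFilename);
    source.pipe(dest);
    dest.on('finish', function() {
        res.redirect('/');              // redirect only once the copy has completed
    });
    dest.on('error', function(err) {
        console.log(err);
        res.redirect('error_page');
    });
});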
Try like this:
app.post('/upload', multipartMiddleware, function(req, res){
    var file = req.files.thumbnail; // that's what I've named the html input element
    // read the uploaded temp file first; passing the file object itself would only write '[object Object]'
    fs.readFile(file.path, function(err, data) {
        if(err) {
            res.redirect('error_page');
            return console.log(err);
        }
        fs.writeFile('./users/'+req.user._id+'/'+file.originalFilename, data, function(err) {
            if(err) {
                res.redirect('error_page');
                return console.log(err);
            }
            console.log("The file was saved!");
            res.redirect('/');
        });
    });
});

Node pipe stops working

My client sends an image file to the server. It works 5 times and then it suddenly stops. I am pretty new to streams and pipe, so I am not sure what I am doing wrong.
Server Code
var http = require('http');
var fs = require('fs');
var i = 0; // counter for the saved files
http.createServer(function(req, res) {
    console.log("File received");
    // This opens up the writeable stream to `output`
    var name = "./test" + i + ".jpg";
    var writeStream = fs.createWriteStream(name);
    // This pipes the POST data to the file
    req.pipe(writeStream);
    req.on('end', function () {
        console.log("File saved");
        i++;
    });
    // This is here in case any errors occur
    writeStream.on('error', function (err) {
        console.log(err);
    });
}).listen(3000);
Client code
var request = require('request');
var fs = require('fs');
setInterval(function () {
    var readStream = fs.createReadStream('./test.jpg');
    readStream.on('open', function () {
        // This just pipes the read stream to the response object (which goes to the client)
        readStream.pipe(request.post('http://192.168.1.100:3000/test'));
        console.log("Send file to server");
    });
}, 1000);
This behaves like a resource exhaustion issue. It's not clear which calls throw errors and which just return. Does the server connect on the 6th call? Does the write stream open? Does the pipe open?
Try ending the connection and closing the pipe after the image is saved. Maybe close the write stream too; I don't remember whether Node garbage collects file descriptors.
I had to do the following on the server side to make this work :
res.statusCode = 200;
res.end();
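In context, those two lines go in the server's req end handler, for example:
req.on('end', function () {
    console.log("File saved");
    i++;
    res.statusCode = 200; // answer the request so the client's agent can release the socket
    res.end();
});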

Flowjs file upload - AngularJS and Node

I am using Flowjs and its ng-flow directive to upload files, with Node.js as the backend. When I try to upload a file, only a file in the tmp folder is created, and it is not any recognizable file type like JPG or PNG (flow-135601-juicy_gustinektar02l_10185jpg.1). Here is the code:
ANGULARJS
app.config(['flowFactoryProvider', function (flowFactoryProvider) {
    flowFactoryProvider.defaults = {
        target: 'http://localhost:8086/api/upload/',
        permanentErrors: [500, 501],
        maxChunkRetries: 1,
        chunkRetryInterval: 5000,
        simultaneousUploads: 1
    };
    flowFactoryProvider.on('catchAll', function (event) {
        console.log('catchAll', arguments);
    });
    // Can be used with different implementations of Flow.js
    //flowFactoryProvider.factory = fustyFlowFactory;
}]);
NODEJS
// Handle uploads through Flow.js
app.post('/api/upload', function(req, res){
    flow.post(req, function(status, filename, original_filename, identifier){
        console.log('POST', status, original_filename, identifier);
        res.send(200, {
            // NOTE: Uncomment this function to enable cross-domain requests.
            //'Access-Control-Allow-Origin': '*'
        });
    });
});
// Handle cross-domain requests
// NOTE: Uncomment this function to enable cross-domain requests.
/*
app.options('/upload', function(req, res){
    console.log('OPTIONS');
    res.send(true, {
        'Access-Control-Allow-Origin': '*'
    }, 200);
});
*/
// Handle status checks on chunks through Flow.js
app.get('/api/upload', function(req, res){
    flow.get(req, function(status, filename, original_filename, identifier){
        console.log('GET', status);
        res.send(200, (status == 'found' ? 200 : 404));
    });
});
Reassembling all chunks is easy, just call this:
var stream = fs.createWriteStream(filename);
flow.write(identifier, stream);
And that is it!
But the other question is: when should this method be called?
Probably when all chunks are uploaded and present in the tmp folder.
But there is another issue with duplicate calls of the done callback.
This can be solved by creating and locking the file once all chunks exist.
Then call
flow.write(identifier, stream);
Then clean up all chunks, release the lock and close the file.
The same approach is used in the PHP server-side library: https://github.com/flowjs/flow-php-server/blob/master/src/Flow/File.php#L102
Hope this helps, and I hope someone can collaborate and update the Node.js sample with these fixes.
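As a rough illustration of that flow (hypothetical wiring: the './uploads/' destination and the 'done' status check are assumptions, and flow.write refers to the write helper from the flow-node.js sample), the reassembly could be hooked into the POST handler like this:
app.post('/api/upload', function(req, res){
    flow.post(req, function(status, filename, original_filename, identifier){
        console.log('POST', status, original_filename, identifier);
        if (status === 'done') {
            // all chunks are in the tmp folder; reassemble them into a real file
            var stream = fs.createWriteStream('./uploads/' + filename);
            stream.on('finish', function() {
                res.send(200); // chunk cleanup and lock release would go here
            });
            flow.write(identifier, stream);
        } else {
            res.send(200);
        }
    });
});
The locking described above would still be needed to guard against duplicate done callbacks; this sketch leaves it out.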
