Deleting file in node.js not working

I am using Node.js with Express.
I am trying to delete a file after sending it to the client.
function deleteFile (file) {
    fs.unlink(file, function (err) {
        if (err) {
            logger.error(err);
        }
    });
}
app.get("/deleteFileAfterDownload", function (req, res) {
    var fileName = "a.pdf";
    var stream = fs.createReadStream(fileName);
    var streamClosed = false;
    req.on('end', function () {
        if (!streamClosed) {
            stream.emit('close');
            // I tried stream.destroy() but that is also not working
        }
    });
    stream.on('close', function () {
        streamClosed = true;
        deleteFile(fileName);
    });
    req.on('data', function () {});
    stream.pipe(res);
});
But the file is not getting deleted. It seems the process is still using the file, because the file only gets deleted after I end the process.
Can anybody tell me why?
If I am doing it wrong, please tell me a good way.

First, add a log in deleteFile to make sure it is actually called.
Then try simplifying it:
var fileName = "a.pdf";
var stream = fs.createReadStream(fileName);
stream.pipe(res);
res.once("finish", function () {
    deleteFile(fileName);
});
The previous example only deletes the file if the download finished.
If you want to delete the file unconditionally, try the following:
var fileName = "a.pdf";
var stream = fs.createReadStream(fileName);
stream.pipe(res).once("close", function () {
    stream.close();
    deleteFile(fileName);
});
stream.close() is important here, because the stream is not closed automatically if the pipe is aborted.
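If you are on Express, another option worth trying (a minimal sketch, reusing the fileName and deleteFile from above) is res.download, whose callback fires once the transfer has completed or failed:

app.get("/deleteFileAfterDownload", function (req, res) {
    var fileName = "a.pdf";
    res.download(fileName, function (err) {
        if (err) {
            logger.error(err);
        }
        // delete whether or not the download succeeded
        deleteFile(fileName);
    });
});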

Related

Writing buffer response from resemble.js to file

I'm using node-resemble-js to compare two PNG images.
The comparison happens without issue and I get a successful/relevant response; however, I'm having trouble outputting the image diff.
var express = require('express');
var fs = require('fs');
var resemble = require('node-resemble-js');
var router = express.Router();

router.get('/compare', function (req, res, next) {
    compareImages(res);
});

var compareImages = function (res) {
    resemble.outputSettings({
        largeImageThreshold: 0
    });
    var diff = resemble('1.png')
        .compareTo('2.png')
        .ignoreColors()
        .onComplete(function (data) {
            console.log(data);
            var png = data.getDiffImage();
            fs.writeFile('diff.png', png.data, null, function (err) {
                if (err) {
                    throw 'error writing file: ' + err;
                }
                console.log('file written');
            });
            res.render('compare');
        });
};

module.exports = router;
It writes to diff.png as expected; however, it's not creating a valid image.
Any ideas where I'm going wrong? I feel like I'm pretty close but unsure of the final piece.
Thanks
Looks like there is a pack() method that needs to be called, which does some work and then streamifies the data. In that case you can buffer the stream and then call writeFile like this:
var png = data.getDiffImage();
var buf = new Buffer([]);
var strm = png.pack();
strm.on('data', function (dat) {
    buf = Buffer.concat([buf, dat]);
});
strm.on('end', function () {
    fs.writeFile('diff.png', buf, null, function (err) {
        if (err) {
            throw 'error writing file: ' + err;
        }
        console.log('file written');
    });
});
or you can just pipe it like this, which is a little simpler:
png.pack().pipe(fs.createWriteStream('diff.png'))
Honestly, your approach made sense to me (grab the Buffer and write it) but I guess that data Buffer attached to what comes back from getDiffImage isn't really the final png. Seems like the docs are a bit thin but there's some info here: https://github.com/lksv/node-resemble.js/issues/4
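If you also want to hold back the response until the file is fully written, a small sketch (assuming the same route and data object from above) is to render inside the write stream's finish event:

var png = data.getDiffImage();
png.pack()
    .pipe(fs.createWriteStream('diff.png'))
    .on('finish', function () {
        console.log('file written');
        res.render('compare');
    });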

Piping multiple file streams using Node.js

I want to stream multiple files, one after another, to the browser. To illustrate, think of having multiple CSS files which should be delivered concatenated as one.
The code I am using is:
var directory = path.join(__dirname, 'css');

fs.readdir(directory, function (err, files) {
    async.eachSeries(files, function (file, callback) {
        if (!endsWith(file, '.css')) { return callback(); } // (1)
        var currentFile = path.join(directory, file);
        fs.stat(currentFile, function (err, stats) {
            if (stats.isDirectory()) { return callback(); } // (2)
            var stream = fs.createReadStream(currentFile).on('end', function () {
                callback(); // (3)
            });
            stream.pipe(res, { end: false }); // (4)
        });
    }, function () {
        res.end(); // (5)
    });
});
The idea is that I

1. filter out all files that do not have the file extension .css,
2. filter out all directories,
3. proceed with the next file once a file has been read completely,
4. pipe each file to the response stream without closing it,
5. end the response stream once all files have been piped.
The problem is that only the first .css file gets piped, and all remaining files are missing. It's as if (3) would directly jump to (5) after the first (4).
The interesting thing is that if I replace line (4) with
stream.on('data', function (data) {
    console.log(data.toString('utf8'));
});
everything works as expected: I see multiple files. If I then change this code to
stream.on('data', function (data) {
    res.write(data.toString('utf8'));
});
all files except the first are missing again.
What am I doing wrong?
PS: The error happens using Node.js 0.8.7 as well as using 0.8.22.
UPDATE
Okay, it works if you change the code as follows:
var directory = path.join(__dirname, 'css');

fs.readdir(directory, function (err, files) {
    var concatenated = '';
    async.eachSeries(files, function (file, callback) {
        if (!endsWith(file, '.css')) { return callback(); }
        var currentFile = path.join(directory, file);
        fs.stat(currentFile, function (err, stats) {
            if (stats.isDirectory()) { return callback(); }
            var stream = fs.createReadStream(currentFile).on('end', function () {
                callback();
            }).on('data', function (data) {
                concatenated += data.toString('utf8');
            });
        });
    }, function () {
        res.write(concatenated);
        res.end();
    });
});
But: Why? Why can't I call res.write multiple times instead of first summing up all the chunks, and then write them all at once?
Consider also using multistream, which allows you to combine and emit multiple streams one after another.
The code was perfectly fine; it was the unit test that was wrong ...
Fixed that, and now it works like a charm :-)
This may help someone else:
const fs = require("fs");
const pth = require("path");

let readerStream1 = fs.createReadStream(pth.join(__dirname, "a.txt"));
let readerStream2 = fs.createReadStream(pth.join(__dirname, "b.txt"));
let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));

// only readable streams have a "pipe" method
readerStream1.pipe(writerStream);
readerStream2.pipe(writerStream);
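A word of caution on the snippet above: piping two readables into the same writable at once can interleave their chunks, and by default whichever readable ends first will also end the writable. A sketch of strictly sequential piping (assuming the same a.txt/b.txt/c.txt files) keeps the writable open with { end: false } until the second stream takes over:

const fs = require("fs");
const pth = require("path");

const first = fs.createReadStream(pth.join(__dirname, "a.txt"));
const second = fs.createReadStream(pth.join(__dirname, "b.txt"));
const out = fs.createWriteStream(pth.join(__dirname, "c.txt"));

first.pipe(out, { end: false }); // don't end the writable yet
first.on("end", function () {
    second.pipe(out); // the second stream is allowed to end it
});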
I also checked Rocco's answer and it's working like a charm:
// npm i --save multistream
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");

let streams = [
    fs.createReadStream(pth.join(__dirname, "a.txt")),
    fs.createReadStream(pth.join(__dirname, "b.txt"))
];
let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));

// new multi(streams).pipe(process.stdout);
new multi(streams).pipe(writerStream);
and to send the results to the client:
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");
const exp = require("express");

const app = exp();
app.listen(3000);

app.get("/stream", (q, r) => {
    new multi([
        fs.createReadStream(pth.join(__dirname, "a.txt")),
        fs.createReadStream(pth.join(__dirname, "b.txt"))
    ]).pipe(r);
});
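If you'd rather not open all the files up front, multistream also accepts functions in the array so each stream is created lazily when its turn comes (a sketch under the same setup; check the multistream README for the exact semantics):

app.get("/stream-lazy", (q, r) => {
    new multi([
        () => fs.createReadStream(pth.join(__dirname, "a.txt")),
        () => fs.createReadStream(pth.join(__dirname, "b.txt"))
    ]).pipe(r);
});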

Save an image using nodejs, expressjs and socket.io

I've tried to save an image to a specified directory with Node.js using Express.js and socket.io, but it doesn't work.
On the client-side:
var reader = new FileReader();

function drop(e) {
    e.stopPropagation();
    e.preventDefault();
    var dt = e.dataTransfer;
    var files = dt.files;
    jQuery.each(files, function () {
        reader.onload = function (e) {
            socket.emit('sendfile', e.target.result);
        };
    });
    return false;
}
The image should be uploaded by a drag and drop function.
Then on the server-side:
io.sockets.on('connection', function (socket) {
    [...]
    socket.on('sendfile', function (data) {
        var fs = require('fs');
        app.use(express.bodyParser({ keepExtensions: true, uploadDir: '/uploaded' }));
        io.sockets.emit('updatechat', socket.username, data); // test
    });
I have also tried
socket.on('sendfile', function (data) {
    var fs = require('fs');
    fs.writeFile('/uploaded/test.png', data, "binary", function (err) {
        if (err) throw err;
        console.log('It\'s saved!');
    });
    io.sockets.emit('updatechat', socket.username, data); // data test
});
but it doesn't save anything.
The "data test" shows me that the data has already arrived on the server, so I don't think the problem comes from the client side; on the server side I have no idea what I'm doing wrong.
I made a simple example to illustrate file upload via sockets!
The steps are the following:

1. Create the send-file socket.io event handler in app.js to receive the file. The file is received as binary data;
2. In the Jade/HTML page, put a file input and a button to send it. NOTE: you don't need a multipart form here; we are sending the file over the socket, not as an HTTP request/response;
3. Initialize HTML5 File API support and attach the listeners watching your file input component;
4. Add the remaining routines to read the file and send its content forward.

Now the first step (app.js):
var io = require('socket.io').listen(8000, { log: false });

io.sockets.on('connection', function (socket) {
    socket.on('send-file', function (name, buffer) {
        var fs = require('fs');
        // path to store uploaded files (NOTE: presumes you have created the folders)
        var fileName = __dirname + '/tmp/uploads/' + name;
        fs.open(fileName, 'a', 0755, function (err, fd) {
            if (err) throw err;
            fs.write(fd, buffer, null, 'Binary', function (err, written, buff) {
                fs.close(fd, function () {
                    console.log('File saved successfully!');
                });
            });
        });
    });
});
Second step (in my case I've used Jade rather than HTML):
extends layout

block content
    h1 Tiny Uploader
    p Save an Image to the Server
    input#input-files(type='file', name='files[]', data-url='/upload', multiple)
    button#send-file(onclick='javascript:sendFile();') Send
    script(src='http://127.0.0.1:8000/socket.io/socket.io.js')
    script(src='/javascripts/uploader.js')
Third and fourth steps (coding uploader.js to send the file to the server):
// variable declaration
var filesUpload = null;
var file = null;
var socket = io.connect('http://localhost:8000');
var send = false;

if (window.File && window.FileReader && window.FileList) {
    // HTML5 File API ready
    init();
} else {
    // browser has no support for the HTML5 File API
    // send an error message or something like that
    // TODO
}

/**
 * Initialize the listeners and send the file if we have one.
 */
function init() {
    filesUpload = document.getElementById('input-files');
    filesUpload.addEventListener('change', fileHandler, false);
}

/**
 * Handle the file change event to send its content.
 * @param e
 */
function fileHandler(e) {
    var files = e.target.files || e.dataTransfer.files;
    if (files) {
        // send only the first one
        file = files[0];
    }
}

function sendFile() {
    if (file) {
        // read the file content and prepare to send it
        var reader = new FileReader();
        reader.onload = function (e) {
            console.log('Sending file...');
            // get all the content
            var buffer = e.target.result;
            // send the content via the socket
            socket.emit('send-file', file.name, buffer);
        };
        reader.readAsBinaryString(file);
    }
}
Some important considerations:
This is a tiny sample of a socket file uploader. It doesn't handle some important things: sending the file in chunks instead of all of the content in one go (see the sketch below); updating the status of the upload (error messages, a success message, a progress bar or percentage, etc.). So this is a sample of the initial steps for coding your own file uploader. Note that we don't need a form to send files; it is a completely asynchronous transaction via socket.io.
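As a rough starting point for the chunking idea above, here is a client-side sketch; the chunk size and the send-file-chunk event name are made up for illustration, and the server would need a matching handler that appends the chunks in order:

var CHUNK_SIZE = 64 * 1024; // 64 KB, arbitrary

function sendFileInChunks(file) {
    var offset = 0;
    (function sendNext() {
        var slice = file.slice(offset, offset + CHUNK_SIZE);
        var reader = new FileReader();
        reader.onload = function (e) {
            // hypothetical event; the server appends each chunk in order
            socket.emit('send-file-chunk', file.name, offset, e.target.result);
            offset += CHUNK_SIZE;
            if (offset < file.size) {
                sendNext();
            }
        };
        reader.readAsBinaryString(slice);
    })();
}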
I hope this post is helpful.
This tutorial goes a little bit further, since it lets you pause/resume your upload, but it also shows how to upload a file through socket.io :)

Node.js: how to pass a readStream as function argument and use in a callback?

I want to pass a stream as an argument to a function and use it in an async callback, but by then it is destroyed (stream.readable is false).
For example:
var test = require('./test');
var file = fs.createReadStream('./file.txt');
test(file, console.log);
and in test.js:
module.exports = function (stream, callback) {
    // stream.pipe(process.stdout); ///////// STREAM IS READABLE HERE
    doSomething('abc', function (err) {
        stream.pipe(process.stdout); ///////// STREAM IS NOT READABLE HERE
        callback(err);
    });
};
Why is this happening?
What can I do to use it in the callback?
This happens because the stream ends before you try to pipe it. In the first place the stream is readable because your synchronous code is still running. In the second place (inside the callback) the stream has already ended, because the callback may be executed several ticks in the future. You need to pause your stream if you want to read it later. This code should work:
var test = require('./test');
var file = fs.createReadStream('./file.txt');
file.pause();
test(file, console.log);
test.js
module.exports = function (stream, callback) {
    doSomething('abc', function (err) {
        stream.resume();
        stream.pipe(process.stdout);
        callback(err);
    });
};
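An alternative that avoids pausing altogether (a sketch, under the assumption that you can change the function to take a path instead of an open stream) is to create the stream only when you're ready to consume it:

// test.js, hypothetical variant that receives the path
var fs = require('fs');

module.exports = function (path, callback) {
    doSomething('abc', function (err) {
        if (err) { return callback(err); }
        fs.createReadStream(path).pipe(process.stdout);
        callback(null);
    });
};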

callback to handle completion of pipe

I am using the following Node.js code to download documents from some URL and save them to disk.
I want to be informed when the document has been downloaded. I have not seen any callback with pipe. Or, is there any 'end' event that can be captured on completion of the download?
request(some_url_doc).pipe(fs.createWriteStream('xyz.doc'));
Streams are EventEmitters, so you can listen for certain events. As you said, there is a finish event (previously end):
var stream = request(...).pipe(...);
stream.on('finish', function () { ... });
For more information about which events are available you can check the stream documentation page.
Based on the Node.js documentation (http://nodejs.org/api/stream.html#stream_event_finish),
you should handle the writable stream's finish event:
var writable = getWriteable();
var readable = getReadable();
readable.pipe(writable);
writable.on('finish', function(){ ... });
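On newer Node versions (10+), stream.pipeline wraps the same pattern and also forwards errors from either side into a single callback (a sketch, reusing getReadable and getWriteable from above):

var { pipeline } = require('stream');

pipeline(getReadable(), getWriteable(), function (err) {
    if (err) {
        console.error('pipe failed', err);
    } else {
        console.log('pipe done');
    }
});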
A code snippet for piping content from the web via http(s) to the filesystem. As @starbeamrainbowlabs noticed, the finish event does the job:
var tmpFile = "/tmp/somefilename.doc";
var ws = fs.createWriteStream(tmpFile);
ws.on('finish', function () {
    // pipe done here, do something with the file
});

var client = url.slice(0, 5) === 'https' ? https : http;
client.get(url, function (response) {
    return response.pipe(ws);
});
I found a slightly different solution to my problem in this context, and thought it worth sharing.
Most of the examples create read streams from a file, but in my case the read stream has to be created from a JSON string coming from a message pool.
var jsonStream = through2.obj(function (chunk, encoding, callback) {
    this.push(JSON.stringify(chunk, null, 4) + '\n');
    callback();
});

// message.value --> value/text to write in write.txt
jsonStream.write(JSON.parse(message.value));

var writeStream = sftp.createWriteStream("/path/to/write/write.txt");

// the "close" event didn't work for me!
writeStream.on('close', function () {
    console.log("- done!");
    sftp.end();
});

// the "finish" event didn't work for me either!
// (likely because jsonStream is never ended with .end(), so writeStream
// never finishes and neither event fires)
writeStream.on('finish', function () {
    console.log("- done!");
    sftp.end();
});

// finally this worked for me!
// (caveat: 'data' on the readable side only signals that the data was read,
// not that the write to the SFTP server has completed)
jsonStream.on('data', function (data) {
    var toString = Object.prototype.toString.call(data);
    console.log('type of data:', toString);
    console.log("- file transferred");
});

jsonStream.pipe(writeStream);
Here's a solution that handles request errors and calls a callback after the file is written:

request(opts)
    .on('error', function (err) { return callback(err); })
    .pipe(fs.createWriteStream(filename))
    .on('finish', function () {
        // 'finish' does not receive an error argument; write errors are
        // emitted as 'error' on the write stream instead
        return callback();
    });
