I have a request handler that sends a file from MongoDB (GridFS) to the client, shown below, but it accumulates the content in a data variable, so the whole file ends up in memory. I need to switch to streaming mode and send the file to the client in chunks. I can't figure out how to pipe the buffer to the response. Look at the second snippet: it doesn't work, but it shows roughly what I need.
Maybe this is useful: the data in GridFS is Base64 encoded, but that can be changed if streaming would be more efficient.
In-Memory version
router.get('/get/:id', function(req,res){
  getById(req.params.id, function(err, fileId){
    new GridStore(db, fileId, "r").open(function(err, gridStore) {
      res.set('Content-Type', gridStore.contentType);
      var stream = gridStore.stream(true);
      var data = '';
      stream.on("data", function(chunk) {
        data += chunk;
      });
      stream.on("end", function() {
        res.send(new Buffer(data, 'base64'));
      });
    });
  });
});
Streaming mode version
router.get('/get/:id', function(req,res){
  getById(req.params.id, function(err, fileId){
    new GridStore(db, fileId, "r").open(function(err, gridStore) {
      res.set('Content-Type', gridStore.contentType);
      var stream = gridStore.stream(true);
      stream.on("data", function(chunk) {
        new Buffer(chunk, 'base64').pipe(res);
      });
      stream.on("end", function() {
        res.end();
      });
    });
  });
});
Update
I think I'm close to resolving this. I found that this works, but it doesn't decode from Base64:
new GridStore(db, fileId, "r").open(function(err, gridStore) {
  res.set('Content-Type', gridStore.contentType);
  gridStore.stream(true).pipe(res);
});
exports.sendFile = function(db, res, fileId) {
  var grid = require('gridfs-stream');
  var gfs = grid(db, mongoose.mongo);
  var on_error = function(){
    res.status(404).end();
  };
  var readstream = gfs.createReadStream({
    filename: fileId,
    root: 'r'
  });
  readstream.on('error', function(err) {
    if (('\'' + err + '\'') === '\'Error: does not exist\'') {
      return on_error && on_error(err);
    }
    throw err;
  });
  return readstream.pipe(res);
}
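A possible way to wire this helper into the original route (a sketch; the error handling and the db handle in scope are assumptions, not part of the original code):
// Hypothetical wiring of the sendFile helper above into the original route;
// `db` is assumed to be the open native MongoDB connection used elsewhere.
router.get('/get/:id', function(req, res) {
  getById(req.params.id, function(err, fileId) {
    if (err) return res.status(404).end();
    sendFile(db, res, fileId); // the exported helper defined above
  });
});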
I found a solution, but I think it can be better. I use the base64-stream module to decode the Base64 stream. Solution below:
var base64 = require('base64-stream');

router.get('/get/:id', function(req,res){
  getById(req.params.id, function(err, fileId){
    new GridStore(db, fileId, "r").open(function(err, gridStore) {
      res.set('Content-Type', gridStore.contentType);
      gridStore.stream(true).pipe(base64.decode()).pipe(res);
    });
  });
});
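If you would rather avoid the extra dependency, here is a minimal sketch of a hand-rolled decoding Transform stream (it assumes well-formed Base64 input and buffers leftover characters so chunk boundaries that are not multiples of four still decode correctly):
var Transform = require('stream').Transform;

// Sketch of a Base64-decoding Transform stream (an alternative to the
// base64-stream module). Leftover characters are carried over between
// chunks because Base64 decodes in 4-character groups.
function base64Decoder() {
  var leftover = '';
  return new Transform({
    transform: function(chunk, encoding, callback) {
      var str = leftover + chunk.toString('ascii');
      var usable = str.length - (str.length % 4);
      leftover = str.slice(usable);
      callback(null, Buffer.from(str.slice(0, usable), 'base64'));
    },
    flush: function(callback) {
      callback(null, Buffer.from(leftover, 'base64'));
    }
  });
}

// Usage: gridStore.stream(true).pipe(base64Decoder()).pipe(res);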
Related
I am working on an API to store a file uploaded by the user.
function uploadPhoto(req, res) {
  var imagedata = new Buffer('');
  req.body.on('data', function (chunk) {
    imagedata = Buffer.concat([imagedata, chunk]);
  });
  req.body.on('end', function (chunk) {
    fs.writeFile('success.jpeg', imagedata, 'binary', function (err) {
      if (err) throw err
      console.log('File saved.')
    })
  });
}
There is an error when I use req.body.on('data').
The data from Postman:
When I print out the value of req.body with console.log("message: " + req.body), it is a string and has this value:
I tried to write it to a file using a Buffer, like this:
var writeFile = function (type, cb) {
  var data = new Buffer(req.body, type);
  fs.writeFile(type + '.jpeg', data, type, function (err) {
    cb(null, data.length);
  });
}
async.parallel([
  writeFile.bind(null, 'binary'),
  writeFile.bind(null, 'utf8'),
  writeFile.bind(null, 'ascii'),
  writeFile.bind(null, 'ucs2'),
  writeFile.bind(null, 'base64')
], function (err, results) {
  res.status(200).send({});
})
This creates several jpeg files of different sizes, but none of them can be opened as an image.
How can I store this image from the user?
Thank you very much.
This looks like a good case for streams.
function uploadPhoto(req, res) {
  var file = fs.createWriteStream(__dirname + '/success.jpeg')
  req.pipe(file).on('error', function(err) { console.log(err) })
}
Headers could also help determine what file type and character encoding it has.
var file = fs.createWriteStream(__dirname + '/success.jpeg', {defaultEncoding: req.headers.encoding || 'utf8'})
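For example, here is a minimal sketch that picks the file extension from the request's Content-Type header before opening the write stream (the extension mapping and the target file name are assumptions for illustration):
var fs = require('fs');

// Sketch: choose the extension from the Content-Type header; the mapping
// below is an assumption for illustration, not an exhaustive list.
function uploadPhoto(req, res) {
  var extensions = { 'image/jpeg': '.jpeg', 'image/png': '.png', 'image/gif': '.gif' };
  var ext = extensions[req.headers['content-type']] || '.bin';
  var file = fs.createWriteStream(__dirname + '/upload' + ext);

  req.pipe(file)
    .on('finish', function() { res.status(200).send('File saved.'); })
    .on('error', function(err) { res.status(500).send(err.message); });
}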
I am uploading an image from Node.js.
Control never reaches req.on('end'); nothing inside the req.on('end') callback gets printed. I cannot identify where the problem is. I am building an API, called from JavaScript, to upload an image to a specific location on the server.
app.post('/tde/api/photo/:widgetId/:choosenFileName',function(req,res){
  console.log("In file Upload..");
  console.log(req.params.widgetId);
  console.log(req.params.choosenFileName);
  res.writeHead(200, { 'Content-Type': 'application/binary' });
  var filedata = '';
  var chunks = [];
  //req.setEncoding('binary');
  req.on('data', function(chunk){
    //filedata+= chunk;
    chunks.push(chunk);
  })
  req.on('end', function (chunk) {
    var dir = 'uploads/'+req.params.widgetId
    if (!fs.existsSync(dir)){
      fs.mkdirSync(dir);
      console.log("directory created..");
    }
    fs.readdir(dir, function(err, filenames) {
      if (err) {
        onError(err);
        return;
      }
      filenames.forEach(function(filename) {
        console.log(filename);
        fs.unlink(dir+'/'+filename, function(err) {
          if (err) {
            return console.error(err);
          }
          console.log("File deleted successfully!");
        });
      });
      //fs.writeFile('uploads/'+req.params.widgetId+'/sanmoy.jpg', chunk, function(err) {
      var fileName = req.params.choosenFileName;
      var widgetId = req.params.widgetId;
      //fs.writeFile('uploads/'+widgetId+'/'+fileName, filedata, 'binary', function(err) {
      var buffer = Buffer.concat(chunks)
      fs.writeFile('uploads/'+widgetId+'/'+fileName, buffer, function(err) {
        if (err) {
          return console.error(err);
        }
        console.log("writing file success!");
      })
    });
  });
  res.end("File is uploaded");
});
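As in the earlier upload answer above, here is a hedged sketch of the same route written with a write stream instead of buffering chunks in memory (it assumes the uploads/<widgetId> directory already exists and omits the cleanup of old files):
var fs = require('fs');

// Sketch: stream the request body straight to disk instead of collecting
// chunks in memory; assumes the uploads/<widgetId> directory already exists.
app.post('/tde/api/photo/:widgetId/:choosenFileName', function(req, res) {
  var target = 'uploads/' + req.params.widgetId + '/' + req.params.choosenFileName;
  var fileStream = fs.createWriteStream(target);
  req.pipe(fileStream)
    .on('finish', function() { res.end("File is uploaded"); })
    .on('error', function(err) { res.status(500).end(err.message); });
});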
I need to download a resume from GridFS. Below is the code I've written, but it doesn't give me a physical file to download; it only reads out the contents. How can I download the file?
exports.getFileById = function(req, res){
  var conn = mongoose.connection;
  var gfs = Grid(conn.db, mongoose.mongo);
  var id = req.params.ID;
  gfs.exist({ _id: id, root: 'resume' }, function (err, found) {
    if (err) return handleError(err);
    if (!found)
      return res.send('Error on the database looking for the file.');
    gfs.createReadStream({ _id: id, root: 'resume' }).pipe(res);
  });
};
Hope this helps!
exports.downloadResume = function(req, res){
  var conn = mongoose.connection;
  var gfs = Grid(conn.db, mongoose.mongo);
  gfs.findOne({ _id: <resumeId>, root: <collectionName> }, function (err, file) {
    if (err) {
      return res.status(400).send(err);
    }
    else if (!file) {
      return res.status(404).send('Error on the database looking for the file.');
    }
    res.set('Content-Type', file.contentType);
    res.set('Content-Disposition', 'attachment; filename="' + file.filename + '"');
    var readstream = gfs.createReadStream({
      _id: <resumeId>,
      root: '<collectionName>'
    });
    readstream.on("error", function(err) {
      res.end();
    });
    readstream.pipe(res);
  });
};
I took hints from the accepted answer, but I had to jump through some hoops to get it working. Hope this helps.
const mongodb = require('mongodb');
const mongoose = require('mongoose');
const Grid = require('gridfs-stream');
const mime = require('mime-types'); // assumed package; provides the mime.lookup() call below

// Patch gridfs-stream's findOne to call next() instead of the removed
// nextObject() so it works with newer versions of the MongoDB driver.
eval(`Grid.prototype.findOne = ${Grid.prototype.findOne.toString().replace('nextObject', 'next')}`);

const mongoURI = config.mongoURI;
const connection = mongoose.createConnection(mongoURI);

app.get('/download', async (req, res) => {
  var id = "<file_id_xyz>";
  gfs = Grid(connection.db, mongoose.mongo);
  gfs.collection("<name_of_collection>").findOne({ "_id": mongodb.ObjectId(id) }, (err, file) => {
    if (err) {
      // report the error
      console.log(err);
    } else {
      // detect the content type and set the appropriate response headers.
      let mimeType = file.contentType;
      if (!mimeType) {
        mimeType = mime.lookup(file.filename);
      }
      res.set({
        'Content-Type': mimeType,
        'Content-Disposition': 'attachment; filename=' + file.filename
      });
      const readStream = gfs.createReadStream({
        _id: id
      });
      readStream.on('error', err => {
        // report stream error
        console.log(err);
      });
      // the response will be the file itself.
      readStream.pipe(res);
    }
  });
});
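If you are on a recent MongoDB driver, the same route can be written without gridfs-stream (and without the eval patch) using the driver's native GridFSBucket. A sketch, where the bucket name and route path are assumptions:
const { GridFSBucket, ObjectId } = require('mongodb');

// Sketch using the driver's native GridFSBucket instead of gridfs-stream.
// The bucket name 'uploads' and the route path are assumptions; `connection`
// is the open mongoose connection from the snippet above.
app.get('/download/:id', async (req, res) => {
  const bucket = new GridFSBucket(connection.db, { bucketName: 'uploads' });
  const _id = new ObjectId(req.params.id);
  const files = await bucket.find({ _id }).toArray();
  if (!files.length) {
    return res.status(404).send('File not found.');
  }
  res.set({
    'Content-Type': files[0].contentType || 'application/octet-stream',
    'Content-Disposition': 'attachment; filename=' + files[0].filename
  });
  bucket.openDownloadStream(_id)
    .on('error', () => res.status(500).end())
    .pipe(res);
});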
I am creating a Node server to upload files using the 'express', 'fs' and 'busboy' modules. The server works as expected, but when I cancel an upload before it completes, the incomplete file is left in the filesystem. How can I remove the incomplete file?
var express = require("express"),
    fs = require("fs"),
    Busboy = require("busboy");

app = express();
app.listen(7000);
app.get("/", display_form);

app.post("/upload", function(req, res) {
  var busboy = new Busboy({
    headers: req.headers
  });
  busboy.on("file", function(fieldname, file, filename, encoding, mime) {
    var fstream = fs.createWriteStream("./uploads/" + filename);
    file.pipe(fstream);
    file.on("data", function(chunk) {
      console.log(chunk.length);
    });
    file.on("end", function() {
      console.log("end");
    });
    fstream.on("close", function() {
      fstream.close();
      console.log("fstream close");
    });
    fstream.on("error", function() {
      console.log("fstream error");
    });
  });
  busboy.on("finish", function() {
    console.log("uploaded");
    res.send("file uploaded");
  });
  busboy.on("error", function() {
    console.log("error busboy");
  });
  req.pipe(busboy);
});
Thanks for your help. I finally found a way to solve this problem: I added the code snippet below and it works fine.
req.on("close", function(err) {
  fstream.end();
  fs.unlink('./uploads/' + filename);
  console.log("req aborted by client");
});
I don't know busboy, but you open a stream and never close it.
Why not use the stream and the filename you already have: listen for 'finish' and 'error' and act accordingly.
Example:
busboy.on('error', function() {
  fs.unlink('./uploads/' + filename);
  console.log('error busboy');
});
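Combining the two suggestions, here is a sketch that removes the partial file when the client aborts (it assumes the handler is placed inside busboy's 'file' callback, where filename and fstream are in scope):
// Sketch: inside busboy's "file" handler, remove the partially written file
// if the client closes the connection before the upload finishes.
req.on("close", function() {
  fstream.end();
  fs.unlink("./uploads/" + filename, function(err) {
    if (err) return console.error(err);
    console.log("Removed incomplete upload: " + filename);
  });
});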
I am using the Knox S3 module, but when I eventually get the file, the resulting file is corrupt. Am I using Knox incorrectly?
var data;
client.getFile(path, function(err, file) {
  file.on('data', function(chunk) { data += chunk; });
  file.on('end', function() {
    // Here I end up sending the response with new Buffer(data), but that produces a bad file.
  });
});
Try using the writeStream:
var fs = require('fs');
var file = fs.createWriteStream(path);
client.getFile(path, function(err, stream) {
  stream.on('data', function(chunk) { file.write(chunk); });
  stream.on('end', function(chunk) { file.end(); });
});
and make sure to take a look at https://github.com/aws/aws-sdk-js
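If you do need the file in memory rather than on disk, here is a sketch that collects Buffer chunks and joins them with Buffer.concat; concatenating chunks onto a string, as in the original snippet, is what mangles the binary data:
// Sketch: accumulate binary chunks as Buffers; string concatenation corrupts
// binary data, which is the likely cause of the bad file.
client.getFile(path, function(err, file) {
  if (err) return console.error(err);
  var chunks = [];
  file.on('data', function(chunk) { chunks.push(chunk); });
  file.on('end', function() {
    var body = Buffer.concat(chunks);
    // send `body` in the response here
  });
});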