I use an image cropper (https://www.npmjs.com/package/cropper) which generates a Blob object from the cropped image. I would now like to upload this Blob object as well. How is this possible?
My current (not working) approach:
Client:
$(elem).cropper('getCroppedCanvas').toBlob(function (blob) {
  var file = new File([blob], "name");
  Meteor.call('file-upload', file);
});
Server:
Meteor.methods({
  'file-upload': function (file) {
    var fs = Meteor.npmRequire('fs');
    fs.writeFile("/tmp/test.jpg", file, function (err) {
      if (err) {
        return console.log(err);
      }
      console.log("The file was saved!");
    });
  }
});
See my answer in this SO post.
I highly recommend Slingshot. It's really easy to use and uploads blobs.
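For reference, a rough sketch of what that could look like with Slingshot. The directive name "croppedImages", the bucket, and the file name are my assumptions, and the AWS credentials still have to be configured in your Meteor settings:

// Server: declare a directive describing and authorizing the upload (settings are examples)
Slingshot.createDirective("croppedImages", Slingshot.S3Storage, {
  bucket: "my-bucket",                       // assumption: your S3 bucket
  allowedFileTypes: ["image/png", "image/jpeg"],
  maxSize: 5 * 1024 * 1024,
  authorize: function () { return true; },   // restrict this in a real app
  key: function (file) { return file.name; }
});

// Client: wrap the cropped Blob in a File and send it straight to S3
$(elem).cropper('getCroppedCanvas').toBlob(function (blob) {
  var file = new File([blob], "cropped.jpg", {type: "image/jpeg"});
  var uploader = new Slingshot.Upload("croppedImages");
  uploader.send(file, function (error, downloadUrl) {
    if (error) return console.error(error);
    console.log("Uploaded to", downloadUrl);
  });
});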
The front-end is written in ReactJS, more specifically grommet. There are multiple PDF files to be served to the user on clicking the Download button. The files are stored in GridFS. I wish to give the user a zipped folder containing all these files. How can I achieve this?
Thanks in advance.
I've got it! Super simple solution with archiver. It worked on the first try.
Note: I am using sails.js. DBFile is my Model.
const GridFsAdapter = require('../adapters/gridfs-adapter');
const archiver = require('archiver');

async function downloadMultiple (query, res, filename) {
  // create a stream for the download
  const archive = archiver('zip', {
    zlib: {level: 9} // sets the compression level
  });

  // catch warnings (ie stat failures and other non-blocking errors)
  archive.on('warning', (err) => {
    if (err.code === 'ENOENT') {
      // log warning
      sails.log.warn(err);
    } else {
      // throw error
      throw err;
    }
  });

  archive.on('error', (err) => {
    throw err;
  });

  // set the file name
  res.attachment(filename);

  // pipe the archive to the response before appending files/streams
  archive.pipe(res);

  // add your streams
  await DBFile
    .stream(query)
    // like MongoDB's cursor.forEach(); avoids having all records in memory at once
    .eachRecord(async (dbFile) => {
      // get the stream from the db
      const {stream, data} = await GridFsAdapter().read(dbFile.fileId);

      // append the stream, with a filename, to the download stream
      archive.append(stream, {name: data.filename});
    });

  // tell the archive stream that all files have been added
  archive.finalize();
}
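For completeness, a hypothetical Sails controller action that calls this helper might look roughly like this (the action name, query, and zip file name are my assumptions):

// Hypothetical controller action that zips all files matching a query
module.exports = {
  downloadAll: async function (req, res) {
    try {
      // e.g. zip every file belonging to the requesting user
      await downloadMultiple({owner: req.param('userId')}, res, 'documents.zip');
    } catch (err) {
      return res.serverError(err);
    }
  }
};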
I want to save files that I am getting from another server onto my own server, but the problem is that when I call createWriteStream it gives me the error:
no such file or directory, open
E:\pathtoproject\myproject\public\profile_14454.jpg
Here is my code, which is in E:\pathtoproject\myproject\modules\dowload.js:
request.head(infos.profile_pic, function(err, res, body) {
  const completeFileName = '../public/profile_14454.' + res.headers['content-type'].split('/')[1];
  var imageStream = fs.createWriteStream(completeFileName);
  imageStream.on('open', function(fd) {
    console.log("File open");
    request(infos.profile_pic).pipe(imageStream).on('close', function(body) {
      consoleLog('Profile pic saved');
      console.log('This is the content of body');
      console.log(body);
      connection.query('UPDATE user set photo=? where id=?', [completeFileName, lastID], function(err, result, fields) {
        if (err) {
          consoleLog('Error while update the profile pic');
        }
      });
    })
  });
});
When I removed the directory ../public/ and left only the name of the file,
profile_14454.' + res.headers['content-type'].split('/')[1], it worked, but the file was saved in the root directory of the project (E:\pathtoproject\myproject\).
What's wrong with what I am doing? How can I have the file saved under the public directory?
I am using Node.js 8.9.4.
I tried this with my small piece of code:
var fs = require("fs");
var data = 'Simply Easy Learning';

// Create a writable stream
var writerStream = fs.createWriteStream('./airo/output.txt');

// Write the data to the stream with utf8 encoding
writerStream.write(data, 'UTF8');

// Mark the end of the file
writerStream.end();

// Handle stream events --> finish and error
writerStream.on('finish', function() {
  console.log("Write completed.");
});

writerStream.on('error', function(err) {
  console.log(err.stack);
});

console.log("Program Ended");
My code is in the path E:\syed ayesha\nodejs\nodejs, and I want to store my file in the airo folder which is inside this path. So I used one dot for the relative path. Hope this helps.
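One more note: relative paths passed to fs.createWriteStream are resolved against the process's current working directory, not against the module file. If you want the file to land under public regardless of where Node was started from, a sketch along these lines (reusing the asker's folder layout as an assumption) should help:

const path = require('path');
const fs = require('fs');

// Build the target path relative to this module file instead of the working directory
// (assumes this file lives in E:\pathtoproject\myproject\modules)
const completeFileName = path.join(__dirname, '..', 'public', 'profile_14454.jpg');

const imageStream = fs.createWriteStream(completeFileName);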
I am using Node.js to upload an image to Azure Storage (https://github.com/Azure/azure-storage-node). The upload is successful, but I cannot see the image when I visit the URL.
The upload code looks like this:
var file = 'tmp/myimage.png';
var blobService = azure.createBlobService(config.azure.connection_string);
blobService.createBlockBlobFromLocalFile(config.azure.container, 'taskblob', file, function(err, result, response) {
  if (err) return console.log(err);
  console.log(response);
  callback();
});
In the Azure portal I can see that something has been uploaded to my container, but visiting the provided URL just loads a blank page.
https://<storage>.blob.core.windows.net/<container>/taskblob
I am also getting a success response back from Azure when logging 'response'
@wazzaday, generally we can upload files into Azure Blob Storage using code like the one you provided.
var azure = require('azure-storage');
var blobSvc = azure.createBlobService("**", "**");
var file = 'tmp/1.txt';

blobSvc.createContainerIfNotExists('mycontainer', function (error, result, response) {
  if (!error) {
    // Container exists and allows
    // anonymous read access to blob
    // content and metadata within this container
    console.log('ok')
  }
});

blobSvc.createBlockBlobFromLocalFile('mycontainer', 'myblob1', file, function (error, result, response) {
  if (!error) {
    console.log('file uploaded' + response)
  } else {
    console.log(error);
  }
});
With the above code, we need to make sure the file path is right.
Because your file size shows as 0 in the Azure portal, I suggest you try a ReadStream to upload your file and then check the file size again. Please refer to this code:
var azure = require('azure-storage');
var fs = require('fs');

var blobSvc = azure.createBlobService("**", "**");
var file = 'tmp/1.txt';

blobSvc.createContainerIfNotExists('mycontainer', function (error, result, response) {
  if (!error) {
    // Container exists and allows
    // anonymous read access to blob
    // content and metadata within this container
    console.log('ok')
  }
});

// count the file length first; createBlockBlobFromStream needs to know it up front
var stream = fs.createReadStream(file);
var dataLength = 0;

stream
  .on('data', function (chunk) {
    dataLength += chunk.length;
  })
  .on('end', function () { // done counting
    console.log('The length was:', dataLength);

    // the counting stream has been consumed, so open a fresh one for the upload
    blobSvc.createBlockBlobFromStream('mycontainer', 'filename', fs.createReadStream(file), dataLength, function (error) {
      if (!error) {
        console.log('ok Blob uploaded')
      }
    });
  });
Please try the above code and let me know if there are any updates.
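You might also verify the uploaded size from code instead of the portal; a minimal check with the same azure-storage SDK (assuming the container and blob names used above) could look like this:

// Verify that the uploaded blob is no longer 0 bytes (names match the upload above)
blobSvc.getBlobProperties('mycontainer', 'filename', function (error, result) {
  if (!error) {
    console.log('Uploaded blob size (bytes):', result.contentLength);
  }
});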
I'm currently building a web app using Sails.js and got stuck retrieving an image file from GridFS. I have successfully uploaded the file to my Mongo GridFS using skipper-gridfs. I have no idea how to display the file in the correct way (I'm new to Sails.js and Node).
Here is what my code for retrieving the image file from GridFS looks like in FileController.js (I'm using gridfs-stream):
show: function (req, res, next) {
  var mongo = require('mongodb');
  var Grid = require('gridfs-stream');
  var buffer = "";

  // create or use an existing mongodb-native db instance
  var db = new mongo.Db('testDb', new mongo.Server("192.168.0.2", 27017), {safe: true});
  var gfs = Grid(db, mongo);

  // streaming from gridfs
  var readstream = gfs.createReadStream({
    filename: 'e1ecfb02-e095-4e2f.png'
  });

  // check if the file exists
  gfs.exist({
    filename: 'e1ecfb02-e095-4e2f.png'
  }, function (err, found) {
    if (err) return handleError(err);
    found ? console.log('File exists') : console.log('File does not exist');
  });

  // buffer the data
  readstream.on("data", function (chunk) {
    buffer += chunk;
    console.log("adsf", chunk);
  });

  // dump contents to console when complete
  readstream.on("end", function () {
    console.log("contents of file:\n\n", buffer);
  });
}
When I ran it, the console showed nothing.
There is no error either.
How should I fix this?
Additional Question:
Is it better and easier to store/read files to/from the local disk instead of using GridFS?
Am I correct in choosing gridfs-stream to retrieve the file from GridFS?
In the skipper-gridfs code there's a 'read' method that accepts an fd value and returns the file corresponding to that value. So you just have to pull that file from Mongo with that method and send it as the response. It should work fine.
download: function (req, res) {
  var blobAdapter = require('skipper-gridfs')({
    uri: 'mongodb://localhost:27017/mydbname.images'
  });

  var fd = req.param('fd'); // value of fd comes here from the get request

  blobAdapter.read(fd, function(error, file) {
    if (error) {
      res.json(error);
    } else {
      res.contentType('image/png');
      res.send(new Buffer(file));
    }
  });
}
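A matching route for this action might look roughly like the sketch below (the controller name and URL are my assumptions):

// config/routes.js (hypothetical route for the action above)
module.exports.routes = {
  'GET /file/download': 'FileController.download'
};

// e.g. GET /file/download?fd=e1ecfb02-e095-4e2f.png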
I hope it helps :)
Additional Questions:
Yes, using GridFS is better in both performance and efficiency. MongoDB normally has a 16MB per-document limit, which matters for binary files, but using GridFS you can store files of any size; it breaks them into chunks and stores them.
Retrieving has been shown above.
You can now use skipper-gridfs in sails to manage uploads/downloads.
var blobAdapter = require('skipper-gridfs')({uri: 'mongodb://jimmy#j1mtr0n1xx#mongo.jimmy.com:27017/coolapp.avatar_uploads' });
Upload:
req.file('avatar')
  .upload(blobAdapter.receive(), function whenDone(err, uploadedFiles) {
    if (err) return res.negotiate(err);
    else return res.ok({
      files: uploadedFiles,
      textParams: req.params.all()
    });
  });
Download
blobAdapter.read(filename, callback);
Bear in mind that the file name will change once you upload it to Mongo; you have to use the file name returned in the first response.
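For example, here is a rough sketch of keeping the stored name from the upload response and reading it back; the uploadedFiles[0].fd property and the image content type are assumptions on my part:

req.file('avatar').upload(blobAdapter.receive(), function (err, uploadedFiles) {
  if (err) return res.negotiate(err);

  // the adapter reports the name it actually stored the file under
  var storedName = uploadedFiles[0].fd;

  blobAdapter.read(storedName, function (err, fileBytes) {
    if (err) return res.negotiate(err);
    res.contentType('image/png');
    return res.send(fileBytes);
  });
});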
I am creating an application that takes some file uploads and sends them straight up to S3. I would prefer not to even have the tmp file on my server, so I am using the Knox module and would like to take the raw stream from Formidable and send it over Knox to S3. I have done something similar using Knox to download a file with this code:
knox.downloads.get(widget.download).on('response', function(sres) {
  res.writeHead(200, {
    'Content-Type': 'application/zip',
    'Content-Length': sres.headers['content-length'],
    'Content-Disposition': 'attachment; filename=' + widget.download
  });
  util.pump(sres, res);
}).end();
Now I would like to do something similar in the opposite direction (a file upload from the browser to S3).
So far I have written an event handler to capture each piece of data from the file as it's being uploaded:
var form = new formidable.IncomingForm();

form.onPart = function(part) {
  if (!part.filename) {
    form.handlePart(part);
  } else {
    if (part.name == 'download') {
      // Upload to the download bucket
      controller.putDownload(part);
    } else {
      // Upload to the image bucket
      controller.putImage(part);
    }
    //res.send(sys.inspect(part));
  }
}

form.parse(req, function(err, fields, files) {
  if (err) {
    res.json(err);
  } else {
    res.send(sys.inspect({fields: fields, files: files}), {'content-type': 'text/plain'});
    //controller.createWidget(res, fields, files);
  }
});
controller.putDownload = function(part) {
  part.addListener('data', function(buffer) {
    knox.download.putStream(data, part.filename, function(err, s3res) {
      if (err) throwError(err);
      else {
        console.log(s3res);
      }
    });
  })
  knox.downloads.putStream(part, part.filename, function(err, s3res) {
    if (err) throwError(err);
    else {
      console.log(s3res);
    }
  });
}
But the data event only gives me the buffer. So is it possible to capture the stream itself and push it to S3?
What you want to do is override the Form.onPart method:
IncomingForm.prototype.onPart = function(part) {
  // this method can be overwritten by the user
  this.handlePart(part);
};
Formidable's default behavior is to write the part to a file. You don't want that. You want to handle the 'part' events to write to the knox download. Start with this:
form.onPart = function(part) {
  if (!part.filename) {
    // let formidable handle all non-file parts
    form.handlePart(part);
    return;
  }
Then open the knox request and handle the raw part events yourself:
part.on('data', function(data) {
  req.write(data);
});
part.on('end', function() {
  req.end();
});
part.on('error', function(err) {
  // handle this too
});
As a bonus, if req.write(data) returns false, that means the send buffer is full. You should pause the Formidable parser. When you get a drain event from the Knox stream, you should resume Formidable.
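A rough sketch of that backpressure handling, reusing the req, part, and form names from above (untested, just to illustrate the idea):

part.on('data', function(data) {
  var flushed = req.write(data);
  if (!flushed) {
    // Knox's send buffer is full; pause the Formidable parser
    form.pause();
  }
});

req.on('drain', function() {
  // the buffer has emptied, so let Formidable flow again
  form.resume();
});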
Use multiparty instead. It supports the kind of streaming you want. It even has an example of streaming directly to S3: https://github.com/superjoe30/node-multiparty/blob/master/examples/s3.js
In an Express middleware, I use formidable together with PassThrough to stream-upload a file to S3 (in my case, to Minio, which is S3-compatible, through the Minio SDK; I believe it works for AWS S3 too with the same Minio SDK).
Here is the sample code.
const formidable = require('formidable')
const { PassThrough } = require('stream')

const form = new formidable.IncomingForm()
const pass = new PassThrough()
const fileMeta = {}

form.onPart = part => {
  if (!part.filename) {
    form.handlePart(part)
    return
  }
  fileMeta.name = part.filename
  fileMeta.type = part.mime
  part.on('data', function (buffer) {
    pass.write(buffer)
  })
  part.on('end', function () {
    pass.end()
  })
}

form.parse(req, err => {
  if (err) {
    req.minio = { error: err }
    next()
  } else {
    handlePostStream(req, next, fileMeta, pass)
  }
})
And handlePostStream looks like below, for your reference:
const uuidv1 = require('uuid/v1')

const handlePostStream = async (req, next, fileMeta, fileStream) => {
  let filename = uuidv1()
  try {
    const metaData = {
      'content-type': fileMeta.type,
      'file-name': Buffer.from(fileMeta.name).toString('base64')
    }
    const minioClient = /* Get Minio Client */
    await minioClient.putObject(MINIO_BUCKET, filename, fileStream, metaData)
    req.minio = { post: { filename: `${filename}` } }
  } catch (error) {
    req.minio = { error }
  }
  next()
}
You can find the source code on GitHub, and its unit tests too.
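If it helps, here is a rough sketch of mounting that middleware in an Express app; the minioUpload name and the route are assumptions, and it presumes the formidable/PassThrough code above is wrapped in a (req, res, next) function:

const express = require('express')
const app = express()

// minioUpload wraps the formidable + PassThrough code above and sets req.minio
app.post('/upload', minioUpload, (req, res) => {
  if (req.minio.error) {
    return res.status(500).json({ error: req.minio.error.message })
  }
  return res.status(201).json(req.minio.post)
})

app.listen(3000)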
There is no way for you to capture the stream, because the data has to be translated by Formidable. The buffer you're given is the file contents in chunks of buffer.length. This might be a problem: looking at Formidable's docs, it appears that until the file is completely uploaded it can't reliably report the file size, and Knox's put method might need that.
Never used Knox this way before, but you might have some luck with something like this:
controller.putDownload = function(part) {
  var req = knox.download.put(part.filename, {
    'Content-Type': 'text/plain'
  });
  part.addListener('data', function(buffer) {
    req.write(buffer);
  });
  // finish the S3 request only once the part has finished streaming
  part.addListener('end', function() {
    req.end();
  });
  req.on('response', function(res) {
    // error checking
  });
}
I'm a little unsure about the response-checking bits, but see if you can whip that into shape. Also, "Streaming an octet stream from request to S3 with knox on node.js" has a writeup that may be useful to you.
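For what it's worth, the response checking might look something like this sketch (just inspecting the S3 status code; this is an assumption on my part, not tested against Knox):

req.on('response', function(res) {
  if (res.statusCode === 200) {
    console.log('Uploaded', part.filename, 'to S3');
  } else {
    console.error('S3 upload failed with status', res.statusCode);
  }
});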