I can upload images via skipper and then resize them with sharp, but this seems inefficient to me because the skipper documentation says that file streams can be used for image thumbnailing before the file is uploaded.
Can I use the sharp module to resize images on the fly? If you could provide an example, I'd be grateful. Thank you in advance.
With the help of this answer and this blog post, I finally came up with a solution:
upload: function(req, res) {
  var sharp = require('sharp');
  var Writable = require('stream').Writable;

  // custom skipper receiver: resize each incoming file stream before writing it to disk
  var receiver = new Writable({objectMode: true});
  receiver._write = function(file, enc, cb) {
    var output = require('fs').createWriteStream('./assets/avatar/' + file.fd);
    var resizeTransform = sharp().resize(100);
    file.pipe(resizeTransform).pipe(output);
    // signal completion only once the resized file has been fully written
    output.on('finish', function() { cb(); });
    output.on('error', cb);
  };

  req.file('avatar').upload(receiver, (err, files) => {
    if (err) { return res.serverError(err); }
    return res.ok();
  });
}
I'm trying to upload a large (8.3 GB) video to my Node.js (Express) server by chunking using busboy. How do I receive each chunk (busboy is doing this part) and piece it together as one whole video?
I have been looking into readable and writable streams, but I never get the whole video; I keep overwriting parts of it, ending up with about 1 GB.
Here's my code:
req.busboy.on('file', (fieldname, file, filename) => {
  logger.info(`Upload of '${filename}' started`);
  const video = fs.createReadStream(path.join(`${process.cwd()}/uploads`, filename));
  const fstream = fs.createWriteStream(path.join(`${process.cwd()}/uploads`, filename));
  if (video) {
    video.pipe(fstream);
  }
  file.pipe(fstream);
  fstream.on('close', () => {
    logger.info(`Upload of '${filename}' finished`);
    res.status(200).send(`Upload of '${filename}' finished`);
  });
});
After 12+ hours, I figured it out using pieces from an article that was shared with me. I came up with this code:
//busboy is middleware on my index.js
const fs = require('fs-extra');
const path = require('path');
const streamToBuffer = require('fast-stream-to-buffer');

//API function called first
uploadVideoChunks(req, res) {
  req.pipe(req.busboy);
  req.busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    const fileNameBase = filename.replace(/\.[^/.]+$/, '');
    //save all the chunks to a temp folder with .tmp extensions
    streamToBuffer(file, function (error, buffer) {
      const chunkDir = `${process.cwd()}/uploads/${fileNameBase}`;
      fs.outputFileSync(path.join(chunkDir, `${Date.now()}-${fileNameBase}.tmp`), buffer);
    });
  });
  req.busboy.on('finish', () => {
    res.status(200).send(`Finished uploading chunk`);
  });
}
//API function called once all chunks are uploaded
saveToFile(req, res) {
  const { filename, profileId, movieId } = req.body;
  const uploadDir = `${process.cwd()}/uploads`;
  const fileNameBase = filename.replace(/\.[^/.]+$/, '');
  const chunkDir = `${uploadDir}/${fileNameBase}`;
  let outputFile = fs.createWriteStream(path.join(uploadDir, filename));
  fs.readdir(chunkDir, function(error, filenames) {
    if (error) {
      throw new Error('Cannot get upload chunks!');
    }
    //sort so the timestamped chunks are written back in upload order
    filenames.sort();
    //loop through the temp dir and write to the stream to create a new file
    filenames.forEach(function(tempName) {
      const data = fs.readFileSync(`${chunkDir}/${tempName}`);
      outputFile.write(data);
      //delete the chunk we just handled
      fs.removeSync(`${chunkDir}/${tempName}`);
    });
    outputFile.end();
  });
  outputFile.on('finish', function () {
    //delete the temp folder once the file is written
    fs.removeSync(chunkDir);
  });
}
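For context, here is a hypothetical browser-side sketch of the two-step flow above (the endpoint paths, field name, and chunk size are all assumptions, not part of the original answer). Chunks are sent sequentially so the timestamped .tmp names sort back into the right order:

async function uploadInChunks(file, chunkSize = 10 * 1024 * 1024) {
  // send each slice of the file as its own multipart request
  for (let start = 0; start < file.size; start += chunkSize) {
    const form = new FormData();
    form.append('file', file.slice(start, start + chunkSize), file.name);
    await fetch('/uploadVideoChunks', { method: 'POST', body: form });
  }
  // then ask the server to stitch the chunks together
  await fetch('/saveToFile', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ filename: file.name })
  });
}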
Use streams
multer allows you to easily handle file uploads as part of an Express route. This works great for small files that don't leave a significant memory footprint.
The problem with loading a large file into memory is that you can actually run out of memory and cause your application to crash.
Instead, use a multipart/form-data request and assign a read stream to the file field in your request options, so the file is streamed rather than buffered.
Streams are extremely valuable for optimizing performance.
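For illustration, a minimal client-side sketch of that pattern using the request package's formData option (the URL, field name, and file path are placeholders):

const fs = require('fs');
const request = require('request');

request.post({
  url: 'http://localhost:3000/upload',
  formData: {
    // assigning a read stream to the field streams the file instead of buffering it
    video: fs.createReadStream('./large-video.mp4')
  }
}, (err, httpResponse, body) => {
  if (err) return console.error('Upload failed:', err);
  console.log('Upload complete:', body);
});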
On the server side, try this code sample; I think it will work for you.
// writePath is the destination path, totalSize tracks progress, and cb is
// whatever progress callback you pass in (all were left undeclared in the original)
let totalSize = 0;
busboy.on("file", function(fieldName, file, filename, encoding, mimetype) {
  const writeStream = fs.createWriteStream(writePath);
  file.pipe(writeStream);
  file.on("data", data => {
    totalSize += data.length;
    cb(totalSize);
  });
  file.on("end", () => {
    console.log("File " + fieldName + " finished");
  });
});
You can also refer to this link to resolve the problem:
https://github.com/mscdex/busboy/issues/143
I think multer is a good fit for this. Did you try multer?
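As a minimal sketch of what that could look like (the route, field name, and destination directory are assumptions), multer's disk storage streams each file to disk as it arrives, so large uploads don't have to fit in memory:

const express = require('express');
const multer = require('multer');

// files are streamed to the 'uploads/' directory rather than held in memory
const upload = multer({ dest: 'uploads/' });

const app = express();
app.post('/upload', upload.single('video'), (req, res) => {
  res.status(200).send(`Upload of '${req.file.originalname}' finished`);
});
app.listen(3000);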
I'm developing a feature that allows users to upload images to MongoDB with Node.js:
My problem:
Get the image file from the user's request and do two tasks: store the current image in MongoDB in a collection named "Origin_image", for example, and resize the current image and store it in a collection named "Thumbnail_image".
My solution so far:
I can only store the original image successfully, using multer-gridfs-storage and multer as in the code below:
const crypto = require('crypto');
const multer = require('multer');
const GridFsStorage = require('multer-gridfs-storage');
let storageFS = new GridFsStorage({
  db: app.get("mongodb"),
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        const filename = file.originalname;
        const fileInfo = {
          filename: filename,
          bucketName: 'images'
        };
        resolve(fileInfo);
      });
    });
  }
});
var upload = multer({ storage: storageFS }).single('image');
exports.uploadImage = async function (req, res) {
  try {
    upload(req, res, function (err) {
      if (err) {
        return res.send(err);
      }
      res.json({
        status: true,
        filePath: req.file.originalname
      });
    });
  } catch (error) {
    res.send(error);
  }
}
Does anyone have any idea how to solve my problem? Thanks!
If you are using Angular on your frontend, let the end user handle the image resizing so that your server does not have to deal with the overhead. I am currently using ng2-img-max to resize images; you can initiate the resize on file change.
I also wanted thumbnails alongside the originals, but resizing both caused a huge performance hit, and linking the two was awkward because GridFS stores the files before you can do anything with them, leaving you only the response. So save yourself some time: resize only once, to your size limit for the user, and for displaying thumbnails use sharp with custom query params to serve the size you want.
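A minimal sketch of what serving resized images via query params could look like (the route, directory, and parameter name are assumptions, not the answerer's actual code):

const express = require('express');
const sharp = require('sharp');
const fs = require('fs');

const app = express();
// e.g. GET /images/avatar.png?w=150 returns a 150px-wide version
app.get('/images/:name', (req, res) => {
  const width = parseInt(req.query.w, 10);
  const source = fs.createReadStream(`./uploads/${req.params.name}`);
  const transform = Number.isNaN(width) ? sharp() : sharp().resize(width);
  source.pipe(transform).pipe(res);
});
app.listen(3000);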
Good luck and happy coding.
I've found a few articles explaining the process, but most of them are not up to date.
How do you handle image upload in node.js?
I'm using multer and it works perfectly. It stores your image locally. You can also send it to MongoDB if you want. This is how I am doing it:
var multer = require('multer');
var done = false;

//define the model you are working with
var Slides = require('./models/work');

//note: these options (dest/rename/onFileUpload*) are the old multer 0.x API
app.use(multer({
  dest: './public/img',
  rename: function (fieldname, filename) {
    return filename + Date.now();
  },
  onFileUploadStart: function (file) {
    console.log(file.originalname + ' is starting ...');
  },
  onFileUploadComplete: function (file) {
    console.log(file.fieldname + ' uploaded to ' + file.path);
    done = true;
    var id = file.fieldname;
    var str = file.path;
    var image = str.replace('public', '');
    var slidegegevens = {
      "id": id,
      "img": image
    };
    var s = new Slides(slidegegevens);
    s.save(function (err, slidegegevens) {
      console.log(err);
      console.log('slidegegevens: ' + slidegegevens);
    });
  }
}));
I use busboy middleware in Express to parse out images in a multipart/form-data request, and it works quite nicely.
My code looks something like:
const busboy = require('connect-busboy');
//...
app.use(busboy());
app.use(function parseUploadMW(req, res, next) {
  req.busboy.on('file', function onFile(fieldname, file, filename, encoding, mimetype) {
    file.fileRead = [];
    file.on('data', function onData(chunk) {
      this.fileRead.push(chunk);
    });
    file.on('error', function onError(err) {
      console.log('Error while buffering the stream: ', err);
      //handle error
    });
    file.on('end', function onEnd() {
      var finalBuffer = Buffer.concat(this.fileRead);
      req.files = req.files || {};
      req.files[fieldname] = {
        buffer: finalBuffer,
        size: finalBuffer.length,
        filename: filename,
        mimetype: mimetype.toLowerCase()
      };
    });
  });
  req.busboy.on('finish', function onFinish() {
    next();
  });
  req.pipe(req.busboy);
});
Then the files will be available in the req object at req.files in your Express routes.
This technique works fine for small images. If you are doing some heavy-duty uploading, you may want to consider streaming the files to their destination (to save memory), like S3 or similar, which can also be achieved with busboy.
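For example, a hedged sketch of streaming a busboy file straight to S3 with the aws-sdk v2 client (the bucket name is a placeholder, and credentials are assumed to be configured):

const AWS = require('aws-sdk');
const s3 = new AWS.S3();

req.busboy.on('file', (fieldname, file, filename) => {
  // Body accepts a readable stream, so the file is never buffered in full
  s3.upload({ Bucket: 'my-upload-bucket', Key: filename, Body: file }, (err, data) => {
    if (err) return console.error('S3 upload failed:', err);
    console.log('Uploaded to', data.Location);
  });
});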
Another package that is popular and also decent is: https://github.com/andrewrk/node-multiparty.
I think it is better to use formidable to handle incoming images.
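As a minimal sketch of that idea (the port, route, and upload directory are assumptions), formidable's classic API looks like this:

const formidable = require('formidable');
const http = require('http');

http.createServer((req, res) => {
  if (req.url === '/upload' && req.method === 'POST') {
    const form = new formidable.IncomingForm();
    form.uploadDir = './uploads'; // parsed files are written here as they stream in
    form.parse(req, (err, fields, files) => {
      if (err) { res.writeHead(500); return res.end(String(err)); }
      res.end('Received files: ' + Object.keys(files).join(', '));
    });
  }
}).listen(8080);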
I am searching for a way in Node.js to convert an SVG to a PNG with the help of imagemagick (https://github.com/rsms/node-imagemagick), without storing the resulting PNG in a temp file on the local filesystem.
Unfortunately, I haven't been able to do this, and I didn't find an example on the internet. Can someone give me one?
var im = require('imagemagick');
var fs = require('fs');

im.convert(['foo.svg', 'png:-'],
  function(err, stdout) {
    if (err) throw err;
    //stdout is your image
    //just write it to a file to test this:
    fs.writeFileSync('test.png', stdout, 'binary');
  });
It just passes the 'raw' arguments through to the command-line convert, so for any further questions, just look at convert's docs.
I found what I was looking for. Basically, I figured out how to pipe data into the stdin of the convert process. This makes it possible for me to convert images without accessing the local filesystem.
Here is my demo code:
var im = require('imagemagick');
var fs = require('fs');

var svg = fs.readFileSync('/somepath/svg.svg', 'utf8');
var conv = im.convert(['svg:-', 'png:-']);

conv.on('data', function(data) {
  console.log('data');
  console.log(data);
});
conv.on('end', function() {
  console.log('end');
});

conv.stdin.write(svg);
conv.stdin.end();
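To actually use the PNG bytes rather than just logging them, the logging handlers above can be replaced with ones that collect the chunks into a single Buffer (a small follow-on sketch using the same conv object):

var chunks = [];
conv.on('data', function(data) {
  chunks.push(data);
});
conv.on('end', function() {
  var png = Buffer.concat(chunks); // the finished PNG, never written to a temp file
  // e.g. store it in a database or send it as an HTTP response here
});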
You can also use streams and pipe the result somewhere without storing it as a temp file. Below is some sample code taken from the GitHub repo:
var fs = require('fs');
var im = require('imagemagick');

im.resize({
  srcData: fs.readFileSync('kittens.jpg', 'binary'),
  width: 256,
  format: 'png'
}, function(err, stdout, stderr) {
  if (err) throw err;
  fs.writeFileSync('kittens-resized.png', stdout, 'binary'); // change this part
  console.log('resized kittens.jpg to fit within 256x256px');
});
You can also use svgexport (I'm its author):
var svgexport = require('svgexport');
svgexport.render({input: 'file.svg', output: 'file.png'}, callback);
Using Node.js and ImageMagick, I am able to resize an image and send it to the browser with this:
var http = require('http'),
    spawn = require('child_process').spawn;

http.createServer(function(req, res) {
  var image = 'test.jpg';
  var convert = spawn('convert', [image, '-resize', '100x100', '-']);
  convert.stdout.pipe(res);
  convert.stderr.pipe(process.stderr);
}).listen(8080);
The test image is read from the filesystem; I want to change this so that the test image is a binary string:
var image = 'some long binary string representing an image.......';
My plan is to store the binary strings in MongoDB and read them off dynamically.
Take a look at the node module node-imagemagick. There is the following example on the module's page to resize an image and write it to a file...
var fs = require('fs');
var im = require('imagemagick');

im.resize({
  srcData: fs.readFileSync('kittens.jpg', 'binary'),
  width: 256
}, function(err, stdout, stderr) {
  if (err) throw err;
  fs.writeFileSync('kittens-resized.jpg', stdout, 'binary');
  console.log('resized kittens.jpg to fit within 256x256px');
});
You can alter this code to do the following...
var mime = require('mime') // Get mime type based on file extension. use "npm install mime"
  , fs = require('fs')
  , http = require('http')
  , im = require('imagemagick');

http.createServer(function (req, res) {
  var filePath = 'test.jpg';
  fs.readFile(filePath, 'binary', function (err, data) {
    if (err) { throw err; }
    im.resize({
      srcData: data,
      width: 256
    }, function (err, stdout, stderr) {
      if (err) { throw err; }
      // stdout holds the resized image as a binary string, so send that
      // (not the original file) with the correct length
      res.writeHead(200, {
        'Content-Type': mime.lookup(filePath),
        'Content-Length': Buffer.byteLength(stdout, 'binary')
      });
      res.end(stdout, 'binary');
    });
  });
}).listen(8080);
P.S. I haven't run the code above yet. I will try shortly, but it should give you an idea of how to asynchronously resize and stream a file.
Since you are using spawn() to invoke the ImageMagick command line convert, the normal approach is to write intermediate files to a temp directory where they will get cleaned up either immediately after use or as a scheduled/cron job.
If you want to avoid writing the file to convert, one option to try is base64 encoding your images and using the inline format. This is similar to how images are encoded in some HTML emails or web pages.
inline:{base64_file|data:base64_data}
Inline images let you read an image defined in a special base64 encoding.
NOTE: There is a limit on the size of command-line options you can pass; the ImageMagick docs suggest 5000 bytes. Base64-encoded strings are larger than the original (Wikipedia suggests roughly 137% of the original size), which could be very limiting unless you're showing thumbnails.
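A hedged sketch of that inline: approach with the node-imagemagick convert wrapper (file names are placeholders, and the command-line size limit above still applies):

var im = require('imagemagick');
var fs = require('fs');

var base64 = fs.readFileSync('input.png').toString('base64');
// the image travels on the command line instead of through a temp file
im.convert(['inline:data:image/png;base64,' + base64, '-resize', '100x100', 'png:-'],
  function(err, stdout) {
    if (err) throw err;
    fs.writeFileSync('out.png', stdout, 'binary');
  });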
Another ImageMagick format option is ephemeral:
ephemeral:{image_file}
Read and then Delete this image file.
If you want to avoid the I/O passing altogether, you would need a Node.js module that directly integrates a low-level library like ImageMagick or GD rather than wrapping command line tools.
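For instance, sharp wraps libvips directly, so it can work buffer-to-buffer with no child process or temp files (a small sketch; rasterizing SVG assumes a sharp/libvips build with SVG support):

const sharp = require('sharp');
const fs = require('fs');

sharp(fs.readFileSync('input.svg'))
  .png()
  .toBuffer()
  .then(buffer => fs.writeFileSync('output.png', buffer))
  .catch(err => console.error(err));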
What have you tried so far? You can use GridFS to store the image data and retrieve it as a stream from there. This is in C#; not sure if it helps.
public static void UploadPhoto(string name)
{
    var server = MongoServer.Create("mongodb://localhost:27017");
    var database = server.GetDatabase("MyDB");
    string fileName = name;
    using (var fs = new FileStream(fileName, FileMode.Open))
    {
        var gridFsInfo = database.GridFS.Upload(fs, fileName);
        var fileId = gridFsInfo.Id;
        //ShowPhoto(filename);
    }
}

public static Stream ShowPhoto(string name)
{
    var server = MongoServer.Create("mongodb://localhost:27017");
    var database = server.GetDatabase("MyDB");
    var file = database.GridFS.FindOne(Query.EQ("filename", name));
    //return the stream unread so the caller can consume it
    return file.OpenRead();
}
You can now use the stream returned by ShowPhoto.
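Since the question is about Node.js, here is a hedged equivalent using the official mongodb driver's GridFSBucket (the database name and connection string are assumptions):

const { MongoClient, GridFSBucket } = require('mongodb');

async function showPhoto(name, res) {
  const client = await MongoClient.connect('mongodb://localhost:27017');
  const bucket = new GridFSBucket(client.db('MyDB'));
  // stream the stored image straight to the HTTP response
  bucket.openDownloadStreamByName(name).pipe(res);
}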