Check uploaded file extension in Sails.js - node.js

How can we check an uploaded file's extension in Sails.js?
I tried skipper and multer but got no result.
Any suggestions?

You should use the saveAs option for each file before saving.
var md5 = require('md5');

module.exports = {
  testUpload: function (req, res) {
    // allowed file types
    var allowedTypes = ['image/jpeg', 'image/png'];
    // skipper's default upload directory is .tmp/uploads/
    var allowedDir = "../../assets/images";
    // do not define dirname; use the default path
    req.file("uploadFiles").upload({
      saveAs: function (file, cb) {
        var d = new Date();
        var extension = file.filename.split('.').pop();
        // generate a unique filename with the extension
        var uuid = md5(d.getMilliseconds()) + "." + extension;
        // separate allowed and disallowed file types
        if (allowedTypes.indexOf(file.headers['content-type']) === -1) {
          // save disallowed files to the default upload path
          cb(null, uuid);
        } else {
          // save allowed files to the allowed directory
          cb(null, allowedDir + "/" + uuid);
        }
      }
    }, function whenDone(err, files) {
      return res.json({
        files: files,
        err: err
      });
    });
  }
};
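Note that file.headers['content-type'] is supplied by the client and can be spoofed, so it may be worth validating the filename extension as well. A minimal variation of the saveAs function above, assuming a hypothetical extension whitelist of your own:
saveAs: function (file, cb) {
  // hypothetical whitelist; adjust to your needs
  var allowedExtensions = ['jpg', 'jpeg', 'png'];
  var extension = file.filename.split('.').pop().toLowerCase();
  var uuid = md5(new Date().getMilliseconds()) + "." + extension;
  if (allowedExtensions.indexOf(extension) === -1 ||
      allowedTypes.indexOf(file.headers['content-type']) === -1) {
    // either check failed: keep the file in the default upload directory
    return cb(null, uuid);
  }
  // both checks passed: save to the allowed directory
  cb(null, allowedDir + "/" + uuid);
}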

Just get the uploaded files array and check the last chunk of the string after the dot.
req.file('file').upload({
  maxBytes: 2000000,
  dirname: 'uploadFolder'
}, function (error, files) {
  if (error) return sails.log.error(error);
  // You have the files array, so you can do this
  files[0].fd.split('.').pop(); // you get the extension
});
What is going on here? When the upload is finished you get an array of files with their filenames. From that array you can read where each file is located (its full path).
The last step is splitting the path string on dots and taking the last item with the pop() method.
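If you also need to reject files with disallowed extensions, one option is to validate after the upload completes and delete anything that fails the check. A minimal sketch along those lines (the whitelist and the fs.unlink cleanup are my own additions, not part of the original answer):
var fs = require('fs');
var allowed = ['jpg', 'jpeg', 'png']; // hypothetical whitelist

req.file('file').upload({
  maxBytes: 2000000,
  dirname: 'uploadFolder'
}, function (error, files) {
  if (error) return res.serverError(error);

  // collect uploads whose extension is not whitelisted
  var bad = files.filter(function (f) {
    return allowed.indexOf(f.fd.split('.').pop().toLowerCase()) === -1;
  });

  // remove the disallowed files from disk
  bad.forEach(function (f) {
    fs.unlink(f.fd, function () { /* ignore cleanup errors */ });
  });

  if (bad.length) return res.badRequest('File type not allowed');
  return res.ok({ files: files });
});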

Related

Bulk Renaming Files Based on JSON in Node Recursively

I'm wanting to bulk rename files within a folder based on a JSON file that I have with the following format:
{
  "1": {
    "Filename": "Background-1",
    "New Filename": "Background-1#4"
  },
  "2": {
    "Filename": "Background-2",
    "New Filename": "Background-2#6"
  },
The original Filenames are within a folder structure such as
Background
--Background-1
--Background-2
Other Folder
--Another-Filename
--Another-Filename-2
And so on and so forth. I want to copy the files with the new names, while retaining the name of the folder they're in, over to a new folder.
So far I've tried using fs and klaw-sync to read the filenames, traverse the directories, etc., but it seems wildly inefficient to run through each key and then recursively run through each folder to find a matching file, then rename and copy it. There are over 180 files and ~15 folders.
Any idea how I can approach this better, or any suggestions/examples I could use?
Here's what I've got so far.
Thanks.
// Require Node's File System module
const fs = require('fs');
var path = require('path');
var klawSync = require('klaw-sync');

// Read the JSON file
fs.readFile(__dirname + '/rename_config.json', function (error, data) {
  if (error) {
    console.log(error);
    return;
  }
  const obj = JSON.parse(data);
  // Iterate over the object
  Object.keys(obj).forEach(key => {
    // Create an empty variable to be accessible in the closure
    var paths;
    // The directory that you want to explore
    var directoryToExplore = path.join(__dirname, '../art');
    try {
      paths = klawSync(directoryToExplore);
    } catch (err) {
      console.error(err);
    }
    //console.log(paths);
    //traverse through paths to find an equal name
    //find the path of that equivalent name, then rename to new directory
  });
});
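One way to avoid re-scanning the tree for every JSON key is to invert the problem: build a single filename-to-new-name map from the config, then walk the directory tree once and copy every match. A rough sketch of that idea, assuming the JSON names carry no extension (as in the sample) and using a hypothetical '../renamed' destination folder:
const fs = require('fs');
const path = require('path');
const klawSync = require('klaw-sync');

const config = JSON.parse(fs.readFileSync(path.join(__dirname, 'rename_config.json')));

// Invert the config into one lookup: "Background-1" -> "Background-1#4"
const renameMap = {};
Object.keys(config).forEach(key => {
  renameMap[config[key]['Filename']] = config[key]['New Filename'];
});

const source = path.join(__dirname, '../art');
const dest = path.join(__dirname, '../renamed'); // hypothetical output folder

// Walk the tree once (files only) and copy every file found in the map
klawSync(source, { nodir: true }).forEach(item => {
  const ext = path.extname(item.path);
  const base = path.basename(item.path, ext);
  const newName = renameMap[base];
  if (!newName) return; // not in the config, skip

  // keep the immediate parent folder name (e.g. 'Background')
  const parent = path.basename(path.dirname(item.path));
  const outDir = path.join(dest, parent);
  fs.mkdirSync(outDir, { recursive: true }); // requires Node 10.12+
  fs.copyFileSync(item.path, path.join(outDir, newName + ext));
});
That's one pass over the ~180 files with constant-time lookups, instead of one full traversal per JSON key.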

Adding files in directory to an array

I am really new to node.js. I need to read .json files from a directory and then add them to an array and return it. I am able to read each file separately by passing the address:
const fs = require("fs");

fs.readFile("./fashion/customer.json", "utf8", (err, jsonString) => {
  if (err) {
    console.log("Error reading file from disk:", err);
    return;
  }
  try {
    const customer = JSON.parse(jsonString);
    console.log("Customer address is:", customer.address); // => "Customer address is: Infinity Loop Drive"
  } catch (err) {
    console.log("Error parsing JSON string:", err);
  }
});
But the same fashion folder has multiple json files. I want to add these files to an array and then return it. I tried using readdirSync but that just returned the file names. Is it possible to add json files to an array and return it?
Basically I require an array of this format:
Array[{contents of json file1}, {contents of json file2}, .....]
Any help is appreciated!
Here is a simple solution to your question:
const fs = require("fs");

const jsonFolder = './fashion';
var customerDataArray = [];

fs.readdirSync(jsonFolder).forEach(file => {
  let fileData = JSON.parse(fs.readFileSync(jsonFolder + '/' + file));
  customerDataArray.push(fileData);
});

console.log(customerDataArray);
readdirSync returns an array of all the file names in the directory. You can use forEach to iterate through every item in that array, which in this scenario is a file name. To read the contents of each file, use readFileSync and build the path to the file as the name of the directory plus the name of the file. The data is returned as a Buffer, parsed with JSON.parse(), and then pushed to customerDataArray.
I hope this answers your question!
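One caveat: readdirSync returns every entry in the directory, so if the folder might contain anything other than JSON files, it's safer to filter by extension first. A small variation of the same idea (the .json filter and path.join are my additions):
const fs = require('fs');
const path = require('path');

const jsonFolder = './fashion';
const customerDataArray = fs.readdirSync(jsonFolder)
  .filter(file => path.extname(file) === '.json') // skip non-JSON entries
  .map(file => JSON.parse(fs.readFileSync(path.join(jsonFolder, file), 'utf8')));

console.log(customerDataArray);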

How to use imagemin with busboy to compress images

I want to use the following library to compress images
https://github.com/imagemin/imagemin
The problem is, when the user uploads using a form, how do I plug the file details from the form into imagemin? For example, if the file form field is called example-image, how do I feed that form field into imagemin so that it can compress the images?
I tried:
(req is the Express/Node.js request object)
var filebus = new Busboy({ headers: req.headers }),
    promises = [];

filebus.on('file', function (fieldname, file, filename, encoding, contentType) {
  var fileBuffer = new Buffer(0),
      s3ImgPath;
  if (file) {
    file.on('data', function (d) {
      fileBuffer = Buffer.concat([fileBuffer, d]);
    }).on('end', function () {
      Imagemin.buffer(fileBuffer, {
        plugins: [
          imageminMozjpeg(),
          imageminPngquant({ quality: '80' })
        ]
      }).then(function (data) {
        console.log(data[0]);
        if (s3ImgPath) {
          promises.push(this.pushImageToS3(s3ImgPath, data[0].data, contentType));
        }
      });
    }.bind(this));
  }
});
But the problem is that I'd rather have a buffer of the file that I can upload to S3. I don't want to write the files to a build/images folder. I want to get a buffer for the file, compress it, and upload that buffer to S3. How can I use imagemin to get a buffer of the file uploaded via the HTML form and upload that to S3?
The documentation for the output parameter shows that it is optional (though admittedly, the function declaration did not, which might be confusing).
output
Type: string
Set the destination folder to where your files will be written. If no destination is specified no files will be written.
Therefore, you can opt out of writing the files to storage and just use the output in memory:
imagemin([file.path], {
  plugins: [
    imageminMozjpeg(),
    imageminPngquant({ quality: '65-80' })
  ]
}).then(files => {
  // upload file to S3
});
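Since busboy already gives you the file in memory, you can also stay entirely buffer-based: imagemin.buffer() resolves with a compressed Buffer that can go straight to S3 without touching disk. A sketch of that route, assuming the aws-sdk v2 client (the bucket name and key below are placeholders, and fileBuffer, filename and contentType come from the busboy 'file' handler above):
const imagemin = require('imagemin');
const imageminMozjpeg = require('imagemin-mozjpeg');
const imageminPngquant = require('imagemin-pngquant');
const AWS = require('aws-sdk');

const s3 = new AWS.S3();

imagemin.buffer(fileBuffer, {
  plugins: [
    imageminMozjpeg(),
    imageminPngquant({ quality: '65-80' })
  ]
}).then(compressed => {
  // compressed is a Buffer: upload it directly, no build/images folder needed
  return s3.upload({
    Bucket: 'my-bucket',           // placeholder
    Key: 'images/' + filename,     // placeholder
    Body: compressed,
    ContentType: contentType
  }).promise();
});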

Create an archive and add files to it before sending to the client

I'd like to create an archive with archiver and put some files in it. On the client side, a user will click a button, and the archive will be generated on the server side.
I'm using Express.js; this is my server-side code where the archive is generated. I did something like this:
app.get('/export/siteoccupancy', function (req, res) {
  if (_.isEmpty(req.query)) {
    res.status(404).send('requires a start and end date');
  } else {
    // getting params
    var sDate = req.query.startDate;
    var eDate = req.query.endDate;
  }
  var fs = require('fs');
  var archiver = require('archiver');
  var archive = archiver('zip');
  archive.on('err', function (err) {
    res.status(500).send({ error: err.message });
  });
  res.on('close', function () {
    console.log('Archive size : %d b', archive.pointer());
    return res.status(200).send('OK').end();
  });
  res.attachment('data-export.zip');
  archive.pipe(res);
  var stream = fs.createWriteStream('data-report.txt');
  stream.once('open', function (fd) {
    stream.write('test1');
    stream.write('\n test2');
    stream.write('\n test3');
    stream.end();
  });
  archive.append(stream);
  archive.finalize();
});
This is totally new to me, and I'd like to understand why the console tells me the entry name is empty:
Error: append: entry name must be a non-empty string value
at Archiver.append
Regards
You can only append a read stream, because an archive can only take data from read streams. Use the archive.append method, and pass a second argument with a name property to name the file, like so:
archive.append(myReadStream, { name: 'myTest.txt' });
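Applied to the question's code: the write stream there can't be appended, because a write stream is a data sink, not a source. Either append the text directly as a string with an explicit entry name, or write the file to disk first and append a read stream from it. A minimal sketch of the string approach:
res.attachment('data-export.zip');
archive.pipe(res);

// append text content directly, with an explicit entry name
archive.append('test1\n test2\n test3', { name: 'data-report.txt' });

// or, if the file already exists on disk:
// archive.append(fs.createReadStream('data-report.txt'), { name: 'data-report.txt' });

archive.finalize();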

Meteor/Node writeFile crashes server

I have the following code:
Meteor.methods({
  saveFile: function (blob, name, path, encoding) {
    var path = cleanPath(path), fs = __meteor_bootstrap__.require('fs'),
        name = cleanName(name || 'file'), encoding = encoding || 'binary',
        chroot = Meteor.chroot || 'public';
    // Clean up the path. Remove any initial and final '/' -we prefix them-,
    // any sort of attempt to go to the parent directory '..' and any empty directories in
    // between '/////' - which may happen after removing '..'
    path = chroot + (path ? '/' + path + '/' : '/');

    // TODO Add file existence checks, etc...
    fs.writeFile(path + name, blob, encoding, function (err) {
      if (err) {
        throw (new Meteor.Error(500, 'Failed to save file.', err));
      } else {
        console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
      }
    });

    function cleanPath(str) {
      if (str) {
        return str.replace(/\.\./g, '').replace(/\/+/g, '').
          replace(/^\/+/, '').replace(/\/+$/, '');
      }
    }
    function cleanName(str) {
      return str.replace(/\.\./g, '').replace(/\//g, '');
    }
  }
});
Which I took from this project
https://gist.github.com/dariocravero/3922137
The code works fine and it saves the file; however, it repeats the call several times, and each time it causes Meteor to reset (using Windows version 0.5.4). The F12 console ends up looking like this: [screenshot omitted]. The Meteor console loops over the startup code each time the 503 happens and repeats the console logs in the saveFile function.
Furthermore, in the target directory the image thumbnail keeps displaying, then displays as broken, then shows a valid thumbnail again, as if fs is writing it multiple times.
Here is the code that calls the function:
"click .savePhoto":function(e, template){
e.preventDefault();
var MAX_WIDTH = 400;
var MAX_HEIGHT = 300;
var id = e.srcElement.id;
var item = Session.get("employeeItem");
var file = template.find('input[name='+id+']').files[0];
// $(template).append("Loading...");
var dataURL = '/.bgimages/'+file.name;
Meteor.saveFile(file, file.name, "/.bgimages/", function(){
if(id=="goodPhoto"){
EmployeeCollection.update(item._id, { $set: { good_photo: dataURL }});
}else{
EmployeeCollection.update(item._id, { $set: { bad_photo: dataURL }});
}
// Update an image on the page with the data
$(template.find('img.'+id)).delay(1000).attr('src', dataURL);
});
},
What's causing the server to reset?
My guess would be that since Meteor has built-in automatic directory scanning that watches for file changes (in order to relaunch the application on the newest code base), the file you are creating is actually what is causing the server reset.
Meteor doesn't scan directories beginning with a dot (so-called "hidden" directories), such as .git for example, so you could use this behaviour to your advantage by saving your files under a dot-directory of your own.
You should also consider using writeFileSync, insofar as Meteor methods are intended to run synchronously (inside node fibers), contrary to the usual node style of asynchronous calls. In this code it's no big deal, but, for example, you couldn't use any Meteor mechanics inside the writeFile callback:
asynchronousCall(function (error, result) {
  if (error) {
    // handle error
  } else {
    // do something with result
    Collection.update(id, result); // error! Meteor code must run inside a fiber
  }
});

var result = synchronousCall();
Collection.update(id, result); // good to go!
Of course there is a way to turn any asynchronous call into a synchronous one using fibers/future, but that's beyond the scope of this question; I recommend watching the EventedMind episode on node Futures to understand this specific area.
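For reference, here is a sketch of that fibers/future pattern applied to the writeFile call above, using the Future API from the fibers package Meteor runs on (the require style matches the question's __meteor_bootstrap__ usage):
var Future = __meteor_bootstrap__.require('fibers/future');

var future = new Future();
fs.writeFile(path + name, blob, encoding, future.resolver());
future.wait(); // blocks the fiber; throws if writeFile reported an error

// back in the fiber: Meteor mechanics are safe to use here
console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);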
