Adding files to multiple folders in s3 - node.js

I want to upload files that are present in tempFolder to 2 different folders of an S3 bucket. My code works as expected for uploading multiple files to foldername1.
tempFolder has 8 files. I have to upload all 8 to foldername1 and only the last 2 files to foldername2 in S3.
How can I add them?
function foo() {
  let tempFolder = // folder path
  return fs.readdirSync(tempFolder).map((file) => {
    // use a different name to avoid shadowing the `path` module
    let filePath = path.join(tempFolder, file);
    let awsPath1 = // s3 bucket foldername1
    let awsPath2 = // s3 bucket foldername2
    let total = fs.statSync(filePath);
    return s3Upload(awsPath1, () => fs.createReadStream(filePath), total.size)
      .then(() => {
        s3Uploaded.push("https://aws.amazon.com/s3/bucketname/" + awsPath1);
      });
  });
}
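One way to do it (a sketch only, reusing the s3Upload(awsPath, streamFactory, size) helper and the s3Uploaded array from the snippet above; the folder paths and function name are placeholders): upload every file to foldername1, queue a second upload to foldername2 only for the last two entries, and wait on everything with Promise.all.
function uploadToBothFolders() {
  let tempFolder = "/path/to/tempFolder";   // placeholder local folder
  let awsPath1 = "foldername1";             // placeholder s3 folder / key prefix
  let awsPath2 = "foldername2";             // placeholder s3 folder / key prefix
  let files = fs.readdirSync(tempFolder);
  return Promise.all(files.map((file, index) => {
    let filePath = path.join(tempFolder, file);
    let size = fs.statSync(filePath).size;
    // every file goes to foldername1
    let uploads = [s3Upload(awsPath1, () => fs.createReadStream(filePath), size)];
    // only the last 2 entries also go to foldername2
    if (index >= files.length - 2) {
      uploads.push(s3Upload(awsPath2, () => fs.createReadStream(filePath), size));
    }
    return Promise.all(uploads).then(() => {
      s3Uploaded.push("https://aws.amazon.com/s3/bucketname/" + awsPath1);
    });
  }));
}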

Related

How to gzip a directory/folder using the pako module in Node.js?

I am trying to gzip my folder with the help of the pako library. I couldn't find any related content about it. Can someone explain how to use pako to gzip a directory? I am using it in my lambda function along with EFS.
let bundle = fs.readdirSync(tempDir);
let zippedFile = pako.gzip(bundle);
My folder location looks like this: data/temp/
Error:
TypeError: strm.input.subarray is not a function
The problem is that fs.readdirSync() returns an array of file names, not file contents, so that is what pako is being asked to gzip. You can use fs.readdirSync() to list the files in the directory, check each entry with fs.lstatSync() to make sure it is a regular file, read its contents, and then compress them. I have tested this locally on my Mac.
Then you can write the compressed data to your file system.
const fs = require('fs');
const pako = require('pako');

let files = fs.readdirSync('/tmp/');
let fileContent = '';

files.forEach(file => {
  let path = '/tmp/' + file;
  let stats = fs.lstatSync(path);
  // only read regular files, skip subdirectories etc.
  if (stats.isFile()) {
    let data = fs.readFileSync(path);
    fileContent += data;
  }
});

// compress the concatenated contents and write them out
let zippedFile = pako.gzip(fileContent);
const stream = fs.createWriteStream('/tmp/dir.gz');
stream.write(zippedFile);
stream.end();
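Note that building one big string only round-trips cleanly for text files. If the directory may contain binary files, a variant of the same idea (a sketch, assuming the same /tmp/ layout) is to keep the raw bytes and pass pako a single Buffer, which it accepts since Node Buffers are Uint8Arrays:
const fs = require('fs');
const pako = require('pako');

const dir = '/tmp/';
// collect the raw bytes of every regular file in the directory
const buffers = fs.readdirSync(dir)
  .map((file) => dir + file)
  .filter((p) => fs.lstatSync(p).isFile())
  .map((p) => fs.readFileSync(p));

// gzip the concatenated bytes and write the result out
const zipped = pako.gzip(Buffer.concat(buffers));
fs.writeFileSync(dir + 'dir.gz', zipped);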

Load random file from directory

So I'm building a game and I want to choose a random file from a directory as a map. I found this old topic which gave the answer
const randomFile = require('random-file')
const dir = '/tmp/whatever'
randomFile(dir, (err, file) => {
console.log(`The random file is: ${file}.`)
})
but it seems that fs is no longer in use there, only fs.realpath.
It's pretty simple:
const fs = require("fs");
const path = require("path");
fs.readdir(path.join(process.cwd(), "maps"), (err, files) => {
console.log(err, files)
let max = files.length - 1;
let min = 0;
let index = Math.round(Math.random() * (max - min) + min);
let file = files[index];
console.log("Random file is", file);
});
In less than 20 lines.
Why do people reach for an external module for every simple task?!
Not to mention that the package doesn't do what you want anyway...
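The same thing with promises, in case the surrounding game code is async (a small sketch; the "maps" directory name is taken from the answer above):
const fs = require("fs/promises");
const path = require("path");

async function randomMap() {
  const files = await fs.readdir(path.join(process.cwd(), "maps"));
  // uniform random pick over the directory listing
  return files[Math.floor(Math.random() * files.length)];
}

randomMap().then((file) => console.log("Random file is", file));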

Read files from input dir, apply the regex, write all files to output dir all at once

I have two folders, an input and an output folder, with many text files in the below format. How do I read all the files from the input folder, run the regex, and write all the updated files to the output folder? I am using Node.js.
Input: [$.Carpool[0].NoofSeats], [$.Carpool[1].NoofSeats]
Regex: str = str.replace(/\.[A-Z]/g, (m0) => m0.toLowerCase());
So after the replace, the updated text file should be:
Output: [$.carpool[0].noOfSeats], [$.carpool[1].noOfSeats]
So far I have got to reading files from the directory:
const fs= require("fs");
let directory = "Input" // Desktop/Input
let files = fs.readdirSync(directory)
console.log(files);
You want to loop through the files; assuming the contents are UTF-8 text files, here is an example.
You use fs.readFile to read each file after listing the directory.
Then use fs.writeFile to write a new file with the updated contents.
The example below joins the input directory with ${f} (the filename that was read) when reading, and writes to the output directory under the same ${f} filename.
const fs = require("fs");
// Directory
let directory = "/";
// Files
let files = fs.readdirSync(directory);
// Loop through the files
files.forEach(f => {
// Read the contents in UTF-8 format
fs.readFile(f, 'utf8', function(err, contents) {
if (err) { console.log(err); }
// Output Contents
console.log(contents);
// Perform regex here
contents = contents.replace(/\.[A-Z]/g, (m0) => m0.toLowerCase());
// Write new file to path /new, with contents
fs.writeFile(`/directory/${f}`, contents, function(err) {
if (err) {
// Error writing
return console.log(err);
}
console.log("The file was saved!");
});
});
});
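Since the question asks for all files to be handled at once, here is a sketch of the same logic with fs/promises and Promise.all (the Input and Output folder names come from the question; the function name is just for illustration):
const fs = require("fs/promises");
const path = require("path");

async function transformAll(inputDir = "Input", outputDir = "Output") {
  const files = await fs.readdir(inputDir);
  // read, transform, and write every file concurrently
  await Promise.all(files.map(async (f) => {
    let contents = await fs.readFile(path.join(inputDir, f), "utf8");
    contents = contents.replace(/\.[A-Z]/g, (m0) => m0.toLowerCase());
    await fs.writeFile(path.join(outputDir, f), contents);
  }));
}

transformAll().catch(console.error);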

Some files missing in zip archive

I am adding PDF files created by wkhtmltopdf to archiver. My purpose is to create a zip file of all the PDFs. I am posting the zip to an S3 bucket. But in the bucket, some files are missing from the zip file. Randomly, some of the files in the zip come out empty.
for (var i = 1; i <= dataCount; i++) {
var output_filename = 'testing' +'_'+i+'.pdf';
const html = "sdfsdfsdf";
var obg= wkhtmltopdf(html, wkhtmltopdfOptions);
var results = [];
archive.append(obg, { name: output_filename });
} // closing the for loop
archive.finalize();
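One way to narrow this down (a sketch only, reusing the archive, dataCount, wkhtmltopdf and wkhtmltopdfOptions variables from the snippet above): buffer each PDF stream completely before appending it, so a stream that errors or ends early surfaces as an explicit failure instead of an empty zip entry. archiver accepts Buffers as well as streams.
function streamToBuffer(stream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    stream.on("data", (chunk) => chunks.push(chunk));
    stream.on("error", reject);
    stream.on("end", () => resolve(Buffer.concat(chunks)));
  });
}

async function buildArchive() {
  for (let i = 1; i <= dataCount; i++) {
    const output_filename = 'testing' + '_' + i + '.pdf';
    const html = "sdfsdfsdf";
    // wait for the whole PDF before handing it to archiver
    const pdf = await streamToBuffer(wkhtmltopdf(html, wkhtmltopdfOptions));
    archive.append(pdf, { name: output_filename });
  }
  archive.finalize();
}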

Check uploaded file extension in Sails js

How can we check the uploaded file extension in Sails.js?
I tried skipper and multer but got no result.
Any suggestions?
You should use the saveAs option for each file before saving.
var md5 = require('md5');

module.exports = {
  testUpload: function (req, res) {
    // setting allowed file types
    var allowedTypes = ['image/jpeg', 'image/png'];
    // skipper's default upload directory is .tmp/uploads/
    var allowedDir = "../../assets/images";
    // do not define dirname, use the default path
    req.file("uploadFiles").upload({
      saveAs: function (file, cb) {
        var d = new Date();
        var extension = file.filename.split('.').pop();
        // generating a unique filename with extension
        var uuid = md5(d.getMilliseconds()) + "." + extension;
        // separate allowed and disallowed file types
        if (allowedTypes.indexOf(file.headers['content-type']) === -1) {
          // save disallowed files to the default upload path
          cb(null, uuid);
        } else {
          // save allowed files to the images directory
          cb(null, allowedDir + "/" + uuid);
        }
      }
    }, function whenDone(err, files) {
      return res.json({
        files: files,
        err: err
      });
    });
  }
};
Just get the uploaded files array and check the last chunk of the string after the dot.
req.file('file').upload({
  maxBytes: 2000000,
  dirname: 'uploadFolder'
}, function (error, files) {
  if (error) return sails.log.error(error);
  // You have a files array, so you can do this
  files[0].fd.split('.').pop(); // You get the extension
});
What is going on here? When the upload is finished you get an array of files with their filenames. You can read each entry of that array to see where the file is located (its full path).
The last thing is splitting the string on dots and taking the last item of the resulting array with the pop() method.
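For what it's worth, Node's built-in path.extname does the same split-and-pop in one call; a small sketch of an extension check you could run inside the whenDone callback (the whitelist below is just an example):
const path = require('path');

// example whitelist of accepted extensions
const allowed = ['.jpg', '.jpeg', '.png'];

function hasAllowedExtension(file) {
  // file.fd is the full path skipper reports for the stored file
  return allowed.includes(path.extname(file.fd).toLowerCase());
}

// e.g. inside whenDone: files.filter(hasAllowedExtension)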
