node.js - Archiving folder using archiver generate an empty zip - node.js

I am trying to archive a folder using archiver. The folder I'd like to archive looks like this:
Project
| app.js
| tmp
|
folderToArchive
│file1.txt
│file2.txt
│file3.txt
My server-side code, where the zip file is generated, looks like this:
// Express route: zip ./tmp/<folder> into ./tmp/<folder>.zip and (intends to)
// signal completion via a Promise. (Question code quoted verbatim.)
var archiver = require("archiver");
app.get("/download/:folder", (req, res) => {
// Folder name comes straight from the URL parameter.
var FolderName = req.params.folder;
var zipName = FolderName + ".zip";
var source = path.join(__dirname, "tmp", FolderName);
var out = path.join(__dirname, "tmp", zipName);
const archive = archiver('zip', { zlib: { level: 9 }});
const stream = fs.createWriteStream(out);
return new Promise((resolve, reject) => {
archive
.directory(source, false)
.on('error', err => reject(err))
.pipe(stream)
;
stream.on('close', () => resolve());
archive.finalize();
// NOTE(review): this logs before the archive is actually finished —
// 'zip file created' is printed as soon as finalize() is *called*.
console.log("zip file created");
});
// NOTE(review): an empty zip usually means `source` did not match any
// files on disk (archiver silently produces an empty archive for a
// missing directory) — verify the tmp/<folder> path. TODO confirm.
});
The issue is when i run the app it will create an empty zip file in the right destination.
This is totally new to me, and I'd like to understand why the zip file is empty.
Regards

It could likely be the same reason I had an empty archive: the main process ends before the archive gets written to the zip.
Hence it should be wrapped in a promise, something like this:
/**
 * Zip the contents of `source` into a zip file at `dest`.
 * Resolves only after the output file has been fully written (the write
 * stream's 'close' event), so callers can await it before exiting.
 *
 * @param {string} source - path of the directory to archive
 * @param {string} dest - path of the .zip file to create
 * @returns {Promise<void>} rejects on any archiver or stream error
 */
async function zipDirectory(source, dest) {
  const stream = fs.createWriteStream(dest);
  const archive = archiver('zip', { zlib: { level: 9 } });

  await new Promise((resolve, reject) => {
    // Reject instead of throwing: a `throw` inside an 'error' event
    // listener cannot be caught by this promise (or by the caller's
    // try/catch) — it would crash the process rather than reject.
    archive.on('error', reject);
    stream.on('error', reject);
    // The archive is only complete once the file on disk is closed.
    stream.on('close', () => {
      console.log(`zipped ${archive.pointer()} total bytes.`);
      resolve();
    });

    archive.pipe(stream);
    archive.directory(source, false);
    archive.finalize();
  });
}

Tested the following code and it works:
// Standalone script that zips ./tmp/fol into ./tmp/fol.zip.
// (Answer code quoted verbatim — reported as tested and working.)
const path = require('path');
const archiver = require('archiver');
const fs = require('fs');
const FolderName = "fol";
const zipName = FolderName + ".zip";
const source = path.join(__dirname, "tmp", FolderName);
const out = path.join(__dirname, "tmp", zipName);
const archive = archiver('zip', { zlib: { level: 9 }});
const stream = fs.createWriteStream(out);
archive
.directory(source, false)
// NOTE(review): throwing inside an 'error' listener is uncatchable by
// surrounding code — prefer a rejecting promise in production code.
.on('error', err => {throw err;})
.pipe(stream);
// 'close' fires once the zip has been fully flushed to disk.
stream.on('close', () => console.log("closed"));
archive.finalize();
// Logged before the archive actually finishes writing.
console.log("zip file created");
So I suggest:
const path = require('path');
const archiver = require('archiver');
const fs = require('fs');

/**
 * Create tmp/<folderName>.zip from the directory tmp/<folderName>.
 *
 * @param {string} folderName - name of the folder under ./tmp to archive
 * @returns {Promise<string>} resolves with the path of the finished zip
 *   once the write stream has closed; rejects on archiver/stream errors
 */
const archive = (folderName) => {
  const zipName = folderName + ".zip";
  const source = path.join(__dirname, "tmp", folderName);
  const out = path.join(__dirname, "tmp", zipName);

  const zip = archiver('zip', { zlib: { level: 9 } });
  const stream = fs.createWriteStream(out);

  return new Promise((resolve, reject) => {
    // Reject instead of throwing: a throw inside an 'error' listener
    // cannot be caught by the caller and would crash the process.
    zip.on('error', reject);
    stream.on('error', reject);
    // The zip is only complete once the output file is closed.
    stream.on('close', () => resolve(out));

    zip.directory(source, false).pipe(stream);
    zip.finalize();
  });
};

app.get("/download/:folder", async (req, res) => {
  try {
    const out = await archive(req.params.folder);
    // Send a response — the original handler never replied, leaving
    // the HTTP client hanging until timeout.
    res.download(out);
  } catch (err) {
    res.status(500).send(err.message);
  }
});

Related

How can I use Readable() constructor to read from a text file and write it in a Writable() constructor?

I am trying to read from a text file using new Readable() constructor,but I don't know how to make the code receive it as a file path. I would like the result to be displayed in the console using new Writable() constructor.
// Question code quoted verbatim: tries to pipe a text file through
// hand-built Readable/Writable streams.
const fs = require('fs');
const {Readable} = require('stream');
const {Writable} = require('stream');
// NOTE(review): this is the *path* to the file, not its contents.
const userData = __dirname + '/files/data.txt';
const rStream = new Readable({
read() {}
});
const wStream = new Writable({
write(chunk, encoding, callback) {
console.log("Readable data: ", chunk.toString());
callback();
}
});
rStream.pipe(wStream);
// NOTE(review): this pushes the path *string* into the stream, not the
// file's contents — to stream the file, its data must be read (e.g. via
// fs.createReadStream or fs.readFile) and pushed instead. TODO confirm intent.
rStream.push(userData);
// NOTE(review): the stream is never ended with rStream.push(null), so
// 'close' would not fire naturally.
rStream.on('close', () =>
wStream.end());
wStream.on('close', () =>
console.log('No more data in file...')
);
rStream.on('error', (err) =>
console.log('Readable error!', err.message)
);
wStream.on('error', (err) =>
console.log('Writable error!', err.message)
);
// NOTE(review): destroy() tears the stream down immediately, racing the
// asynchronous pipe above — likely why output is missing/truncated.
rStream.destroy();

Upload multiple files with busboy?

I can upload one file, but I can't seem to upload multiple, it gets stuck on the first file.
Client
// Bundle the record metadata and every pending file into a single
// multipart payload and POST it to the files endpoint.
async function uploadFiles() {
  const formData = new FormData();
  formData.append("recordUid", recordUid);
  formData.append("fieldUid", fieldUid);
  // Each file goes under the same field name, producing a multi-file upload.
  for (const file of pendingUploadFiles) {
    formData.append("uploaded_files", file);
  }
  await fetchPostFiles("/api/files", formData);
}
Server
// Question code quoted verbatim: busboy route saving each uploaded file
// to disk with a timestamped name.
const busboy = require("busboy");
const path = require("path");
const fs = require("fs");
router.post("/api/files", async (req, res, next) => {
// NOTE(review): as quoted, this `try` has no matching `catch`/`finally`
// before the handler closes — the snippet is syntactically incomplete.
try {
let bb = busboy({
headers: req.headers,
limits: {
fileSize: 10 * 1024 * 1024, // 10 mb
},
});
// NOTE(review): in busboy v1 the handler signature is (name, stream, info);
// here the third parameter (named `filename`) is actually the info object,
// which is why `filename.filename` below works.
bb.on("file", (fieldname, file, filename, encoding, mimetype) => {
let parts = filename.filename.split(".");
let name = parts[0];
let extension = parts[parts.length - 1]; // without the . from .jpeg
let finalName = `${name}-${+new Date()}.${extension}`;
let saveTo = `${filesFolderPath}${finalName}`;
// Open writeable stream to path
let writeStream = fs.createWriteStream(saveTo);
// Pipe the file to the opened stream
file.pipe(writeStream);
// Check for errors
writeStream.on("error", (err) => {
console.log(err);
});
});
// NOTE(review): 'finish' fires when parsing ends, not when every
// writeStream has flushed; no HTTP response is ever sent to the client.
bb.on("finish", function () {
console.log("uploaded");
});
return req.pipe(bb);
});

read files and read directory in node.js

I want to know whether the read-file and read-directory functions — fs.readdir(path, callback) and fs.readFile(path, options, callback) — have equivalent versions without callbacks. Here, I first read all the files in the given directory, then loop through them and upload their content to an S3 bucket.
Please see the working code below.
/**
 * Express handler: read every file in the reports directory and upload
 * each to S3 (delegated to readFiles1). Unused locals removed.
 */
const s3Upload = async (req, res) => {
  // Trailing slash matters: readFiles1 concatenates dirname + filename
  // without inserting a separator.
  const directoryName = "MAXIS_GAMING/Daily/";
  await readFiles1(directoryName);
  // NOTE(review): no HTTP response is sent here — confirm whether the
  // caller expects one.
};
/**
 * Read every file in `dirname` (non-recursive) and upload its contents
 * to S3 via uploadFiles, one file at a time.
 *
 * Uses the fs promise API so that `await readFiles1(...)` genuinely waits
 * for all work — the original callback-based version returned before any
 * I/O had happened, and errors thrown inside the callbacks were uncatchable.
 *
 * @param {string} dirname - directory path; must end with a separator,
 *   since file paths are built as `dirname + filename`.
 */
const readFiles1 = async (dirname) => {
  // files is an array of entry names, excluding '.' and '..'.
  const files = await fs.promises.readdir(dirname);
  console.log(files.join(' '));
  for (const filename of files) {
    const content = await fs.promises.readFile(dirname + filename, 'utf-8');
    console.log('cont..............................', content);
    console.log('filename', filename);
    // Sequential uploads; errors now propagate to the caller.
    await uploadFiles(filename, content);
  }
};
/**
 * Upload one file's contents to the S3 bucket.
 * Resolves once S3 has accepted the object; rejects on failure (the
 * original threw inside the s3.upload callback, which nothing could catch).
 *
 * @param {string} fileName - S3 object key
 * @param {string|Buffer} fileContent - object body
 */
const uploadFiles = async (fileName, fileContent) => {
  console.log('in uploadd..........');
  // SECURITY: hard-coded AWS credentials committed to source. These keys
  // should be revoked and loaded from the environment or an IAM role.
  const GLOBAL_ACCESS_KEY_ID = 'AKIDAQWZX6B3XUBDIFHLPC5LYFTJF15XPIQ';
  const GLOBAL_SECRET_ACCESS_KEY = 'Sv4Fe4h4QgErG5XoZbgeC63oczkdW3bMQfC0jvyR8bPbJ9Y97k+';
  const GLOBAL_DEFAULT_REGION = 'ap-southeast-1';
  const S3_IMAGE_BUCKET = 'max-stg-image/stage/reports'; //"max-stg-image";

  AWS.config.update({
    accessKeyId: GLOBAL_ACCESS_KEY_ID,
    secretAccessKey: GLOBAL_SECRET_ACCESS_KEY,
    region: GLOBAL_DEFAULT_REGION,
  });

  // One client is enough — the second, unused `bucket` client was removed.
  const s3 = new AWS.S3();
  const params = {
    Bucket: S3_IMAGE_BUCKET,
    Key: fileName,
    Body: fileContent,
  };

  // .promise() lets this async function actually await the upload and
  // surface errors to the caller instead of throwing in a callback.
  const data = await s3.upload(params).promise();
  console.log(`File uploaded successfully. ${data.Location}`);
};

app.get('/home/s3Upload', s3Upload);
You can do something like this:
// Promise-based directory listing — no callback needed.
// NOTE: top-level `await` as shown requires an ES module context.
import { readdir } from 'fs/promises';
//or with require
const readdir = require('fs/promises').readdir;
try {
const files = await readdir(path);
for (const file of files)
console.log(file);
} catch (err) {
console.error(err);
}
Check here all the promise API provided by FS in Node.js.

Uploads multiples images with multiples files field

I've been having a hard time for the last few days figuring out how to upload multiple images to Firebase with busboy.
I want to use 3 fields with 3 different images. so I can store it in one folder.
I also want the image to have the field name
I found one topic that helped me use Promise.all and forEach, but it didn't work out for me.
storing all files in an array
// Question code quoted verbatim: collect uploaded images into an array
// while writing each to a temp file.
// NOTE(review): this shadows the built-in global Promise with the
// third-party 'promise' package — unnecessary in modern Node.
var Promise = require('promise');
const Busboy = require("busboy");
const fs = require("fs");
const os = require("os");
const path = require("path");
const busboy = new Busboy({ headers: req.headers });
let imageToAdd = {};
let imagesToUpload = []
// NOTE(review): a single shared variable — overwritten on every 'file'
// event, so only the *last* filename survives (the bug the answer fixes).
let newFileName;
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
const imageExtension = filename.split('.')[filename.split('.').length - 1];
filename = `${fieldname}.${imageExtension} `
newFileName = filename
const filepath = path.join(os.tmpdir(), filename);
imageToAdd = { file: filepath, type: mimetype };
file.pipe(fs.createWriteStream(filepath));
imagesToUpload = [...imagesToUpload, imageToAdd]
});
loop over the files array and store the promises in a new array
then wait all the promise to resolve with Promise.all
// Question code quoted verbatim: upload each temp file to Firebase storage.
busboy.on("finish", () => {
let promises = []
imagesToUpload.forEach((imageToBeUploaded) => {
promises.push(
admin
.storage()
.bucket(`${config.storageBucket}`)
.upload(imageToBeUploaded.file, {
resumable: false,
// NOTE(review): every upload targets the same shared newFileName, so
// each one overwrites the previous — only the last image survives.
destination: `projectname/${newFileName}`,
metadata: {
metadata: {
contentType: imageToBeUploaded.type,
}
}
})
)
})
Promise.all(promises)
// NOTE(review): `res` here is the Promise.all result array, shadowing
// the Express response object — res.status is not a function on it.
.then(res => {
res.status(200).json({msg: 'Successfully uploaded all images')
})
.catch(err => {
res.status(500).json({error: err.code})
})
})
busboy.end(req.rawBody);
})
Only the last image is stored in my firebase storage.
Can someone help me with this?
thanks
You only have one newFileName in your code, while you have an array of imagesToUpload. So you're uploading each of those images to the same newFileName and end up with whichever of the uploads completes last.
You'll want to keep an array of newFileNames, to match up with the array of imagesToUpload.
Something like:
// Answer code: keep a per-file name array alongside the images array so
// each upload can be given its own destination.
const busboy = new Busboy({ headers: req.headers });
let imageToAdd = {};
let imagesToUpload = []
// One entry per uploaded file, index-aligned with imagesToUpload.
let newFileNames = [];
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
const imageExtension = filename.split('.')[filename.split('.').length - 1];
// Rename to <fieldname>.<ext> so the stored file carries the field name.
filename = `${fieldname}.${imageExtension} `
const filepath = path.join(os.tmpdir(), filename);
imageToAdd = { file: filepath, type: mimetype };
file.pipe(fs.createWriteStream(filepath));
imagesToUpload.push(imageToAdd);
newFileNames.push(filename);
});
...
busboy.on("finish", () => {
// Build one upload promise per image. The map callback MUST return the
// promise: with a braced arrow body and no `return`, every element of
// `promises` would be undefined and Promise.all would resolve before a
// single upload had finished.
let promises = imagesToUpload.map((imageToBeUploaded, index) =>
  admin
    .storage()
    .bucket(`${config.storageBucket}`)
    .upload(imageToBeUploaded.file, {
      resumable: false,
      // Index-aligned name array: each image keeps its own destination.
      destination: `projectname/${newFileNames[index]}`,
      metadata: {
        metadata: {
          contentType: imageToBeUploaded.type,
        }
      }
    })
)
Promise.all(promises)
...

Streaming image data from node server results in corrupted file (gridfs-stream)

I decided to post this after extensive searching here (1, 2, 3 ) and here (1, 2) and many, many other related posts. I am losing hope, but will not give up that easily :)
I'm using multer to upload a PNG image to mongo database:
// multer-gridfs-storage configuration: every uploaded file gets a random
// 16-byte hex name (collision-safe) and is stored in the 'media' bucket
// with the owning client's id recorded in metadata.
const storage = new GridFsStorage({
url: 'mongodb://my_database:thisIsfake#hostName/my_database',
file: (req, file) => {
return new Promise((resolve, reject) => {
crypto.randomBytes(16, (err, buf) => { // generating unique names to avoid duplicates
if (err) {
return reject(err);
}
// Random hex name + original extension, e.g. 9f3a....png
const filename = buf.toString('hex') + path.extname(file.originalname);
const fileInfo = {
filename: filename,
bucketName: 'media',
metadata : {
clientId : req.body.client_id // added metadata to have a reference to the client to whom the image belongs
}
};
resolve(fileInfo);
});
});
}
});
// Accepts exactly one file under the form field 'image'.
const upload = multer({storage}).single('image');
Then I create a stream and pipe it to response:
loader: function (req, res) {
var conn = mongoose.createConnection('mongodb://my_database:thisIsfake#hostName/my_database');
conn.once('open', function () {
var gfs = Grid(conn.db, mongoose.mongo);
gfs.collection('media');
gfs.files.find({ metadata : {clientId : req.body.id}}).toArray(
(err, files) => {
if (err) throw err;
if (files) {
const readStream = gfs.createReadStream(files[0].filename); //testing only with the first file in the array
console.log(readStream);
res.set('Content-Type', files[0].contentType)
readStream.pipe(res);
}
});
});
}
Postman POST request to end point results in response body being displayed as an image file:
In the front end I pass the response in a File object, read it and save the result in a src attribute of img:
findAfile(){
let Data = {
id: this.$store.state.StorePatient._id,
};
console.log(this.$store.state.StorePatient._id);
visitAxios.post('http://localhost:3000/client/visits/findfile', Data )
.then(res => {
const reader = new FileReader();
let file = new File([res.data],"image.png", {type: "image/png"});
console.log('this is file: ',file);
reader.readAsDataURL(file); // encode a string
reader.onload = function() {
const img = new Image();
img.src = reader.result;
document.getElementById('imgContainer').appendChild(img);
};
})
.catch( err => console.error(err));
}
My File object is similar to the one I get when using input field only bigger:
This is original file:
When inspecting element I see this:
Looks like data URI is where it should be, but it's different from the original image on file input:
Again, when I want to display it through input element:
onFileSelected(event){
this.file = event.target.files[0];
this.fileName = event.target.files[0].name;
const reader = new FileReader();
console.log(this.file);
reader.onload = function() {
const img = new Image();
img.src = reader.result;
document.getElementById('imageContainer').appendChild(img);
};
reader.readAsDataURL(this.file);
}
I get this:
But when reading it from the response, it is corrupted:
Postman gets it right, so there must be something wrong with my front-end code, right? How do I pass this gfs stream to my html?
I managed to make a POST request to fetch an image from MongoDB and save it in the server dir:
// Copy the stored GridFS image to a fixed path under ./uploads so it can
// be served by a plain file route.
const readStream = gfs.createReadStream(files[0].filename);
const wstream = fs.createWriteStream(path.join(__dirname,"uploads", "fileToGet.jpg"));
readStream.pipe(wstream);
Then, I just made a simple GET request using the file's absolute path, and finally deleted the file after a successful response:
// Serve the previously written file, then delete it once fully streamed.
// NOTE(review): as quoted, the app.get callback is never closed — the
// snippet is incomplete. `mime` and `dir` are presumably defined
// elsewhere in the file; verify.
app.get('/image', function (req, res) {
var file = path.join(dir, 'fileToGet.jpg');
// Path-containment check: reject anything that escapes `dir`.
if (file.indexOf(dir + path.sep) !== 0) {
return res.status(403).end('Forbidden');
}
// Look up the content type by extension; fall back to plain text.
var type = mime[path.extname(file).slice(1)] || 'text/plain';
var s = fs.createReadStream(file);
s.on('open', function () {
res.set('Content-Type', type);
s.pipe(res);
});
// Remove the temp file only after the full contents have been read.
s.on('end', function () {
fs.unlink(file, ()=>{
console.log("file deleted");
})
});
s.on('error', function () {
res.set('Content-Type', 'text/plain');
res.status(404).end('Not found');
});

Resources