Upload multiple images on Firebase using NodeJS and Busboy

I created a function for uploading a single image to Firebase using NodeJS and Busboy, which returns the image URL. The only allowed image extensions are .jpg and .png. It generates a random filename and builds the file path with the storageBucket.
However, I am struggling to refactor this function so that I can upload multiple images. I tried several approaches, but no luck. It should return an array of image URLs once all images have been uploaded successfully.
Here is my function with the single image upload:
const { admin, db } = require("./admin");
const config = require("./config");

exports.uploadImage = (req, res, url, folder) => {
  const BusBoy = require("busboy");
  const path = require("path");
  const os = require("os");
  const fs = require("fs");

  const busboy = new BusBoy({ headers: req.headers });

  let imageFileName;
  let imageToBeUploaded = {};

  busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
    if (mimetype !== "image/jpeg" && mimetype !== "image/png") {
      return res.status(400).json({ error: "Wrong file type submitted!" });
    }
    // Getting extension of any image
    const imageExtension = filename.split(".")[filename.split(".").length - 1];
    // Setting filename
    imageFileName = `${Math.round(Math.random() * 1000000000)}.${imageExtension}`;
    // Creating path
    const filepath = path.join(os.tmpdir(), imageFileName);
    imageToBeUploaded = { filepath, mimetype };
    file.pipe(fs.createWriteStream(filepath));
  });

  busboy.on("finish", () => {
    admin
      .storage()
      .bucket()
      .upload(imageToBeUploaded.filepath, {
        destination: `${folder}/${imageFileName}`,
        resumable: false,
        metadata: {
          metadata: {
            contentType: imageToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o${folder}%2F${imageFileName}?alt=media`;
        if (url === `/users/${req.user.alias}`) {
          return db.doc(`${url}`).update({ imageUrl });
        } else {
          return res.json({ imageUrl });
        }
      })
      .then(() => {
        return res.json({
          message: "Image uploaded successfully!"
        });
      })
      .catch(err => {
        console.log(err);
        return res.status(500).json({ error: err.code });
      });
  });

  busboy.end(req.rawBody);
};
Any suggestions on how to move on?

You've almost got the code done; all you have to do is create an array of promises and wait for them all to resolve.
let imageFileName = {}
let imagesToUpload = []
let imageToAdd = {}
//This triggers for each file type that comes in the form data
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
if (mimetype !== "image/jpeg" && mimetype !== "image/png") {
return res
.status(400)
.json({ error: "Wrong file type submitted!" });
}
// Getting extension of any image
const imageExtension = filename.split(".")[
filename.split(".").length - 1
];
// Setting filename
imageFileName = `${Math.round(
Math.random() * 1000000000
)}.${imageExtension}`;
// Creating path
const filepath = path.join(os.tmpdir(), imageFileName);
imageToAdd = {
imageFileName
filepath,
mimetype };
file.pipe(fs.createWriteStream(filepath));
//Add the image to the array
imagesToUpload.push(imageToAdd);
});
busboy.on("finish", () => {
let promises = []
let imageUrls = []
imagesToUpload.forEach(imageToBeUploaded => {
imageUrls.push(`https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o${folder}%2F${imageFileName}?alt=media`)
promises.push(admin
.storage()
.bucket()
.upload(imageToBeUploaded.filepath, {
destination: `${folder}/${imageFileName}`,
resumable: false,
metadata: {
metadata: {
contentType: imageToBeUploaded.mimetype
}
}
}))
})
try{
await Promises.all(resolve)
res.status(200).json({msg: 'Successfully uploaded all images', imageUrls})
}catch(err){ res.status(500).json(err) }
});
busboy.end(req.rawBody);
With that you should be able to upload them all; it's just a matter of putting all the promises inside an array and using the Promise.all method to wait for them to resolve. I did it with async/await because that's how I've been doing it, but I suppose you would have no problem doing it with callbacks.
Also, the code is messy, but that's mostly because I don't know how to use this text editor. I hope you can still understand it 👀
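For reference, here is a minimal sketch of the same finish handler written with plain then/catch callbacks instead of async/await, assuming the promises and imageUrls arrays are built exactly as above:
// Sketch only: same logic as the finish handler above, without async/await
busboy.on("finish", () => {
  // ... build promises and imageUrls as shown above ...
  Promise.all(promises)
    .then(() => {
      return res
        .status(200)
        .json({ msg: "Successfully uploaded all images", imageUrls });
    })
    .catch(err => {
      console.log(err);
      return res.status(500).json({ error: err.code });
    });
});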

Samuel Vera's answer is almost correct. There are some typos and a logic error when pushing to the imageUrls array.
Here is the complete, fixed code:
const BusBoy = require('busboy');
const path = require('path');
const os = require('os');
const fs = require('fs');

let fields = {};
const busboy = new BusBoy({ headers: request.headers });

let imageFileName = {};
let imagesToUpload = [];
let imageToAdd = {};
let imageUrls = [];

busboy.on('field', (fieldname, fieldvalue) => {
  fields[fieldname] = fieldvalue;
});

busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
  if (mimetype !== 'image/jpeg' && mimetype !== 'image/png') {
    return response.status(400).json({ error: 'Wrong file type submitted!' });
  }
  // Getting extension of any image
  const imageExtension = filename.split('.')[filename.split('.').length - 1];
  // Setting filename
  imageFileName = `${Math.round(Math.random() * 1000000000)}.${imageExtension}`;
  // Creating path
  const filepath = path.join(os.tmpdir(), imageFileName);
  imageToAdd = {
    imageFileName,
    filepath,
    mimetype,
  };
  file.pipe(fs.createWriteStream(filepath));
  // Add the image to the array
  imagesToUpload.push(imageToAdd);
});

busboy.on('finish', async () => {
  let promises = [];
  imagesToUpload.forEach((imageToBeUploaded) => {
    imageUrls.push(
      `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imageToBeUploaded.imageFileName}?alt=media`
    );
    promises.push(
      admin
        .storage()
        .bucket()
        .upload(imageToBeUploaded.filepath, {
          resumable: false,
          metadata: {
            metadata: {
              contentType: imageToBeUploaded.mimetype,
            },
          },
        })
    );
  });
  try {
    await Promise.all(promises);
    return response.json({
      message: `Images URL: ${imageUrls}`,
    });
  } catch (err) {
    console.log(err);
    response.status(500).json(err);
  }
});

busboy.end(request.rawBody);
Anyway, thank you Samuel :)
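For completeness, a hypothetical example of how a handler like uploadImage might be wired up inside a Cloud Functions Express app; the route, module path, and folder name here are assumptions, not part of the original code, and some auth middleware is assumed to have populated req.user.alias:
// Hypothetical wiring (route, path, and folder are assumptions)
const functions = require("firebase-functions");
const express = require("express");
const { uploadImage } = require("./uploadImage"); // the handler above

const app = express();

// Auth middleware is assumed to have set req.user.alias before this route runs
app.post("/users/image", (req, res) =>
  uploadImage(req, res, `/users/${req.user.alias}`, "users")
);

exports.api = functions.https.onRequest(app);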

Related

Get progress of firebase admin file upload

I'm trying to get the progress of a 1-minute video uploading to a Firebase Storage bucket using the Admin SDK. I've seen a lot about using firebase.storage().ref.child..... but I'm unable to do that with the Admin SDK since it doesn't have the same functions. This is my file upload:
exports.uploadMedia = (req, res) => {
const BusBoy = require('busboy');
const path = require('path');
const os = require('os');
const fs = require('fs');
const busboy = new BusBoy({ headers: req.headers, limits: { files: 1, fileSize: 200000000 } });
let mediaFileName;
let mediaToBeUploaded = {};
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
if(mimetype !== 'image/jpeg' && mimetype !== 'image/png' && mimetype !== 'video/quicktime' && mimetype !== 'video/mp4') {
console.log(mimetype);
return res.status(400).json({ error: 'Wrong file type submitted, only .png, .jpeg, .mov, and .mp4 files allowed'})
}
// my.image.png
const imageExtension = filename.split('.')[filename.split('.').length - 1];
//43523451452345231234.png
mediaFileName = `${Math.round(Math.random()*100000000000)}.${imageExtension}`;
const filepath = path.join(os.tmpdir(), mediaFileName);
mediaToBeUploaded = { filepath, mimetype };
file.pipe(fs.createWriteStream(filepath));
file.on('limit', function(){
fs.unlink(filepath, function(){
return res.json({'Error': 'Max file size is 200 Mb, file size too large'});
});
});
});
busboy.on('finish', () => {
admin
.storage()
.bucket()
.upload(mediaToBeUploaded.filepath, {
resumable: false,
metadata: {
metadata: {
contentType: mediaToBeUploaded.mimetype
}
}
})
.then(() => {
const mediaUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${mediaFileName}?alt=media`;
return res.json({ mediaUrl });
})
.catch((err) => {
console.error(err);
return res.json({'Error': 'Error uploading media'});
});
});
req.pipe(busboy);
}
This works okay right now, but the only problem is that the user can't see where their 1 or 2 minute video upload is at. Currently it's just an activity indicator and the user just sits there waiting without any notice. I'm using React Native on the frontend if that helps with anything. Would appreciate any help!
I was able to implement this on the client side a lot more easily, and it works perfectly with image and video upload progress. On the backend I was using the Admin SDK, but on the frontend I was originally using the Firebase SDK.
this.uploadingMedia = true;
const imageExtension = this.mediaFile.split('.')[this.mediaFile.split('.').length - 1];
const mediaFileName = `${Math.round(Math.random()*100000000000)}.${imageExtension}`;
const response = await fetch(this.mediaFile);
const blob = await response.blob();
const storageRef = storage.ref(`${mediaFileName}`).put(blob);
storageRef.on(`state_changed`,snapshot=>{
this.uploadProgress = (snapshot.bytesTransferred/snapshot.totalBytes);
}, error=>{
this.error = error.message;
this.submitting = false;
this.uploadingMedia = false;
return;
},
async () => {
storageRef.snapshot.ref.getDownloadURL().then(async (url)=>{
imageUrl = [];
videoUrl = [url];
this.uploadingMedia = false;
this.submitPost(imageUrl, videoUrl);
});
});
export const uploadFile = (
folderPath,
fileName,
file,
generateDownloadURL = true,
updateInformationUploadProgress
) => {
return new Promise((resolve, reject) => {
try {
const storageRef = firebaseApp.storage().ref(`${folderPath}/${fileName}`)
const uploadTask = storageRef.put(file)
uploadTask.on(
'state_changed',
snapshot => {
if (updateInformationUploadProgress) {
const progress =
(snapshot.bytesTransferred / snapshot.totalBytes) * 100
updateInformationUploadProgress({
name: fileName,
progress: progress,
})
}
},
error => {
console.log('upload error: ', error)
reject(error)
},
() => {
if (generateDownloadURL) {
uploadTask.snapshot.ref
.getDownloadURL()
.then(url => {
resolve(url)
})
.catch(error => {
console.log('url error: ', error.message)
reject(error)
})
} else {
resolve(uploadTask.snapshot.metadata.fullPath)
}
}
)
} catch (error) {
reject(error)
}
})
}
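A hypothetical call to the uploadFile helper above might look like this; the folder name, file object, and progress handler are just placeholders:
// Example usage of the uploadFile helper above (names are placeholders)
const onProgress = ({ name, progress }) => {
  console.log(`${name}: ${progress.toFixed(0)}%`);
};

// fileBlob is a placeholder for the Blob/File object you already have
uploadFile("videos", "intro.mp4", fileBlob, true, onProgress)
  .then(downloadUrl => console.log("Uploaded to:", downloadUrl))
  .catch(err => console.error("Upload failed:", err));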

Upload multiple images with multiple file fields

I've been having a hard time for the last few days figuring out how to upload multiple images to Firebase with Busboy.
I want to use 3 fields with 3 different images, so I can store them in one folder.
I also want each image to have its field name.
I found one topic that helped me use Promise.all and forEach, but it didn't work out for me.
Storing all files in an array:
var Promise = require('promise');
const Busboy = require("busboy");
const fs = require("fs");
const os = require("os");
const path = require("path");
const busboy = new Busboy({ headers: req.headers });
let imageToAdd = {};
let imagesToUpload = []
let newFileName;
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
const imageExtension = filename.split('.')[filename.split('.').length - 1];
filename = `${fieldname}.${imageExtension} `
newFileName = filename
const filepath = path.join(os.tmpdir(), filename);
imageToAdd = { file: filepath, type: mimetype };
file.pipe(fs.createWriteStream(filepath));
imagesToUpload = [...imagesToUpload, imageToAdd]
});
Loop over the files array and store the promises in a new array, then wait for all the promises to resolve with Promise.all:
busboy.on("finish", () => {
let promises = []
imagesToUpload.forEach((imageToBeUploaded) => {
promises.push(
admin
.storage()
.bucket(`${config.storageBucket}`)
.upload(imageToBeUploaded.file, {
resumable: false,
destination: `projectname/${newFileName}`,
metadata: {
metadata: {
contentType: imageToBeUploaded.type,
}
}
})
)
})
Promise.all(promises)
.then(res => {
res.status(200).json({msg: 'Successfully uploaded all images'})
})
.catch(err => {
res.status(500).json({error: err.code})
})
})
busboy.end(req.rawBody);
})
Only the last image is stored in my Firebase Storage.
Can someone help me with this?
Thanks
You only have one newFileName in your code, while you have an array of imagesToUpload. So you're uploading each of those images to the same newFileName and end up with whichever of the uploads completes last.
You'll want to keep an array of newFileNames, to match up with the array of imagesToUpload.
Something like:
const busboy = new Busboy({ headers: req.headers });
let imageToAdd = {};
let imagesToUpload = []
let newFileNames = [];
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
const imageExtension = filename.split('.')[filename.split('.').length - 1];
filename = `${fieldname}.${imageExtension} `
const filepath = path.join(os.tmpdir(), filename);
imageToAdd = { file: filepath, type: mimetype };
file.pipe(fs.createWriteStream(filepath));
imagesToUpload.push(imageToAdd);
newFileNames.push(filename);
});
...
busboy.on("finish", () => {
let promises = imagesToUpload.map((imageToBeUploaded, index) => {
return admin
.storage()
.bucket(`${config.storageBucket}`)
.upload(imageToBeUploaded.file, {
resumable: false,
destination: `projectname/${newFileNames[index]}`,
metadata: {
metadata: {
contentType: imageToBeUploaded.type,
}
}
})
})
Promise.all(promises)
...
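As an alternative sketch, borrowing the pattern from the first thread above where the generated filename is stored on each image object, you could avoid the parallel newFileNames array entirely; "projectname" and the surrounding setup are assumed to match the question's code:
// Sketch: keep the filename together with the file info, no parallel arrays
const imagesToUpload = [];

busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
  const imageExtension = filename.split('.')[filename.split('.').length - 1];
  const newFileName = `${fieldname}.${imageExtension}`;
  const filepath = path.join(os.tmpdir(), newFileName);
  imagesToUpload.push({ file: filepath, type: mimetype, name: newFileName });
  file.pipe(fs.createWriteStream(filepath));
});

busboy.on("finish", () => {
  const promises = imagesToUpload.map(img =>
    admin
      .storage()
      .bucket(`${config.storageBucket}`)
      .upload(img.file, {
        resumable: false,
        destination: `projectname/${img.name}`,
        metadata: { metadata: { contentType: img.type } }
      })
  );
  Promise.all(promises)
    .then(() => res.status(200).json({ msg: 'Successfully uploaded all images' }))
    .catch(err => res.status(500).json({ error: err.code }));
});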

Configuring the image file reducer with Google Firestore

The following Google Cloud Function properly uploads an image, but I would also like to compress the image to avoid unnecessary charges from large files being uploaded. I am using the image reducer extension from Firebase and it works, but the issue is that the image file no longer shows up in my user table. Is there something I need to configure in the extension so that the image URL in the user table is overwritten by the reduced image?
exports.uploadImage = (req, res) => {
const BusBoy = require("busboy")
const path = require("path")
const os = require("os")
const fs = require("fs")
const busboy = new BusBoy({ headers: req.headers })
let imageToBeUploaded = {}
let imageFileName
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
if (mimetype !== `image/jpeg` && mimetype !== `image/png`) {
return res.status(400).json({ error: `Not an acceptable file type` })
}
// my.image.png => ['my', 'image', 'png']
const imageExtension = filename.split(".")[filename.split(".").length - 1]
// 32756238461724837.png
imageFileName = `${Math.round(
Math.random() * 1000000000000
).toString()}.${imageExtension}`
const filepath = path.join(os.tmpdir(), imageFileName)
imageToBeUploaded = { filepath, mimetype }
file.pipe(fs.createWriteStream(filepath))
})
busboy.on("finish", () => {
admin
.storage()
.bucket(config.storageBucket)
.upload(imageToBeUploaded.filepath, {
resumable: false,
metadata: {
metadata: {
contentType: imageToBeUploaded.mimetype
}
}
})
.then(() => {
const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imageFileName}?alt=media`
return db.doc(`/users/${req.user.uid}`).update({ imageUrl })
})
.then(() => {
return res.json({ message: "image uploaded successfully" })
})
.catch(err => {
console.error(err)
return res.status(500).json({ error: "something went wrong" })
})
})
busboy.end(req.rawBody)
}
The Resize Images extension only handles resizing the image in Cloud Storage. It does not update data in any other location, including Cloud Firestore. If you need such functionality, you'll need to create it yourself.
Also see:
this discussion on the extension's open-source repo about allowing you to specify a callback that gets invoked after resizing.
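One way to "create it yourself" is a small Cloud Function that listens for the resized file landing in Storage and writes its URL back to the user document. The sketch below is only an illustration: the "_200x200" suffix depends on how the extension is configured, and findUserForImage is a placeholder for however you map a file back to its user.
// Sketch only: update Firestore when a resized image appears in Storage.
// The size suffix and the file-to-user mapping are assumptions.
const functions = require("firebase-functions");
const admin = require("firebase-admin");
admin.initializeApp();

exports.onImageResized = functions.storage.object().onFinalize(async (object) => {
  const filePath = object.name; // e.g. "123456789_200x200.png" (assumed layout)
  if (!filePath || !filePath.includes("_200x200")) return null;

  const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${object.bucket}/o/${encodeURIComponent(filePath)}?alt=media`;

  // Placeholder helper: resolve which user this image belongs to
  const uid = await findUserForImage(filePath);
  if (!uid) return null;

  return admin.firestore().doc(`/users/${uid}`).update({ imageUrl });
});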

How do I upload an image from a URL to Firebase Storage?

I am using Node.JS.
I can't seem to figure it out. I've updated the modules.
Old code:
var mimeType = 'image/jpeg';
var randomString = uuidv4();
var options = {
destination: 'test',
metadata: {
contentType: mimeType,
metadata: {
firebaseStorageDownloadTokens: randomString
}
}
};
return admin.storage().bucket().upload(userPhotoDownloadLink, options).then(()=>{
//
}).catch((err) => {
print(`Error Uploading Image:${err}`);
//
});
This code no longer works. How do I upload via an HTTP link? The Google Cloud 2.4.x docs only show guides for local file uploads.
EDIT:
var uploadPhoto = async function(public_courier_id,userPhotoDownloadLink){
var mimeType = 'image/jpeg';
var randomString = "testkey123";
var options = {
//destination: 'courierPhotos/' + public_courier_id,
metadata: {
contentType: mimeType,
metadata: {
firebaseStorageDownloadTokens: randomString
}
}
};
const file = admin.storage().bucket().file(`courierPhotos/${public_courier_id}`)
console.log(`Fetching File...`);
return fetch(userPhotoDownloadLink).then(res=>{
console.log(`Successfully Fetched, Piping File...`)
return new Promise((resolve,reject)=>{
res.body.pipe(file.createWriteStream(options))
.on('error', function(err) {
reject(`err: ${err}`)
})
.on('finish', function() {
resolve(`complete`);
});
})
});
}
return uploadPhoto("TESTUID","https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png")
.then(res => console.log(`Printing after function ${res}`))
.catch(e => console.log(e));
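An alternative to piping the response body, under the same assumptions as the snippet above (admin already initialized, fetch available), is to buffer the fetched image and hand it to file.save(), which also returns a promise:
// Sketch: fetch the remote image into a buffer and save it in one call
var uploadPhotoFromBuffer = async function(public_courier_id, userPhotoDownloadLink) {
  const file = admin.storage().bucket().file(`courierPhotos/${public_courier_id}`);
  const response = await fetch(userPhotoDownloadLink);
  const buffer = Buffer.from(await response.arrayBuffer());
  return file.save(buffer, {
    metadata: {
      contentType: 'image/jpeg',
      metadata: { firebaseStorageDownloadTokens: "testkey123" }
    }
  });
};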

Create a zip file on S3 from files on S3 using Lambda Node

I need to create a Zip file that consists of a selection of files (videos and images) located in my s3 bucket.
The problem at the moment using my code below is that I quickly hit the memory limit on Lambda.
async.eachLimit(files, 10, function(file, next) {
var params = {
Bucket: bucket, // bucket name
Key: file.key
};
s3.getObject(params, function(err, data) {
if (err) {
console.log('file', file.key);
console.log('get image files err',err, err.stack); // an error occurred
} else {
console.log('file', file.key);
zip.file(file.key, data.Body);
next();
}
});
},
function(err) {
if (err) {
console.log('err', err);
} else {
console.log('zip', zip);
content = zip.generateNodeStream({
type: 'nodebuffer',
streamFiles:true
});
var params = {
Bucket: bucket, // name of dest bucket
Key: 'zipped/images.zip',
Body: content
};
s3.upload(params, function(err, data) {
if (err) {
console.log('upload zip to s3 err',err, err.stack); // an error occurred
} else {
console.log(data); // successful response
}
});
}
});
Is this possible using Lambda, or should I look at a different approach?
Is it possible to write to a compressed zip file on the fly, therefore eliminating the memory issue somewhat, or do I need to have the files collected before compression?
Any help would be much appreciated.
Okay, I got to do this today and it works: direct buffer to stream, no disk involved, so memory or disk limitations won't be an issue here:
'use strict';
const AWS = require("aws-sdk");
AWS.config.update( { region: "eu-west-1" } );
const s3 = new AWS.S3( { apiVersion: '2006-03-01'} );
const _archiver = require('archiver');
//This returns us a stream.. consider it as a real pipe sending fluid to S3 bucket.. Don't forget it
const streamTo = (_bucket, _key) => {
var stream = require('stream');
var _pass = new stream.PassThrough();
s3.upload( { Bucket: _bucket, Key: _key, Body: _pass }, (_err, _data) => { /*...Handle Errors Here*/ } );
return _pass;
};
exports.handler = async (_req, _ctx, _cb) => {
var _keys = ['list of your file keys in s3'];
var _list = await Promise.all(_keys.map(_key => new Promise((_resolve, _reject) => {
s3.getObject({Bucket:'bucket-name', Key:_key}).promise()
.then(_data => _resolve( { data: _data.Body, name: `${_key.split('/').pop()}` } ));
}
))).catch(_err => { throw new Error(_err) } );
await new Promise((_resolve, _reject) => {
var _myStream = streamTo('bucket-name', 'fileName.zip'); //Now we instantiate that pipe...
var _archive = _archiver('zip');
_archive.on('error', err => { throw new Error(err); } );
//Your promise gets resolved when the fluid stops running... so that's when you get to close and resolve
_myStream.on('close', _resolve);
_myStream.on('end', _resolve);
_myStream.on('error', _reject);
_archive.pipe(_myStream); //Pass that pipe to _archive so it can push the fluid straight down to the S3 bucket
_list.forEach(_itm => _archive.append(_itm.data, { name: _itm.name } ) ); //And then we start adding files to it
_archive.finalize(); //Tell it that's all we want to add. Then when it finishes, the promise will resolve in one of those events up there
}).catch(_err => { throw new Error(_err) } );
_cb(null, { } ); //Handle response back to server
};
I formatted the code according to @iocoker's answer.
main entry
// index.js
'use strict';
const S3Zip = require('./s3-zip')
const params = {
files: [
{
fileName: '1.jpg',
key: 'key1.JPG'
},
{
fileName: '2.jpg',
key: 'key2.JPG'
}
],
zippedFileKey: 'zipped-file-key.zip'
}
exports.handler = async event => {
const s3Zip = new S3Zip(params);
await s3Zip.process();
return {
statusCode: 200,
body: JSON.stringify(
{
message: 'Zip file successfully!'
}
)
};
}
Zip file util
// s3-zip.js
'use strict';
const fs = require('fs');
const AWS = require("aws-sdk");
const Archiver = require('archiver');
const Stream = require('stream');
const https = require('https');
const sslAgent = new https.Agent({
keepAlive: true,
rejectUnauthorized: true
});
sslAgent.setMaxListeners(0);
AWS.config.update({
httpOptions: {
agent: sslAgent,
},
region: 'us-east-1'
});
module.exports = class S3Zip {
constructor(params, bucketName = 'default-bucket') {
this.params = params;
this.BucketName = bucketName;
}
async process() {
const { params, BucketName } = this;
const s3 = new AWS.S3({ apiVersion: '2006-03-01', params: { Bucket: BucketName } });
// create readstreams for all the output files and store them
const createReadStream = fs.createReadStream;
const s3FileDwnldStreams = params.files.map(item => {
const stream = s3.getObject({ Key: item.key }).createReadStream();
return {
stream,
fileName: item.fileName
}
});
const streamPassThrough = new Stream.PassThrough();
// Create a zip archive using streamPassThrough style for the linking request in s3bucket
const uploadParams = {
ACL: 'private',
Body: streamPassThrough,
ContentType: 'application/zip',
Key: params.zippedFileKey
};
const s3Upload = s3.upload(uploadParams, (err, data) => {
if (err) {
console.error('upload err', err)
} else {
console.log('upload data', data);
}
});
s3Upload.on('httpUploadProgress', progress => {
// console.log(progress); // { loaded: 4915, total: 192915, part: 1, key: 'foo.jpg' }
});
// create the archiver
const archive = Archiver('zip', {
zlib: { level: 0 }
});
archive.on('error', (error) => {
throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
});
// connect the archiver to upload streamPassThrough and pipe all the download streams to it
await new Promise((resolve, reject) => {
console.log("Starting upload of the output Files Zip Archive");
streamPassThrough.on('close', resolve);
streamPassThrough.on('end', resolve);
streamPassThrough.on('error', reject);
archive.pipe(streamPassThrough);
s3FileDwnldStreams.forEach((s3FileDwnldStream) => {
archive.append(s3FileDwnldStream.stream, { name: s3FileDwnldStream.fileName })
});
archive.finalize();
}).catch((error) => {
throw new Error(`${error.code} ${error.message} ${error.data}`);
});
// Finally wait for the uploader to finish
await s3Upload.promise();
}
}
The other solutions are great when there are not too many files (fewer than ~60). If they have to handle more files, they just quit silently with no errors. This is because they open too many streams.
This solution is inspired by https://gist.github.com/amiantos/16bacc9ed742c91151fcf1a41012445e
It is a working solution, which works well even with many files (300+) and returns a presigned URL to the zip that contains the files.
Main Lambda:
const AWS = require('aws-sdk');
const S3 = new AWS.S3({
apiVersion: '2006-03-01',
signatureVersion: 'v4',
httpOptions: {
timeout: 300000 // 5min Should Match Lambda function timeout
}
});
const archiver = require('archiver');
import stream from 'stream';
const UPLOAD_BUCKET_NAME = "my-s3-bucket";
const URL_EXPIRE_TIME = 5*60;
export async function getZipSignedUrl(event) {
const prefix = `uploads/id123123`; //replace this with your S3 prefix
let files = ["12314123.png", "56787567.png"] //replace this with your files
if (files.length == 0) {
console.log("No files to zip");
return result(404, "No pictures to download");
}
console.log("Files to zip: ", files);
try {
files = files.map(file => {
return {
fileName: file,
key: prefix + '/' + file,
type: "file"
};
});
const destinationKey = prefix + '/' + 'uploads.zip'
console.log("files: ", files);
console.log("destinationKey: ", destinationKey);
await streamToZipInS3(files, destinationKey);
const presignedUrl = await getSignedUrl(UPLOAD_BUCKET_NAME, destinationKey, URL_EXPIRE_TIME, "uploads.zip");
console.log("presignedUrl: ", presignedUrl);
if (!presignedUrl) {
return result(500, null);
}
return result(200, presignedUrl);
}
catch(error) {
console.error(`Error: ${error}`);
return result(500, null);
}
}
Helper functions:
export function result(code, message) {
return {
statusCode: code,
body: JSON.stringify(
{
message: message
}
)
}
}
export async function streamToZipInS3(files, destinationKey) {
await new Promise(async (resolve, reject) => {
var zipStream = streamTo(UPLOAD_BUCKET_NAME, destinationKey, resolve);
zipStream.on("error", reject);
var archive = archiver("zip");
archive.on("error", err => {
throw new Error(err);
});
archive.pipe(zipStream);
for (const file of files) {
if (file["type"] == "file") {
archive.append(getStream(UPLOAD_BUCKET_NAME, file["key"]), {
name: file["fileName"]
});
}
}
archive.finalize();
})
.catch(err => {
console.log(err);
throw new Error(err);
});
}
function streamTo(bucket, key, resolve) {
var passthrough = new stream.PassThrough();
S3.upload(
{
Bucket: bucket,
Key: key,
Body: passthrough,
ContentType: "application/zip",
ServerSideEncryption: "AES256"
},
(err, data) => {
if (err) {
console.error('Error while uploading zip')
throw new Error(err);
}
console.log('Zip uploaded')
resolve()
}
).on("httpUploadProgress", progress => {
console.log(progress)
});
return passthrough;
}
function getStream(bucket, key) {
let streamCreated = false;
const passThroughStream = new stream.PassThrough();
passThroughStream.on("newListener", event => {
if (!streamCreated && event == "data") {
const s3Stream = S3
.getObject({ Bucket: bucket, Key: key })
.createReadStream();
s3Stream
.on("error", err => passThroughStream.emit("error", err))
.pipe(passThroughStream);
streamCreated = true;
}
});
return passThroughStream;
}
export async function getSignedUrl(bucket: string, key: string, expires: number, downloadFilename?: string): Promise<string> {
const exists = await objectExists(bucket, key);
if (!exists) {
console.info(`Object ${bucket}/${key} does not exists`);
return null
}
let params = {
Bucket: bucket,
Key: key,
Expires: expires,
};
if (downloadFilename) {
params['ResponseContentDisposition'] = `inline; filename="${encodeURIComponent(downloadFilename)}"`;
}
try {
const url = S3.getSignedUrl('getObject', params);
return url;
} catch (err) {
console.error(`Unable to get URL for ${bucket}/${key}`, err);
return null;
}
};
Using streams may be tricky as I'm not sure how you could pipe multiple streams into an object. I've done this several times using standard file objects. It's a multi-step process and it's quite fast. Remember that Lambda operates on Linux, so you have all Linux resources at hand, including the system /tmp directory. The steps (see the sketch below):
- Create a sub-directory in /tmp, call it "transient" or whatever works for you
- Use s3.getObject() and write the file objects to /tmp/transient
- Use the GLOB package to generate an array[] of paths from /tmp/transient
- Loop the array and zip.addLocalFile(array[i])
- zip.writeZip('tmp/files.zip')
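A rough sketch of those steps, assuming the adm-zip and glob packages; the bucket name and object keys are placeholders:
// Sketch of the /tmp approach (bucket and keys are placeholders)
const fs = require('fs');
const path = require('path');
const AWS = require('aws-sdk');
const AdmZip = require('adm-zip');
const glob = require('glob');

const s3 = new AWS.S3();
const TMP_DIR = '/tmp/transient';

exports.handler = async () => {
  fs.mkdirSync(TMP_DIR, { recursive: true });

  // 1. Download every object into /tmp/transient
  const keys = ['videos/a.mp4', 'images/b.jpg']; // placeholder keys
  for (const key of keys) {
    const obj = await s3.getObject({ Bucket: 'my-bucket', Key: key }).promise();
    fs.writeFileSync(path.join(TMP_DIR, path.basename(key)), obj.Body);
  }

  // 2. Glob the downloaded files and add each one to the zip
  const zip = new AdmZip();
  glob.sync(`${TMP_DIR}/*`).forEach(p => zip.addLocalFile(p));

  // 3. Write the zip to /tmp and upload it back to S3
  const zipPath = '/tmp/files.zip';
  zip.writeZip(zipPath);
  await s3.upload({
    Bucket: 'my-bucket',
    Key: 'zipped/files.zip',
    Body: fs.createReadStream(zipPath)
  }).promise();
};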
I've used a similar approach, but I'm facing an issue where some of the files in the generated ZIP file don't have the correct size (and corresponding data). Is there any limitation on the size of the files this code can manage? In my case I'm zipping large files (a few larger than 1 GB) and the overall amount of data may reach 10 GB.
I don't get any error/warning message, so it seems it all works fine.
Any idea what may be happening?
