My lambda is triggered by a request from the browser. The browser sends an image as multipart/form-data.
The lambda uses busboy to parse the request:
function parseForm(event: IHttpEvent) {
return new Promise(
(resolve, reject) => {
const busboy = new Busboy({
headers: event.headers,
limits: { files: 10 },
});
const imageResponse = new Map<string, IImageParseResponse>();
busboy.on("file", (id, file, filename, encoding, mimeType) => {
imageResponse.set(id, { file, filename, mimeType });
});
busboy.on("error", (error) => reject(`Parse error: ${error}`));
busboy.on("finish", () => resolve(imageResponse));
busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
busboy.end();
}
);
}
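For reference, the IHttpEvent and IImageParseResponse types used above are not defined in the snippet; a minimal sketch of what they could look like, based only on how they are used here:
interface IHttpEvent {
  headers: { [name: string]: string };
  body: string;
  isBase64Encoded: boolean;
}
interface IImageParseResponse {
  file: NodeJS.ReadableStream;
  filename: string;
  mimeType: string;
}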
After parsing the request, I want to upload the file to AWS S3.
export async function handler(event: IHttpEvent) {
var res = await parseForm(event);
const s3 = new S3Client({ region: "eu-central-1" });
for (const [k, v] of res) {
console.log(`File ${v.filename} ${v.mimeType} streaming`);
const stream = new Readable().wrap(v.file);
const upload = new Upload({
client: s3,
params: {
Key: v.filename,
Bucket: "my-image-bucket",
Body: stream,
ContentType: v.mimeType,
},
});
upload.on("httpUploadProgress", (p) => console.log(p));
const result = await upload.done();
console.log(result);
return result;
}
}
This does not work; the browser receives a 200 OK with a null body. What confuses me even more is that console.log(result); does not log anything to the console.
Where is my mistake? I don't fully understand the mechanics of streams, but as far as I understand, streaming should be more memory-efficient. In the future I plan to upload multiple images at once, and to save cost I want this method to be as efficient as possible.
In general I made two mistakes:
I tried to upload the stream after busboy had already read it to the end.
I did not properly wait for the S3 upload to complete before the function terminated.
In the end I ended up with the following:
const s3 = new S3Client({ region: "eu-central-1" });
const { BUCKET_NAME, MAX_IMAGE_SIZE } = process.env;
export async function handler(event: IHttpEvent) {
const results = await parseForm(event);
const response = [];
for (const r of results) {
if (r.status === "fulfilled") {
const value: any = r.value.result;
response.push({
id: r.value.id,
key: value.Key,
url: value.Location,
});
}
if (r.status === "rejected")
response.push({ id: r.reason.id, reason: r.reason.error });
}
return response;
}
async function doneHandler(
id: string,
uploadMap: Map<string, Upload>
): Promise<{ id: string; result: ServiceOutputTypes }> {
try {
var result = await uploadMap.get(id).done();
} catch (e: any) {
var error = e;
} finally {
uploadMap.delete(id);
if (error) throw { id, error };
return { id, result };
}
}
function parseForm(event: IHttpEvent) {
return new Promise( (resolve, reject) => {
const busboy = new Busboy({
headers: event.headers,
limits: { files: 1, fileSize: parseInt(MAX_IMAGE_SIZE) },
});
const responses: Promise<{
id: string;
result: ServiceOutputTypes;
}>[] = [];
const uploads = new Map<string, Upload>();
busboy.on("file", (id, file, filename, encoding, mimeType) => {
uploads.set(
id,
new Upload({
client: s3,
params: {
Bucket: BUCKET_NAME,
Body: new Readable().wrap(file),
Key: filename,
ContentType: mimeType,
ContentEncoding: encoding,
},
})
);
responses.push(doneHandler(id, uploads));
file.on("limit", async () => {
const aborts = [];
for (const [k, upload] of uploads) {
aborts.push(upload.abort());
}
await Promise.all(aborts);
return reject(new Error("File is too big."));
});
});
busboy.on("error", (error: any) => {
reject(new Error(`Parse error: ${error}`));
});
busboy.on("finish", async () => {
const res = await Promise.allSettled(responses);
resolve(res);
});
busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
busboy.end();
}
);
}
This solution also handles file limits and tries to abort all pending uploads to S3.
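For completeness, a minimal sketch of the browser side that posts the image as multipart/form-data (the endpoint path and field name are placeholders; the field name is what busboy passes as id in the file handler above):
async function sendImage(file: File) {
  const formData = new FormData();
  // the field name becomes the "id" argument of busboy's "file" event
  formData.append("image-1", file, file.name);
  const response = await fetch("/upload", {
    method: "POST",
    // no Content-Type header: the browser sets multipart/form-data with the boundary
    body: formData,
  });
  return response.json();
}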
Related
I need to download a folder from AWS S3.
I have a route "api/uploadFiles/download/all" which calls a controller method downloadAll.
My controller method calls the downloadFolder function.
downloadAll: async (req, res) => {
await downloadFolder(req, res)
res.status(200)
}
My downloadFolder function creates an archive with the contents of the files in the AWS bucket folder.
function downloadFolder(req, res) {
const archive = archiver('zip', { gzip: true, zlib: { level: 9 }});
const params = {
Bucket: bucketName,
Prefix: `folder/`,
};
return new Promise((resolve, reject) => {
s3.listObjects(params, (err, data) => {
if (err) {
reject(err)
} else {
data.Contents.forEach((content) => {
const file = s3.getObject({
Bucket: bucketName, Key: content.Key
}).createReadStream()
archive.append(file, { name: content.Key })
});
archive.finalize()
archive.pipe(res)
resolve(archive)
}
});
})
}
exports.downloadFolder = downloadFolder
My handler in React calls the route and creates a zip file to save.
const handleDownloadAll = () => {
axios.get('http://localhost:8000/api/uploadFiles/download/all')
.then(res => {
console.log(res.data)
const zip = new JSZip();
zip.file('download.zip', res.data);
zip.generateAsync({type:"blob"})
.then(function(content) {
saveAs(content, "download.zip");
});
})
.catch(err => {
console.log(err)
})
}
When I unzip my zip file, there is another zip file inside. And when I unzip that inner zip file, I get the error: data damaged.
You have to wait for the archiver pipe to finish, instead of resolving right after you append the streams to the archive variable:
async function downloadFolder(req, res) { // async function
const params = {
Bucket: bucketName,
Prefix: `folder/`,
};
const { Contents } = await s3.listObjects(params).promise(); // convert request to promise
return new Promise((resolve, reject) => { // wrap into a promise
const archive = archiver('zip', { gzip: true, zlib: { level: 9 } });
archive.on('finish', () => { // end | close
resolve(archive); // return to main function
});
archive.pipe(res);
archive.on('error', (err) => reject(err)); // propagate archiver errors
for (const content of Contents) {
const file = s3.getObject({
Bucket: bucketName, Key: content.Key
}).createReadStream();
archive.append(file, { name: content.Key });
}
archive.finalize();
});
}
exports.downloadFolder = downloadFolder
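The nested zip described in the question comes from the client wrapping the server's zip response in another JSZip archive. A sketch of the React handler that instead requests the response as binary and saves it directly, assuming the same route:
const handleDownloadAll = () => {
  axios.get('http://localhost:8000/api/uploadFiles/download/all', {
    responseType: 'blob', // receive the zip as binary data instead of text
  })
  .then(res => {
    saveAs(res.data, 'download.zip'); // save the server's zip as-is, no re-zipping
  })
  .catch(err => {
    console.log(err)
  })
}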
I am using the AWS SDK to upload user-submitted images, get the image link back from AWS, and store the link in MongoDB. The problem is that when I run .upload(), it is async.
const imgSRC = [];
for (let img of image) {
console.log(img);
const params = {
Bucket: process.env.AWS_BUCKET,
Key: `${img.originalname}_${userID}`,
Body: img.buffer,
};
s3.upload(params, (error, data) => {
if (error) {
console.log(error);
res.status(500).json({ msg: "server error" });
}
imgSRC.push(data.Location);
console.log(imgSRC);
});
}
const newPost = new Post({
userID: userID,
contentID: contentID,
posts: [
{
caption: caption,
data: imgSRC,
},
],
});
const post = await newPost.save();
In that case, when .save() to MongoDB runs, there are no image links from AWS yet. How can I fix that?
I've already tried async/await and it didn't work.
You need to use Promise.all(), in this manner:
const uploadImage = (obj) => {
return new Promise((resolve, reject) => {
const params = {
Bucket: process.env.AWS_BUCKET,
Key: obj.key,
Body: obj.body,
}
s3.upload(params, (error, data) => {
if (error) {
console.log(error);
return reject(error);
}
return resolve(data);
});
})
}
const mainFunction = async () => {
const promises = [];
for (let img of image) {
const options = {
key: `${img.originalname}_${userID}`,
body: img.buffer
};
promises.push(uploadImage(options));
}
const result = await Promise.all(promises);
const imgSRC = result.map((r) => { return r.Location });
return imgSRC;
}
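The route code from the question can then await mainFunction() before building the document, so the links exist when .save() runs (a sketch reusing the fields from the question, inside the same async handler):
const imgSRC = await mainFunction(); // all uploads have finished, links are available
const newPost = new Post({
  userID: userID,
  contentID: contentID,
  posts: [
    {
      caption: caption,
      data: imgSRC,
    },
  ],
});
const post = await newPost.save();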
If you want to use await with s3.upload, remove the callback and call .promise() on the request:
try {
  const data = await s3.upload(params).promise();
  imgSRC.push(data.Location);
  console.log(imgSRC);
} catch (e) {
  console.log(e);
  res.status(500).json({ msg: "server error" });
}
Let me know if it works.
I've been trying to figure this out for two days now.
I have the function below working; it basically takes a URL and uploads the file to S3. However, the problem is that I'm trying to return the file's location on S3 (data.key) out of the main function, but I'm getting either undefined or Promise { <pending> }.
var express = require("express"),
axios = require('axios').default,
stream = require("stream"),
aws = require('aws-sdk')
async function downloadImage(url) {
let contentType = 'application/octet-stream'
const s3 = new aws.S3();
// This returns undefined
var imageUrl
// This returns Promise { <pending>}
// var imageUrl = await uploadS3()
var imageRequest = axios({
method: 'get',
url: url,
responseType: 'stream'
}).then(function (response) {
if (response.status === 200) {
contentType = response.headers['content-type'];
response.data.pipe(uploadS3());
}
}).catch(function (error) {
console.log(error.message);
})
// This is where I can't get return
return imageUrl
function uploadS3() {
var pass = new stream.PassThrough();
var params = {
Bucket: "test_bucket",
Key: "test/" + Date.now().toString(),
Body: pass,
ContentType: contentType,
}
s3.upload(params, function (err, data) {
if (err) {
console.log(err)
} else {
// I have access to data.key only here
imageUrl = data.key
console.log(imageUrl)
}
})
return pass
}
}
I was trying something like this. It gets the value as far as the .then() of the first function, but I still can't get it out of the main function:
async function uploadImage(url) {
let contentType = 'application/octet-stream'
const s3 = new aws.S3();
// var imageUrl = await uploadStream()
// var imageUrl = await Promise.all
var imageUrl
var imageRequest = axios({
method: 'get',
url: url,
responseType: 'stream'
}).then(function (response) {
if (response.status === 200) {
const { writeStream, promise } = uploadStream();
contentType = response.headers['content-type'];
upload = response.data.pipe(writeStream);
promise.then((data) => {
// I have access to it here, but not
console.log(data.key)
imageUrl = data.key
}).catch((err) => {
console.log('upload failed.', err.message);
});
}
}).catch(function (error) {
console.log(error.message);
})
return imageUrl
function uploadStream() {
var pass = new stream.PassThrough();
var params = {
Bucket: "test_bucket",
Key: "test/" + Date.now().toString(),
Body: pass,
ContentType: contentType,
}
return {
writeStream: pass,
promise: s3.upload(params).promise(),
};
}
}
I was able to get it to work the following way, if anybody is interested. I create a Promise that resolves with the result (the file's location on S3) once the upload is completed.
I then use this inside my main function to do the download and then return the location.
var express = require("express"),
axios = require('axios').default,
stream = require("stream"),
aws = require('aws-sdk')
// This can now be called from any other function, which gets access to the file location (result)
uploadImageAsync("imageURLGoesHere").then(function (result) {
// result contains the location on S3 data.key (this can be changed in the code to return anything else from data)
console.log(result)
}, function (err) {
console.log(err);
})
async function uploadImageAsync(url) {
let contentType = 'application/octet-stream'
const s3 = new aws.S3();
return new Promise(function (resolve, reject) {
var imageRequest = axios({
method: 'get',
url: url,
responseType: 'stream'
}).then(function (response) {
if (response.status === 200) {
const { writeStream, promise } = uploadStream();
contentType = response.headers['content-type'];
response.data.pipe(writeStream);
return new Promise(function (resolve, reject) {
promise.then((data) => {
resolve(data.key);
}).catch((err) => {
console.log('upload failed.', err.message);
});
});
}
}).catch(function (error) {
console.log(error.message);
})
imageRequest.then(function (result) {
resolve(result);
})
function uploadStream() {
var pass = new stream.PassThrough();
var params = {
Bucket: "test_bucket",
Key: "test/" + Date.now().toString(),
Body: pass,
ContentType: contentType,
}
return {
writeStream: pass,
promise: s3.upload(params).promise(),
};
}
})
}
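The same idea can be written more directly with async/await, avoiding the nested Promise wrappers (a sketch under the same assumptions as above: aws-sdk v2, axios, and a PassThrough stream):
async function uploadImageAsync(url) {
  const s3 = new aws.S3();
  // download the image as a stream; axios rejects on non-2xx responses by default
  const response = await axios({ method: 'get', url: url, responseType: 'stream' });
  const pass = new stream.PassThrough();
  const uploadPromise = s3.upload({
    Bucket: "test_bucket",
    Key: "test/" + Date.now().toString(),
    Body: pass,
    ContentType: response.headers['content-type'],
  }).promise();
  response.data.pipe(pass); // pipe the download straight into the S3 upload
  const data = await uploadPromise;
  return data.key; // the same field the original code resolves with
}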
I'm trying to implement an API endpoint that allows for multiple file uploads.
I don't want to write any file to disk, but to buffer them and pipe to S3.
Here's my code for uploading a single file. Once I attempt to post multiple files to the endpoint in route.js, it doesn't work.
route.js - I'll keep this as framework agnostic as possible
import Busboy from 'busboy'
// or const Busboy = require('busboy')
const parseForm = async req => {
return new Promise((resolve, reject) => {
const form = new Busboy({ headers: req.headers })
let chunks = []
form.on('file', (field, file, filename, enc, mime) => {
file.on('data', data => {
chunks.push(data)
})
})
form.on('error', err => {
reject(err)
})
form.on('finish', () => {
const buf = Buffer.concat(chunks)
resolve({
fileBuffer: buf,
fileType: mime,
fileName: filename,
fileEnc: enc,
})
})
req.pipe(form)
})
}
export default async (req, res) => {
// or module.exports = async (req, res) => {
try {
const { fileBuffer, ...fileParams } = await parseForm(req)
const result = uploadFile(fileBuffer, fileParams)
res.status(200).json({ success: true, fileUrl: result.Location })
} catch (err) {
console.error(err)
res.status(500).json({ success: false, error: err.message })
}
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')
const s3 = new S3() // instantiate the client used by s3.upload() below
export default (buffer, fileParams) => {
// or module.exports = (buffer, fileParams) => {
const params = {
Bucket: 'my-s3-bucket',
Key: fileParams.fileName,
Body: buffer,
ContentType: fileParams.fileType,
ContentEncoding: fileParams.fileEnc,
}
return s3.upload(params).promise()
}
I couldn't find a lot of documentation for this but I think I've patched together a solution.
Most implementations appear to write the file to disk before uploading it to S3, but I wanted to be able to buffer the files and upload to S3 without writing to disk.
I created this implementation that could handle a single file upload, but when I attempted to provide multiple files, it merged the buffers together into one file.
The one limitation I can't seem to overcome is the field name. For example, you could set up the FormData like this:
const formData = new FormData()
formData.append('file[]', form.firstFile[0])
formData.append('file[]', form.secondFile[0])
formData.append('file[]', form.thirdFile[0])
await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
This structure is laid out in the FormData.append() MDN example. However, I'm not certain how to process that on the server. In the end, I set up my FormData like this:
Form Data
const formData = new FormData()
formData.append('file1', form.firstFile[0])
formData.append('file2', form.secondFile[0])
formData.append('file3', form.thirdFile[0])
await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
As far as I can tell, this isn't explicitly wrong, but it's not the preferred method (see the sketch after the updated code below for handling the file[] field name).
Here's my updated code
route.js
import Busboy from 'busboy'
// or const Busboy = require('busboy')
const parseForm = async req => {
return new Promise((resolve, reject) => {
const form = new Busboy({ headers: req.headers })
const files = [] // create an empty array to hold the processed files
const buffers = {} // create an empty object to contain the buffers
form.on('file', (field, file, filename, enc, mime) => {
buffers[field] = [] // add a new key to the buffers object
file.on('data', data => {
buffers[field].push(data)
})
file.on('end', () => {
files.push({
fileBuffer: Buffer.concat(buffers[field]),
fileType: mime,
fileName: filename,
fileEnc: enc,
})
})
})
form.on('error', err => {
reject(err)
})
form.on('finish', () => {
resolve(files)
})
req.pipe(form) // pipe the request to the form handler
})
}
export default async (req, res) => {
// or module.exports = async (req, res) => {
try {
const files = await parseForm(req)
const fileUrls = []
for (const file of files) {
const { fileBuffer, ...fileParams } = file
const result = await uploadFile(fileBuffer, fileParams) // wait for the S3 upload promise to resolve
fileUrls.push({ filename: result.key, url: result.Location })
}
res.status(200).json({ success: true, fileUrls })
} catch (err) {
console.error(err)
res.status(500).json({ success: false, error: err.message })
}
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')
const s3 = new S3() // instantiate the client used by s3.upload() below
export default (buffer, fileParams) => {
// or module.exports = (buffer, fileParams) => {
const params = {
Bucket: 'my-s3-bucket',
Key: fileParams.fileName,
Body: buffer,
ContentType: fileParams.fileType,
ContentEncoding: fileParams.fileEnc,
}
return s3.upload(params).promise()
}
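If you prefer the file[] field name from the MDN example, one option (a sketch, dropping into the route.js above) is to keep a chunks array local to each 'file' event instead of keying buffers by field name, so duplicate field names cannot collide:
form.on('file', (field, file, filename, enc, mime) => {
  const chunks = [] // local to this file, so repeated field names are fine
  file.on('data', data => {
    chunks.push(data)
  })
  file.on('end', () => {
    files.push({
      fileBuffer: Buffer.concat(chunks),
      fileType: mime,
      fileName: filename,
      fileEnc: enc,
    })
  })
})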
I have a REST API that uploads images to S3 and returns the response. The API works perfectly using Postman.
The problem arises when calling the API from the frontend. I am using Angular 6.
I am getting an Error: Unsupported content type: application/json error, although I am setting the headers properly.
Here is my Angular 6 code.
export class UploadComponent {
percentDone: number;
uploadSuccess: boolean;
constructor(private http: HttpClient) {}
upload(file: File) {
this.singleBasicUpload(file);
}
singleBasicUpload(file: File) {
const headers = new HttpHeaders({
'Content-Type': 'multipart/form-data',
});
const options = { headers: headers };
this.http.post(`${BASE_URL}/upload`, file, options).subscribe(response => {
console.log('response', response);
});
}
}
And here is my S3 code in backend Node.js
AWS.config.update({
accessKeyId: constants.IAM_USER_KEY,
secretAccessKey: constants.IAM_USER_SECRET,
});
const BUCKET_NAME = constants.BUCKET_NAME;
const ACL = 'public-read';
const S3 = new AWS.S3();
export async function S3Upload(req, res) {
const chunks = [];
let fname;
let fileType;
let fileEncodingType;
const busboy = new Busboy({
headers: req.headers,
});
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
fname = filename.replace(/ /g, '_');
fileType = mimetype;
fileEncodingType = encoding;
file.on('data', data => {
// chunks arrive here; push each one into an array and concat them later
console.log(chunks.length);
chunks.push(data);
});
file.on('end', () => {
console.log(`File [${filename}] Finished`);
});
});
busboy.on('finish', () => {
const userId = UUID();
const params = {
Bucket: BUCKET_NAME, // your s3 bucket name
Key: `${userId}-${fname}`,
Body: Buffer.concat(chunks), // concatenating all chunks
ACL,
ContentEncoding: fileEncodingType, // optional
ContentType: fileType, // required
};
// we are sending buffer data to s3.
S3.upload(params, (err, s3res) => {
if (err) {
res.send({
err,
status: 'error',
});
} else {
return res.send({
data: s3res,
message: 'Image successfully uploaded.',
});
}
});
});
req.pipe(busboy);
}
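One thing worth checking on the Angular side: posting the raw File with a hand-set multipart/form-data header leaves out the boundary, so the body is not real multipart data. A sketch of the usual approach, wrapping the file in FormData and letting the browser set the Content-Type (the field name here is a placeholder):
singleBasicUpload(file: File) {
  const formData = new FormData();
  formData.append('image', file, file.name); // 'image' is a placeholder field name
  // no Content-Type header: the browser adds multipart/form-data with the correct boundary
  this.http.post(`${BASE_URL}/upload`, formData).subscribe(response => {
    console.log('response', response);
  });
}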