Multiple file upload to S3 with Node.js & Busboy - node.js

I'm trying to implement an API endpoint that allows for multiple file uploads.
I don't want to write any files to disk; I want to buffer them and pipe them to S3.
Here's my code for uploading a single file. Once I attempt to post multiple files to the endpoint in route.js, it doesn't work.
route.js - I'll keep this as framework-agnostic as possible
import Busboy from 'busboy'
// or const Busboy = require('busboy')
import uploadFile from './upload' // default export from upload.js below

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    const chunks = []
    // capture the file metadata in the outer scope so it's still available on 'finish'
    let fileName, fileType, fileEnc
    form.on('file', (field, file, filename, enc, mime) => {
      fileName = filename
      fileType = mime
      fileEnc = enc
      file.on('data', data => {
        chunks.push(data)
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      resolve({
        fileBuffer: Buffer.concat(chunks),
        fileType,
        fileName,
        fileEnc,
      })
    })
    req.pipe(form)
  })
}

export default async (req, res) => {
  // or module.exports = async (req, res) => {
  try {
    const { fileBuffer, ...fileParams } = await parseForm(req)
    const result = await uploadFile(fileBuffer, fileParams)
    res.status(200).json({ success: true, fileUrl: result.Location })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')

const s3 = new S3() // credentials and region come from the environment/AWS config

export default (buffer, fileParams) => {
  // or module.exports = (buffer, fileParams) => {
  const params = {
    Bucket: 'my-s3-bucket',
    Key: fileParams.fileName,
    Body: buffer,
    ContentType: fileParams.fileType,
    ContentEncoding: fileParams.fileEnc,
  }
  return s3.upload(params).promise()
}

I couldn't find a lot of documentation for this, but I think I've patched together a solution.
Most implementations appear to write the file to disk before uploading it to S3, but I wanted to be able to buffer the files and upload to S3 without writing to disk.
I created this implementation that could handle a single file upload, but when I attempted to provide multiple files, it merged the buffers together into one file.
The one limitation I can't seem to overcome is the field name. For example, you could set up the FormData() like this:
const formData = new FormData()
formData.append('file[]', form.firstFile[0])
formData.append('file[]', form.secondFile[0])
formData.append('file[]', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
This structure is laid out in the FormData.append() MDN example. However, I'm not certain how to process that field name on the server (see the sketch after the updated code below). In the end, I set up my FormData() like this:
Form Data
const formData = new FormData()
formData.append('file1', form.firstFile[0])
formData.append('file2', form.secondFile[0])
formData.append('file3', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
As far as I can tell, this isn't explicitly wrong, but it's not the preferred method.
Here's my updated code
route.js
import Busboy from 'busboy'
// or const Busboy = require('busboy')
import uploadFile from './upload'

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    const files = [] // create an empty array to hold the processed files
    const buffers = {} // create an empty object to collect the chunks, keyed by field name
    form.on('file', (field, file, filename, enc, mime) => {
      buffers[field] = [] // add a new key to the buffers object
      file.on('data', data => {
        buffers[field].push(data)
      })
      file.on('end', () => {
        files.push({
          fileBuffer: Buffer.concat(buffers[field]),
          fileType: mime,
          fileName: filename,
          fileEnc: enc,
        })
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      resolve(files)
    })
    req.pipe(form) // pipe the request to the form handler
  })
}

export default async (req, res) => {
  // or module.exports = async (req, res) => {
  try {
    const files = await parseForm(req)
    const fileUrls = []
    for (const file of files) {
      const { fileBuffer, ...fileParams } = file
      const result = await uploadFile(fileBuffer, fileParams)
      fileUrls.push({ filename: result.Key, url: result.Location })
    }
    res.status(200).json({ success: true, fileUrls })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')

const s3 = new S3() // credentials and region come from the environment/AWS config

export default (buffer, fileParams) => {
  // or module.exports = (buffer, fileParams) => {
  const params = {
    Bucket: 'my-s3-bucket',
    Key: fileParams.fileName,
    Body: buffer,
    ContentType: fileParams.fileType,
    ContentEncoding: fileParams.fileEnc,
  }
  return s3.upload(params).promise()
}
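
For what it's worth, the file[] field names from the MDN example can be handled as well: busboy emits a separate file event for every part, even when the field name repeats, so the chunk array can be scoped to each file event rather than keyed by field name. A minimal sketch of that idea, assuming the same busboy 0.x API used above:

const parseForm = req =>
  new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    const files = []
    form.on('file', (field, file, filename, enc, mime) => {
      const chunks = [] // scoped to this file, so 'file[]' can repeat safely
      file.on('data', data => chunks.push(data))
      file.on('end', () => {
        files.push({
          fileBuffer: Buffer.concat(chunks),
          fileType: mime,
          fileName: filename,
          fileEnc: enc,
        })
      })
    })
    form.on('error', reject)
    form.on('finish', () => resolve(files))
    req.pipe(form)
  })

With that in place the route handler doesn't change. The uploads could also be started in parallel with Promise.all instead of the for loop, but the sequential version is easier to follow.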

Related

sent file from React to Node and got empty obj

I am getting image data and sending it to the server.
console.log shows that the data exists:
const [fileData, setFileData] = useState("");
console.log("fileData:", fileData);

const getFile = (e: any) => {
  setFileData(e.target.files[0]);
};

const uploadFile = (e: any) => {
  e.preventDefault();
  const data = new FormData();
  data.append("file", fileData);
  axios({
    method: "POST",
    url: "http://localhost:5000/api/setImage",
    data: data,
    headers: {
      "content-type": "multipart/form-data", // do not forget this
    },
  }).then((res) => {
    alert(res.data.message);
  });
};
server endpoint
router.post("/setImage", userController.setImage);
async setImage(req, res, next) {
  try {
    let uploadFile = req.body;
    console.log(uploadFile);
  } catch (e) {
    next(e);
  }
}
console.log shows an empty object, but I'm expecting image data.
Try using multer with fs and tempy.
router.post("/setImage", multer({ dest: tempy.file() }).single("file"), async (req, res, next) => {
if (!req.file) return
fs.readFile(req.file.path, function (err, filedata) {
if (!req.file) return
// Here you should get your expected data in filedata variable
})
})
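
If you'd rather not touch the disk at all (as in the main question above), multer can also keep the upload in memory; a minimal sketch of that variant, assuming the same "file" field name:

const multer = require("multer");
const upload = multer({ storage: multer.memoryStorage() });

router.post("/setImage", upload.single("file"), (req, res) => {
  if (!req.file) return res.status(400).json({ message: "No file received" });
  // req.file.buffer holds the raw image data; originalname, mimetype and size are also available
  res.json({ message: "Image received", size: req.file.size });
});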

Upload image in react native and Express

I use expo document picker to pick a file, and multer in the back end to save it into a folder.
When I send the image, I get a 200 status response but the image is not saved.
What am I doing wrong?
React Native code:
const pickPicture = async () => {
  let result = await DocumentPicker.getDocumentAsync({});
  console.log(result.file);
  console.log(result);
  const data = new FormData();
  data.append("avatar", {
    uri: result.uri,
    type: result.type,
    name: result.fileName,
    file: result.file,
  });
  axios
    .post(`http://localhost:8000/uploadAvatar`, data, {
      withCredentials: true,
      headers: {
        "Content-Type": "multipart/form-data",
      },
      transformRequest: (infos, error) => {
        return data;
      },
    })
    .catch(function (error) {
      console.log(error);
    })
    .then(function (res) {
      console.log(res);
    });
};
Node code:
const storage = multer.diskStorage({
  destination: (req, file, callback) => {
    callback(null, "avatars");
  },
  filename: (req, file, callback) => {
    callback(null, Date.now() + path.extname(file.originalname));
  },
});

const upload = multer({ storage: storage });

app.use("/uploadAvatar", upload.single("avatar"), (req, res) => {
  res.status(200).json("Image enregistrée !");
});
The result.file returned by DocumentPicker is a reference to the image in device storage, not the actual image data.
You need to fetch the raw image data as a Blob and upload that Blob data to the server.
let result = await DocumentPicker.getDocumentAsync({});
console.log(result.file);
console.log(result);

const imageData = await new Promise((resolve, reject) => {
  const xhr = new XMLHttpRequest();
  xhr.onload = function () {
    resolve(xhr.response);
  };
  xhr.onerror = function (e) {
    reject(new TypeError("Network request failed"));
  };
  xhr.responseType = "blob";
  xhr.open("GET", result.file, true);
  xhr.send(null);
});

const data = new FormData();
data.append("avatar", {
  uri: result.uri,
  type: result.type,
  name: result.fileName,
  file: imageData,
});

download folder from aws with express and React app

I need to download a folder from AWS S3.
I have a route "api/uploadFiles/download/all" that calls a controller method downloadAll.
My controller method calls the downloadFolder function.
downloadAll: async (req, res) => {
  await downloadFolder(req, res)
  res.status(200)
}
My downloadFolder function creates an archive from the contents of the files in the AWS bucket folder.
function downloadFolder(req, res) {
  const archive = archiver('zip', { gzip: true, zlib: { level: 9 } });
  const params = {
    Bucket: bucketName,
    Prefix: `folder/`,
  };
  return new Promise((resolve, reject) => {
    s3.listObjects(params, (err, data) => {
      if (err) {
        reject(err)
      } else {
        data.Contents.forEach((content) => {
          const file = s3.getObject({
            Bucket: bucketName, Key: content.Key
          }).createReadStream()
          archive.append(file, { name: content.Key })
        });
        archive.finalize()
        archive.pipe(res)
        resolve(archive)
      }
    });
  })
}
exports.downloadFolder = downloadFolder
My handler in React calls the route and creates a zip file to save.
const handleDownloadAll = () => {
  axios.get('http://localhost:8000/api/uploadFiles/download/all')
    .then(res => {
      console.log(res.data)
      const zip = new JSZip();
      zip.file('download.zip', res.data);
      zip.generateAsync({ type: "blob" })
        .then(function (content) {
          saveAs(content, "download.zip");
        });
    })
    .catch(err => {
      console.log(err)
    })
}
When I unzip my zip file, I find another zip file inside. And when I unzip that inner zip file, I get an error: data damaged.
You have to wait for the archiver pipe to finish, instead of finalizing right after you append the streams to the archive:
async function downloadFolder(req, res) { // async function
  const params = {
    Bucket: bucketName,
    Prefix: `folder/`,
  };
  const { Contents } = await s3.listObjects(params).promise(); // convert request to promise
  return new Promise((resolve, reject) => { // wrap into a promise
    const archive = archiver('zip', { gzip: true, zlib: { level: 9 } });
    archive.on('finish', () => { // end | close
      resolve(archive); // return to main function
    });
    archive.pipe(res);
    archive.on('error', (err) => reject(err)); // error handler
    for (const content of Contents) {
      const file = s3.getObject({
        Bucket: bucketName, Key: content.Key
      }).createReadStream();
      archive.append(file, { name: content.Key });
    }
    archive.finalize();
  });
}
exports.downloadFolder = downloadFolder
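
On the React side, the nested zip comes from wrapping the response in JSZip again even though the server is already streaming a zip. A minimal sketch of the handler, assuming axios is told to treat the body as binary (responseType: 'blob') and file-saver's saveAs is used as in the original code:

const handleDownloadAll = () => {
  axios.get('http://localhost:8000/api/uploadFiles/download/all', { responseType: 'blob' })
    .then(res => {
      // res.data is already the zip produced by archiver; save it directly
      saveAs(res.data, 'download.zip');
    })
    .catch(err => {
      console.log(err);
    });
};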

Node.js upload Image Stream.Readable to S3

My lambda is triggered by a request from the browser. The browser sends an image as multipart/form-data.
The lambda uses busboy to parse the request:
function parseForm(event: IHttpEvent) {
  return new Promise((resolve, reject) => {
    const busboy = new Busboy({
      headers: event.headers,
      limits: { files: 10 },
    });
    const imageResponse = new Map<string, IImageParseResponse>();
    busboy.on("file", (id, file, filename, encoding, mimeType) => {
      imageResponse.set(id, { file, filename, mimeType });
    });
    busboy.on("error", (error) => reject(`Parse error: ${error}`));
    busboy.on("finish", () => resolve(imageResponse));
    busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
    busboy.end();
  });
}
Once I have parsed the request, I want to upload the file to AWS S3.
export async function handler(event: IHttpEvent) {
  var res = await parseForm(event);
  const s3 = new S3Client({ region: "eu-central-1" });
  for (const [k, v] of res) {
    console.log(`File ${v.filename} ${v.mimeType} streaming`);
    const stream = new Readable().wrap(v.file);
    const upload = new Upload({
      client: s3,
      params: {
        Key: v.filename,
        Bucket: "my-image-bucket",
        Body: stream,
        ContentType: v.mimeType,
      },
    });
    upload.on("httpUploadProgress", (p) => console.log(p));
    const result = await upload.done();
    console.log(result);
    return result;
  }
}
This does not work: the browser receives a 200 OK with a null body in the response. What confuses me even more is that console.log(result); does not log anything to the console.
Where is my mistake? I don't fully understand the mechanics of streams, but as far as I understand, streaming should be more memory-efficient. In the future I plan to upload multiple images at once, and in order to save cost I want my method to be as efficient as possible.
In general, I made two mistakes:
I tried to upload the stream when it had already been read to the end by busboy.
I did not properly wait for the upload to S3 to complete before terminating the function.
In the end I ended up with the following:
const s3 = new S3Client({ region: "eu-central-1" });
const { BUCKET_NAME, MAX_IMAGE_SIZE } = process.env;
export async function handler(event: IHttpEvent) {
const results = await parseForm(event);
const response = [];
for (const r of results) {
if (r.status === "fulfilled") {
const value: any = r.value.result;
response.push({
id: r.value.id,
key: value.Key,
url: value.Location,
});
}
if (r.status === "rejected")
response.push({ id: r.reason.id, reason: r.reason.error });
}
return response;
}
async function doneHandler(
id: string,
uploadMap: Map<string, Upload>
): Promise<{ id: string; result: ServiceOutputTypes }> {
try {
var result = await uploadMap.get(id).done();
} catch (e: any) {
var error = e;
} finally {
uploadMap.delete(id);
if (error) throw { id, error };
return { id, result };
}
}
function parseForm(event: IHttpEvent) {
return new Promise( (resolve, reject) => {
const busboy = new Busboy({
headers: event.headers,
limits: { files: 1, fileSize: parseInt(MAX_IMAGE_SIZE) },
});
const responses: Promise<{
id: string;
result: ServiceOutputTypes;
}>[] = [];
const uploads = new Map<string, Upload>();
busboy.on("file", (id, file, filename, encoding, mimeType) => {
uploads.set(
id,
new Upload({
client: s3,
params: {
Bucket: BUCKET_NAME,
Body: new Readable().wrap(file),
Key: filename,
ContentType: mimeType,
ContentEncoding: encoding,
},
})
);
responses.push(doneHandler(id, uploads));
file.on("limit", async () => {
const aborts = [];
for (const [k, upload] of uploads) {
aborts.push(upload.abort());
}
await Promise.all(aborts);
return reject(new Error("File is too big."));
});
});
busboy.on("error", (error: any) => {
reject(new Error(`Parse error: ${error}`));
});
busboy.on("finish", async () => {
const res = await Promise.allSettled(responses);
resolve(res);
});
busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
busboy.end();
}
);
}
This solution also handles file size limits and tries to abort all pending uploads to S3 when a file exceeds the limit.

Angular 6 file upload

I have a REST API that uploads images to S3 and returns the response. The API works perfectly using Postman.
The problem arises when calling the API from the frontend. I am using Angular 6.
I am getting an Error: Unsupported content type: application/json error, although I am setting the headers properly.
Here is my Angular 6 code.
export class UploadComponent {
  percentDone: number;
  uploadSuccess: boolean;

  constructor(private http: HttpClient) {}

  upload(file: File) {
    this.singleBasicUpload(file);
  }

  singleBasicUpload(file: File) {
    const headers = new HttpHeaders({
      'Content-Type': 'multipart/form-data',
    });
    const options = { headers: headers };
    this.http.post(`${BASE_URL}/upload`, file, options).subscribe(response => {
      console.log('response', response);
    });
  }
}
And here is my S3 code in the Node.js backend.
AWS.config.update({
  accessKeyId: constants.IAM_USER_KEY,
  secretAccessKey: constants.IAM_USER_SECRET,
});

const BUCKET_NAME = constants.BUCKET_NAME;
const ACL = 'public-read';
const S3 = new AWS.S3();

export async function S3Upload(req, res) {
  const chunks = [];
  let fname;
  let fileType;
  let fileEncodingType;
  const busboy = new Busboy({
    headers: req.headers,
  });
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    fname = filename.replace(/ /g, '_');
    fileType = mimetype;
    fileEncodingType = encoding;
    file.on('data', data => {
      // you get chunks here; push every chunk into the array and concat it later
      console.log(chunks.length);
      chunks.push(data);
    });
    file.on('end', () => {
      console.log(`File [${filename}] Finished`);
    });
  });
  busboy.on('finish', () => {
    const userId = UUID();
    const params = {
      Bucket: BUCKET_NAME, // your s3 bucket name
      Key: `${userId}-${fname}`,
      Body: Buffer.concat(chunks), // concatenating all chunks
      ACL,
      ContentEncoding: fileEncodingType, // optional
      ContentType: fileType, // required
    };
    // we are sending buffer data to s3.
    S3.upload(params, (err, s3res) => {
      if (err) {
        res.send({
          err,
          status: 'error',
        });
      } else {
        return res.send({
          data: s3res,
          message: 'Image successfully uploaded.',
        });
      }
    });
  });
  req.pipe(busboy);
}
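
For this kind of setup, the usual fix is to send a FormData body and let the browser generate the multipart Content-Type (with its boundary) instead of setting the header manually; posting the raw File with a hand-set multipart/form-data header leaves Busboy with no boundary to parse. A minimal sketch of that change, reusing the same BASE_URL and Busboy backend as above:

singleBasicUpload(file: File) {
  const formData = new FormData();
  formData.append('file', file, file.name);
  // no explicit Content-Type header: the browser adds multipart/form-data with the correct boundary
  this.http.post(`${BASE_URL}/upload`, formData).subscribe(response => {
    console.log('response', response);
  });
}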