I have an endpoint that receives files and creates a background task for uploading those files to S3.
In order to background the file uploads, I'm using Agenda (https://github.com/agenda/agenda). The only limitation is that I need to store the file in a format that is supported by MongoDB (which is what Agenda uses under the hood). To do that, I am converting the file to a buffer before passing it to Agenda.
This is my code:
Mutation: {
  batchCreateProgressPics: combineResolvers(
    isAuthenticated,
    async (parent, { pics }, { models, currentUser }) => {
      return await Promise.all(
        pics.map(async (pic, i) => {
          const { file, bodyPart, localPath } = pic;
          const { createReadStream } = await file;
          const stream = createReadStream();
          console.log("Setting up buffer...");
          const buffer = await new Promise((resolve, reject) => {
            var buffers = [];
            stream.on("data", function(data) {
              buffers.push(data);
            });
            stream.on("end", function() {
              const everything = Buffer.concat(buffers);
              resolve(everything);
            });
            stream.on("error", function(e) {
              reject(e);
            });
          });
          const progressPic = await models.ProgressPic.create({
            bodyPart,
            user: currentUser.id,
            url: localPath,
          });
          console.log("Creating background task...");
          Agenda.now("uploadProgressPic", {
            userId: currentUser.id,
            progressPicId: progressPic.id,
            filename: `${progressPic.id}-${bodyPart}.jpg`,
            buffer,
          });
          console.log("Done.");
          return progressPic;
        })
      );
    }
  ),
},
This is fast on my local development server, but it takes a long time to run in production because of the buffering. The lines following console.log("Setting up buffer...") are what takes so long.
What I would like to do is:
1. Create and return an array of progressPics, one for each element in the pics array.
2. Do the buffer work after the response has been sent, so it doesn't hold up the front end.
Is this possible?
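For context, the "uploadProgressPic" job definition isn't shown in the question. A hypothetical sketch of what it might look like, reusing the S3 helper and ProgressPic update that appear in Update 2 below:

agenda.define("uploadProgressPic", async (job, done) => {
  const { userId, progressPicId, filename, buffer } = job.attrs.data;
  // Depending on driver settings, the stored buffer may come back as a BSON Binary,
  // so unwrap the raw bytes if needed.
  const body = buffer.buffer || buffer;
  const { Location: url, Key: key, Bucket: bucket } = await S3.upload({
    stream: body,
    folder: userId,
    filename,
  });
  await models.ProgressPic.findOneAndUpdate(
    { _id: progressPicId },
    { $set: { url, key, bucket } },
    { new: true, useFindAndModify: false }
  );
  done();
});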
============ UPDATE ==========
So if I don't await the promise, it complains that the request disconnected before the buffer finished:
const uploadProgressPic = async ({ file, progressPicId, userId, bodyPart }) => {
  try {
    const { createReadStream } = await file;
    const stream = createReadStream();
    console.log("Setting up buffer...");
    const buffer = await new Promise((resolve, reject) => {
      var buffers = [];
      stream.on("data", function(data) {
        buffers.push(data);
      });
      stream.on("end", function() {
        const everything = Buffer.concat(buffers);
        resolve(everything);
      });
      stream.on("error", function(e) {
        reject(e);
      });
    });
    console.log("Done.");
    console.log("Creating background task...");
    Agenda.now("uploadProgressPic", {
      userId,
      progressPicId,
      filename: `${progressPicId}-${bodyPart}.jpg`,
      buffer,
    });
  } catch (error) {
    console.log("ERROR OCCURRED: ", error);
  }
};

export default {
  Mutation: {
    batchCreateProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        return pics.map(async (pic, i) => {
          const { file, bodyPart, localPath } = pic;
          const progressPic = await models.ProgressPic.create({
            bodyPart,
            user: currentUser.id,
            url: localPath,
          });
          uploadProgressPic({
            file,
            progressPicId: progressPic.id,
            userId: currentUser.id,
            bodyPart,
          });
          return progressPic;
        });
      }
    ),
  },
};
Error:
ERROR OCCURRED: BadRequestError: Request disconnected during file upload stream parsing.
at IncomingMessage.<anonymous> (/Users/edmundmai/Documents/src/acne-tracker/server/node_modules/graphql-upload/lib/processRequest.js:300:35)
at Object.onceWrapper (events.js:291:20)
at IncomingMessage.emit (events.js:203:13)
at IncomingMessage.EventEmitter.emit (domain.js:471:20)
at resOnFinish (_http_server.js:614:7)
at ServerResponse.emit (events.js:208:15)
at ServerResponse.EventEmitter.emit (domain.js:471:20)
at onFinish (_http_outgoing.js:649:10)
at onCorkedFinish (_stream_writable.js:678:5)
at afterWrite (_stream_writable.js:483:3)
at processTicksAndRejections (internal/process/task_queues.js:77:11) {
message: 'Request disconnected during file upload stream parsing.',
expose: true,
statusCode: 499,
status: 499
}
========== UPDATE 2 =============
Even trying to 1) simplify it and 2) move createReadStream() outside of uploadProgressPic shows the same error:
const uploadProgressPic = async ({
  stream,
  progressPicId,
  userId,
  bodyPart,
  models,
}) => {
  try {
    console.log("Uploading to S3...");
    const { Location: url, Key: key, Bucket: bucket } = await S3.upload({
      stream,
      folder: userId,
      filename: `${progressPicId}-${bodyPart}.jpg`,
    });
    if (url && key && bucket) {
      await models.ProgressPic.findOneAndUpdate(
        { _id: progressPicId },
        { $set: { url, key, bucket } },
        { new: true, useFindAndModify: false }
      );
      console.log("Done!");
    }
  } catch (error) {
    console.log("ERROR OCCURRED: ", error);
  }
};

export default {
  Mutation: {
    batchCreateProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        return pics.map(async (pic, i) => {
          const { file, bodyPart, localPath } = pic;
          const progressPic = await models.ProgressPic.create({
            bodyPart,
            user: currentUser.id,
            url: localPath,
          });
          const { createReadStream } = await file;
          const stream = createReadStream();
          uploadProgressPic({
            stream,
            progressPicId: progressPic.id,
            userId: currentUser.id,
            bodyPart,
            models,
          });
          return progressPic;
        });
      }
    ),
  },
};
Error:
Uploading to S3...
Uploading to S3...
Uploading to S3...
ERROR OCCURRED: BadRequestError: Request disconnected during file upload stream parsing.
at IncomingMessage.<anonymous> (/Users/edmundmai/Documents/src/acne-tracker/server/node_modules/graphql-upload/lib/processRequest.js:300:35)
at Object.onceWrapper (events.js:291:20)
at IncomingMessage.emit (events.js:203:13)
at IncomingMessage.EventEmitter.emit (domain.js:471:20)
at resOnFinish (_http_server.js:614:7)
at ServerResponse.emit (events.js:208:15)
at ServerResponse.EventEmitter.emit (domain.js:471:20)
at onFinish (_http_outgoing.js:649:10)
at onCorkedFinish (_stream_writable.js:678:5)
at afterWrite (_stream_writable.js:483:3)
at processTicksAndRejections (internal/process/task_queues.js:77:11) {
message: 'Request disconnected during file upload stream parsing.',
expose: true,
statusCode: 499,
status: 499
}
Done!
Funny thing is I still see a few Done!s in the logs even though it complains?
Not an expert on the subject, but I have an idea that may work, and a theory:
IDEA: If you're dealing with a large number of images, your problem may originate from the await Promise.all(). I recommend that you use parallelLimit from async to limit how many functions execute in parallel at a time; otherwise you will have a performance problem (see the sketch below).
THEORY: Maybe you can free the memory allocated for each Buffer after use, to avoid memory leaks and make your server more performant.
If I am wrong in any way, please correct me. I am myself interested in the outcome of this problem.
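A rough sketch of what that could look like, assuming the async library (v3, where parallelLimit accepts async functions and returns a promise when no callback is given):

const asyncLib = require("async");

// One task per pic; each task runs the same per-pic logic as in the resolver above
// (create the ProgressPic, then queue the Agenda job).
const tasks = pics.map((pic) => async () => {
  // ... per-pic logic here ...
});

// Run at most 3 tasks concurrently instead of firing them all at once.
const progressPics = await asyncLib.parallelLimit(tasks, 3);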
Don't await the Promise.
new Promise((resolve, reject) => {
  var buffers = [];
  stream.on("data", function(data) {
    buffers.push(data);
  });
  stream.on("end", function() {
    const everything = Buffer.concat(buffers);
    resolve(everything);
  });
  stream.on("error", function(e) {
    reject(e);
  });
}).then((buffer) => {
  Agenda.now("uploadProgressPic", {
    userId: currentUser.id,
    progressPicId: progressPic.id,
    filename: `${progressPic.id}-${bodyPart}.jpg`,
    buffer,
  });
}).catch((error) => {
  // Clean up here
});

return models.ProgressPic.create({
  bodyPart,
  user: currentUser.id,
  url: localPath,
});
This way, you'll kick off creating the buffers, but won't actually wait for that code to execute, and will immediately create the ProgressPic instance and return it. Because the call to Agenda.now requires the resolved value of the Promise, we stick it inside the then callback. Note that it's important to append a catch as well -- if you don't, you could end up with an unhandled rejection.
You may want to use the catch callback to log the error and do any additional cleanup. For example, you may want to delete the created ProgressPic (in which case, you should move the create call above the buffer Promise so you can reference the created instance).
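For example, a rough sketch of that reordering and cleanup (buildBuffer is a hypothetical helper wrapping the stream-to-buffer Promise above, and deleteOne assumes a Mongoose-style model):

const progressPic = await models.ProgressPic.create({
  bodyPart,
  user: currentUser.id,
  url: localPath,
});

buildBuffer(stream) // hypothetical helper: resolves with the concatenated Buffer
  .then((buffer) =>
    Agenda.now("uploadProgressPic", {
      userId: currentUser.id,
      progressPicId: progressPic.id,
      filename: `${progressPic.id}-${bodyPart}.jpg`,
      buffer,
    })
  )
  .catch(async (error) => {
    console.error(error);
    // The upload will never happen, so remove the orphaned record.
    await models.ProgressPic.deleteOne({ _id: progressPic.id });
  });

return progressPic;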
If you're like me and die a little bit on the inside each time you have to type then, you can extract all that logic into a separate function:
const uploadProgressPic = async (/* parameters omitted for brevity */) => {
  try {
    const buffer = await new Promise(...)
    Agenda.now(...)
  } catch (error) {
    // Do whatever
  }
}
and then call it inside your resolver, again, without awaiting it:
uploadProgressPic()
return models.ProgressPic.create({
  bodyPart,
  user: currentUser.id,
  url: localPath,
});
I tried a variety of things that ended up not working because creating the buffer was just too slow in production for some reason. My ultimate solution that actually works was to split up the upload into two requests:
Backend:
Request #1: Create a progress pic, using the local file path as the URL
Request #2: Upload the file and update the progress pic
import { combineResolvers } from "graphql-resolvers";
import { isAuthenticated } from "./authorization";
import S3 from "../services/s3";

export default {
  Query: {
    progressPics: combineResolvers(
      isAuthenticated,
      async (parent, args, { models, currentUser }) => {
        return await models.ProgressPic.find({ user: currentUser.id });
      }
    ),
  },
  Mutation: {
    createProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        return pics.map(async (pic, i) => {
          const { bodyPart, localPath } = pic;
          return await models.ProgressPic.create({
            bodyPart,
            user: currentUser.id,
            url: localPath,
          });
        });
      }
    ),
    updateProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        return pics.map(async (pic, i) => {
          const { file, filename, progressPicId } = pic;
          const { createReadStream } = await file;
          const stream = createReadStream();
          const { Location: url, Key: key, Bucket: bucket } = await S3.upload({
            stream,
            filename,
            folder: currentUser.id,
          });
          return await models.ProgressPic.findOneAndUpdate(
            { _id: progressPicId },
            { $set: { url, key, bucket } },
            { new: true, useFindAndModify: false }
          );
        });
      }
    ),
  },
};
The frontend then waits for the response from Request #1, and sends Request #2 but ignores its response so it can return immediately:
const createAndUploadProgressPics = async photos => {
  const {
    data: { createProgressPics: progressPics },
  } = await createProgressPics({
    variables: {
      pics: photos.map((p, i) => ({
        bodyPart: BODY_PARTS[i],
        localPath: p.uri,
      })),
    },
  });

  updateProgressPics({
    variables: {
      pics: progressPics.map(({ id, bodyPart }, i) => {
        return {
          progressPicId: id,
          filename: `${id}-${bodyPart}.jpg`,
          file: photos[i],
        };
      }),
    },
  });

  onFinish(progressPics);
  navigation.goBack();
};
Related
I want to get 100% coverage on this function with node-tap, but I can't mock the error part; it always throws:
Cannot find module 'create' Require stack: - /home/mptr8/Code/Projects/me/fastify-example/fastify-postgres/test/integration.js
But I have a create function in my query.js file. What am I doing wrong here? Why doesn't it invoke the mock?
t.mock("../query.js", {
create: () => {
throw new Error();
},
});
I also tried this combination, because query.js depends on db.js. Now the mock error is gone, but I'm still not getting the error thrown from my fastify.inject.
t.mock("../db.js", {
"../query.js": {
create: () => { throw new Error() },
},
});
app.post("/", async (request, reply) => {
try {
const { body } = request;
const book = create(body.title);
reply.send(book);
} catch (error) {
// this part are not covered
reply.code(500).send({ message: "internal server error" });
}
});
Here is my complete code. You can see the full code in this GitHub repository.
// server.js
const fastify = require("fastify");
const {
  migration,
  create,
} = require("./query");
const db = require("./db");

function build(opts = {}) {
  const app = fastify(opts);
  migration();

  app.post("/", async (request, reply) => {
    try {
      const { body } = request;
      const book = create(body.title);
      reply.send(book);
    } catch (error) {
      reply.code(500).send({ message: "internal server error" });
    }
  });

  app.addHook("onClose", (_instance, done) => {
    db.close();
    done();
  });

  return app;
}

module.exports = build;
// db.js
const { Pool } = require("pg");

const pool = new Pool({
  connectionString:
    "postgresql://postgres:postgres@localhost:5432/fastify_postgres?schema=public",
});

module.exports = {
  query: (text, params) => pool.query(text, params),
  close: () => pool.end(),
};
// query.js
const db = require("./db");

async function migration() {
  await db.query(`
    CREATE TABLE IF NOT EXISTS books (
      id serial PRIMARY KEY,
      title varchar (255) NOT NULL
    )
  `);
}

async function create(title) {
  return await db.query("INSERT INTO books (title) VALUES ($1)", [title]);
}

module.exports = { migration, create };
// test.js
const tap = require("tap");
const fastify = require("../server");

tap.test("coba", async (t) => {
  const app = await fastify();

  t.test("should success create books", async (t) => {
    const response = await app.inject({
      method: "POST",
      url: "/",
      payload: {
        title: "Hello,World!",
      },
    });
    t.equal(response.statusCode, 200);
  });

  t.test("should throw error", async (t) => {
    const app = await fastify();
    // it doesn't throw the error :((
    t.mock("../query.js", {
      create: () => {
        throw new Error();
      },
    });
    const response = await app.inject({
      method: "POST",
      url: "/",
      payload: {
        title: "Hello,World!",
      },
    });
    t.equal(response.statusCode, 500);
    // call app close on last test child to close app and db properly
    app.close();
  });
});
You should use the value returned by the t.mock function:
const build = t.mock("../server", {
  "../query.js": {
    create: () => { throw new Error() },
  },
});

const app = await build({});
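A rough sketch of how that fits into the failing test, assuming tap's t.mock(path, mocks) form where relative paths are resolved from the test file; migration is also stubbed here (an assumption) so the mocked build doesn't touch the real database:

t.test("should throw error", async (t) => {
  const build = t.mock("../server", {
    "../query.js": {
      migration: async () => {}, // stubbed so build() skips the real migration
      create: () => {
        throw new Error();
      },
    },
  });
  const app = await build({});
  const response = await app.inject({
    method: "POST",
    url: "/",
    payload: { title: "Hello,World!" },
  });
  t.equal(response.statusCode, 500);
  app.close();
});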
I am trying to create and schedule jobs with Agenda. I split it into two files: in one file the consumer defines the jobs, and in the other file the producer schedules them.
This is the consumer file:
const Agenda = require("agenda");

async function run() {
  try {
    const agenda = new Agenda({
      db: {
        address: process.env.MONGODB_URL,
        collection: 'agendaJobs',
        options: {
          useUnifiedTopology: true
        }
      }
    });

    agenda.define('ticket creation', async (job, done) => {
      const { time } = job.attrs.data;
      console.log(time);
      done();
    });

    agenda.on("ready", function () {
      agenda.start();
      console.log('Agenda Started');
    });
    //await agenda.start();
  } catch (error) {
    console.error(error);
    process.exit(-1);
  }
}

module.exports = {
  run: run
};
and this is the producer file:
//import { default as Agenda } from 'agenda';
const Agenda = require("agenda");

const jobs = async (job) => {
  try {
    const agenda = new Agenda({
      db: {
        address: process.env.MONGODB_URL,
        collection: 'agendaJobs',
        options: {
          useUnifiedTopology: true
        }
      }
    });

    const created = await agenda.schedule(job.time, 'ticket creation', {
      time: job.time
    });
    return created;
  } catch (error) {
    console.error(error);
    process.exit(-1);
  }
};

module.exports = jobs;
and this is my controller, where I use it:
const jobs = require('./producer');
const { run } = require('./consumer');

run();

app.post('/create/job', async (req, res) => {
  try {
    const { time } = req.body;
    await jobs({
      time: time
    });
    return res.status(200).send({
      success: true,
      message: 'Job created'
    });
  } catch (error) {
    return res.status(error.status).send(error);
  }
});
Is there anywhere I am going wrong? I know this connects to the db.
An example of the body sent to the controller:
{
  "time": "3 minutes"
}
I have used the node debugger to trace the error. It always fails at a line that looks like this, but I don't know what is causing it:
yield schedule(this).save()
It then shows an error in the terminal saying:
TypeError: Cannot read property 'insertOne' of undefined
** The props property of this is always undefined
https://github.com/agenda/agenda/issues/335#issuecomment-249154993
agenda.on("ready", async () => {
const created = await agenda.schedule(job.time, 'ticket creation', {
time: job.time
});
});
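The schedule call fails because the new Agenda instance hasn't finished connecting to MongoDB when the request comes in, so its underlying collection is still undefined. A rough sketch of one way to wire that into the producer, assuming a single shared Agenda instance and its 'ready' event:

const Agenda = require("agenda");

// One shared instance for the producer (hypothetical refactor of producer.js).
const agenda = new Agenda({
  db: {
    address: process.env.MONGODB_URL,
    collection: 'agendaJobs',
    options: { useUnifiedTopology: true }
  }
});

// Resolves once agenda has connected and its collection exists.
const ready = new Promise((resolve) => agenda.once("ready", resolve));

const jobs = async (job) => {
  await ready; // don't schedule before the connection is up
  return agenda.schedule(job.time, 'ticket creation', { time: job.time });
};

module.exports = jobs;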
My lambda is triggered by a request from the browser. The browser sends an image as multipart/form-data.
The lambda uses busboy to parse the request:
function parseForm(event: IHttpEvent) {
  return new Promise((resolve, reject) => {
    const busboy = new Busboy({
      headers: event.headers,
      limits: { files: 10 },
    });
    const imageResponse = new Map<string, IImageParseResponse>();
    busboy.on("file", (id, file, filename, encoding, mimeType) => {
      imageResponse.set(id, { file, filename, mimeType });
    });
    busboy.on("error", (error) => reject(`Parse error: ${error}`));
    busboy.on("finish", () => resolve(imageResponse));
    busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
    busboy.end();
  });
}
Once I have parsed the request, I want to upload the file to AWS S3:
export async function handler(event: IHttpEvent) {
  var res = await parseForm(event);
  const s3 = new S3Client({ region: "eu-central-1" });
  for (const [k, v] of res) {
    console.log(`File ${v.filename} ${v.mimeType} streaming`);
    const stream = new Readable().wrap(v.file);
    const upload = new Upload({
      client: s3,
      params: {
        Key: v.filename,
        Bucket: "my-image-bucket",
        Body: stream,
        ContentType: v.mimeType,
      },
    });
    upload.on("httpUploadProgress", (p) => console.log(p));
    const result = await upload.done();
    console.log(result);
    return result;
  }
}
This does not work. The browser receives a 200 OK with a null body response, and what confuses me even more is that console.log(result); does not log anything to the console.
Where is my mistake? I don't fully understand the mechanics of streams, but as far as I understand, streaming should be more memory-efficient. In the future I plan to upload multiple images at once, and in order to save cost I want my method to be as efficient as possible.
In general, I made two mistakes:
1. I tried to upload the stream after it had already been read to the end by busboy.
2. I did not properly wait for the completion of the upload to S3 before terminating the function.
In the end I ended up with the following:
const s3 = new S3Client({ region: "eu-central-1" });
const { BUCKET_NAME, MAX_IMAGE_SIZE } = process.env;

export async function handler(event: IHttpEvent) {
  const results = await parseForm(event);
  const response = [];
  for (const r of results) {
    if (r.status === "fulfilled") {
      const value: any = r.value.result;
      response.push({
        id: r.value.id,
        key: value.Key,
        url: value.Location,
      });
    }
    if (r.status === "rejected")
      response.push({ id: r.reason.id, reason: r.reason.error });
  }
  return response;
}

async function doneHandler(
  id: string,
  uploadMap: Map<string, Upload>
): Promise<{ id: string; result: ServiceOutputTypes }> {
  try {
    var result = await uploadMap.get(id).done();
  } catch (e: any) {
    var error = e;
  } finally {
    uploadMap.delete(id);
    if (error) throw { id, error };
    return { id, result };
  }
}

function parseForm(event: IHttpEvent) {
  return new Promise((resolve, reject) => {
    const busboy = new Busboy({
      headers: event.headers,
      limits: { files: 1, fileSize: parseInt(MAX_IMAGE_SIZE) },
    });
    const responses: Promise<{
      id: string;
      result: ServiceOutputTypes;
    }>[] = [];
    const uploads = new Map<string, Upload>();
    busboy.on("file", (id, file, filename, encoding, mimeType) => {
      uploads.set(
        id,
        new Upload({
          client: s3,
          params: {
            Bucket: BUCKET_NAME,
            Body: new Readable().wrap(file),
            Key: filename,
            ContentType: mimeType,
            ContentEncoding: encoding,
          },
        })
      );
      responses.push(doneHandler(id, uploads));
      file.on("limit", async () => {
        const aborts = [];
        for (const [k, upload] of uploads) {
          aborts.push(upload.abort());
        }
        await Promise.all(aborts);
        return reject(new Error("File is too big."));
      });
    });
    busboy.on("error", (error: any) => {
      reject(new Error(`Parse error: ${error}`));
    });
    busboy.on("finish", async () => {
      const res = await Promise.allSettled(responses);
      resolve(res);
    });
    busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
    busboy.end();
  });
}
This solution also handles file limits and tries to abort all pending uploads to S3.
I am using the AWS SDK to upload user-input images, get the image links from AWS, and store the links in MongoDB. The problem is that when I run .upload(), it is async.
const imgSRC = [];
for (let img of image) {
  console.log(img);
  const params = {
    Bucket: process.env.AWS_BUCKET,
    Key: `${img.originalname}_${userID}`,
    Body: img.buffer,
  };
  s3.upload(params, (error, data) => {
    if (error) {
      console.log(error);
      res.status(500).json({ msg: "server error" });
    }
    imgSRC.push(data.Location);
    console.log(imgSRC);
  });
}

const newPost = new Post({
  userID: userID,
  contentID: contentID,
  posts: [
    {
      caption: caption,
      data: imgSRC,
    },
  ],
});
const post = await newPost.save();
So when the .save() to MongoDB runs, there are no image links from AWS yet. How can I fix this?
I've already tried async/await and it didn't work.
You need to use Promise.all() in this manner
const uploadImage = (obj) => {
  return new Promise((resolve, reject) => {
    const params = {
      Bucket: process.env.AWS_BUCKET,
      Key: obj.key,
      Body: obj.body,
    };
    s3.upload(params, (error, data) => {
      if (error) {
        console.log(error);
        return reject(error);
      }
      return resolve(data);
    });
  });
};

const mainFunction = async () => {
  const promises = [];
  for (let img of image) {
    const options = {
      key: `${img.originalname}_${userID}`,
      body: img.buffer,
    };
    promises.push(uploadImage(options));
  }
  const result = await Promise.all(promises);
  const imgSRC = result.map((r) => { return r.Location });
  return imgSRC;
};
If you want to use await with the s3.upload method, you should remove the callback and await the promise returned by .promise() instead.
try {
  const data = await s3.upload(params).promise();
  imgSRC.push(data.Location);
  console.log(imgSRC);
} catch (e) {
  console.log(e);
  res.status(500).json({ msg: "server error" });
}
Let me know if it works.
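For completeness, a rough sketch of the original loop rewritten this way (assuming aws-sdk v2, where upload(params).promise() returns a promise):

// Upload all images in parallel and wait for every link before saving the post.
const imgSRC = await Promise.all(
  image.map((img) =>
    s3
      .upload({
        Bucket: process.env.AWS_BUCKET,
        Key: `${img.originalname}_${userID}`,
        Body: img.buffer,
      })
      .promise()
      .then((data) => data.Location)
  )
);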
I'm trying to implement an API endpoint that allows for multiple file uploads.
I don't want to write any files to disk; I want to buffer them and pipe them to S3.
Here's my code for uploading a single file. Once I attempt to post multiple files to the endpoint in route.js, it doesn't work.
route.js - I'll keep this as framework agnostic as possible
import Busboy from 'busboy'
// or const Busboy = require('busboy')
import uploadFile from './upload'
// or const uploadFile = require('./upload') -- adjust the path to wherever upload.js lives

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    let chunks = []
    let fileName, fileType, fileEnc // captured in the 'file' handler for use in 'finish'
    form.on('file', (field, file, filename, enc, mime) => {
      fileName = filename
      fileType = mime
      fileEnc = enc
      file.on('data', data => {
        chunks.push(data)
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      const buf = Buffer.concat(chunks)
      resolve({
        fileBuffer: buf,
        fileType,
        fileName,
        fileEnc,
      })
    })
    req.pipe(form)
  })
}

export default async (req, res) => {
  // or module.exports = async (req, res) => {
  try {
    const { fileBuffer, ...fileParams } = await parseForm(req)
    const result = await uploadFile(fileBuffer, fileParams)
    res.status(200).json({ success: true, fileUrl: result.Location })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')

const s3 = new S3()

export default (buffer, fileParams) => {
  // or module.exports = (buffer, fileParams) => {
  const params = {
    Bucket: 'my-s3-bucket',
    Key: fileParams.fileName,
    Body: buffer,
    ContentType: fileParams.fileType,
    ContentEncoding: fileParams.fileEnc,
  }
  return s3.upload(params).promise()
}
I couldn't find a lot of documentation for this but I think I've patched together a solution.
Most implementations appear to write the file to disk before uploading it to S3, but I wanted to be able to buffer the files and upload to S3 without writing to disk.
I created this implementation that could handle a single file upload, but when I attempted to provide multiple files, it merged the buffers together into one file.
The one limitation I can't seem to overcome is the field name. For example, you could set up the FormData() like this:
const formData = new FormData()
formData.append('file[]', form.firstFile[0])
formData.append('file[]', form.secondFile[0])
formData.append('file[]', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
This structure is laid out in the FormData.append() MDN example. However, I'm not certain how to process that on the server. In the end, I set up my FormData() like this:
Form Data
const formData = new FormData()
formData.append('file1', form.firstFile[0])
formData.append('file2', form.secondFile[0])
formData.append('file3', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
As far as I can tell, this isn't explicitly wrong, but it's not the preferred method.
Here's my updated code
route.js
import Busboy from 'busboy'
// or const Busboy = require('busboy')
import uploadFile from './upload'
// or const uploadFile = require('./upload') -- adjust the path to wherever upload.js lives

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    const files = [] // create an empty array to hold the processed files
    const buffers = {} // create an empty object to contain the buffers
    form.on('file', (field, file, filename, enc, mime) => {
      buffers[field] = [] // add a new key to the buffers object
      file.on('data', data => {
        buffers[field].push(data)
      })
      file.on('end', () => {
        files.push({
          fileBuffer: Buffer.concat(buffers[field]),
          fileType: mime,
          fileName: filename,
          fileEnc: enc,
        })
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      resolve(files)
    })
    req.pipe(form) // pipe the request to the form handler
  })
}

export default async (req, res) => {
  // or module.exports = async (req, res) => {
  try {
    const files = await parseForm(req)
    const fileUrls = []
    for (const file of files) {
      const { fileBuffer, ...fileParams } = file
      const result = await uploadFile(fileBuffer, fileParams)
      fileUrls.push({ filename: result.key, url: result.Location })
    }
    res.status(200).json({ success: true, fileUrls })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')
export default (buffer, fileParams) => {
// or module.exports = (buffer, fileParams) => {
const params = {
Bucket: 'my-s3-bucket',
Key: fileParams.fileName,
Body: buffer,
ContentType: fileParams.fileType,
ContentEncoding: fileParams.fileEnc,
}
return s3.upload(params).promise()
}