Uploading Multiple Files To S3 Node - node.js

I am trying to upload multiple files to S3 from my Sails.js application. I have a Controller to handle the request and a Service to do the upload, in case multiple controllers want to use the function. The promise in the controller doesn't get resolved, and I also keep getting this error: Unhandled rejection TimeoutError: Missing credentials in config
Controller
multiSizeCreate: (req, res) => {
  UploadService.uploadFile(req).then((data, failure) => {
    if (failure) return [null];
    return [Promise.resolve(AWSService.s3awsupload(data))]; // <-- The promise doesn't get resolved
  })
  .spread(uploadedData => {
    if (!uploadedData) return ResponseService.json(403, res, 'File upload failed');
    console.log(uploadedData); // <-- It logs an unresolved promise
    return ResponseService.json(201, res, 'Size(s) created successfully');
  })
  .catch(err => ValidationService.jsonResolveError(err, Size, res))
}
AWSService
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const fs = require('fs');
const key = sails.config.settings.aws.key;
const secret = sails.config.settings.aws.secret;
const bucket = sails.config.settings.aws.bucket;
const _ = require('lodash');
AWS.config.update({accessKeyId: key, secretAccessKey: secret});
const Promise = require('bluebird');

module.exports = {
  s3awsupload: (files) => {
    let toUpload = [];
    if (files.length > 0) {
      _.forEach(files, file => {
        toUpload.push(awsmainUpload(file));
      });
      return [toUpload];
    } else {
      return null;
    }
  }
}

const awsmainUpload = (fileDirectory) => {
  const uploadImage = fs.createReadStream(fileDirectory.fd);
  const fileName = Math.floor(Date.now() / 1000);
  const params = {Bucket: bucket, Key: fileName + " " + fileDirectory.fileName, Body: uploadImage};
  return new Promise((resolve, reject) => {
    s3.upload(params, (error, output) => {
      return error ? reject(error) : resolve(output);
    })
  })
}
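For what it's worth, a minimal sketch of how both symptoms could be addressed (this is a guess, not an accepted answer from the thread): apply the credentials before constructing the S3 client, make the service return one promise for all uploads, and return that promise from the controller so the chain waits for it. It reuses the awsmainUpload helper from the service above.
AWSService (sketch)
// Configure credentials BEFORE constructing the client, otherwise the client
// is created without them ("Missing credentials in config").
const AWS = require('aws-sdk');
const key = sails.config.settings.aws.key;
const secret = sails.config.settings.aws.secret;
AWS.config.update({ accessKeyId: key, secretAccessKey: secret });
const s3 = new AWS.S3();

module.exports = {
  // Resolves to an array of S3 upload results, or null when there is nothing to upload
  s3awsupload: (files) => {
    if (!files || files.length === 0) return Promise.resolve(null);
    return Promise.all(files.map(awsmainUpload));
  }
};
Controller (sketch)
multiSizeCreate: (req, res) => {
  UploadService.uploadFile(req)
    .then(data => AWSService.s3awsupload(data))
    .then(uploadedData => {
      if (!uploadedData) return ResponseService.json(403, res, 'File upload failed');
      return ResponseService.json(201, res, 'Size(s) created successfully');
    })
    .catch(err => ValidationService.jsonResolveError(err, Size, res));
}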

Related

How to read JSON from S3 by AWS Lambda Node.js 18.x runtime?

@TBA gave the solution.
The root cause was not the runtime; it came from SDK v3.
Point: don't change several things at once (like both the runtime and the SDK version together 🥲).
Thanks again, TBA.
I was using a Lambda on the Node.js 14.x runtime to read a JSON file from S3.
The brief code is below:
const AWS = require("aws-sdk");
const s3 = new AWS.S3();

exports.handler = (event) => {
  const { bucketName, objKey } = event;
  const params = {
    Bucket: bucketName,
    Key: objKey
  };
  return new Promise((resolve) => {
    s3.getObject(params, async (err, data) => {
      if (err) console.log(err, err.stack);
      else {
        const contents = JSON.parse(data.Body);
        resolve(contents);
      }
    });
  });
};
and it returned the JSON data as I expected.
Today I tried to create a new Lambda with the Node.js 18.x runtime, but it returned null or some errors...
Q) Could you give me some advice on how to solve this 🥲?
+) I used the same JSON file for each Lambda
+) Not sure why, but in my case data.Body.toString() didn't work (I saw some answers on Stack Overflow suggesting that and tried it, but no luck)
Thanks in advance!
Case A (returns null)
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";

const s3Client = new S3Client({ region: "ap-northeast-2" });

export const handler = (event) => {
  const { objKey, bucketName } = event;
  const params = {
    Bucket: bucketName,
    Key: objKey
  };
  const getObjCommand = new GetObjectCommand(params);
  return new Promise((resolve) => {
    s3Client.send(getObjCommand, async (err, data) => {
      if (err) console.log(err, err.stack);
      else {
        const list = JSON.parse(data.Body);
        resolve(list);
      }
    });
  });
};
Case B (returns "Unexpected token o in JSON at position 1")
export const handler = async (event) => {
  const { objKey, bucketName } = event;
  const params = {
    Bucket: bucketName,
    Key: objKey
  };
  const getObjCommand = new GetObjectCommand(params);
  const response = await s3Client.send(getObjCommand);
  console.log("JSON.parse(response.Body)", JSON.parse(response.Body));
};
Case C (returns "TypeError: Converting circular structure to JSON")
export const handler = async (event) => {
  const { objKey, bucketName } = event;
  const params = {
    Bucket: bucketName,
    Key: objKey
  };
  const getObjCommand = new GetObjectCommand(params);
  try {
    const response = await s3Client.send(getObjCommand);
    return JSON.stringify(response.Body);
  } catch (err) {
    console.log("error", err);
    return err;
  }
};
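For reference, the behaviour above is consistent with @TBA's point that the change is in SDK v3: there, response.Body is a stream rather than a Buffer, so it has to be read before parsing. A minimal sketch, assuming a recent @aws-sdk/client-s3 where the body exposes transformToString():
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";

const s3Client = new S3Client({ region: "ap-northeast-2" });

export const handler = async (event) => {
  const { objKey, bucketName } = event;
  const command = new GetObjectCommand({ Bucket: bucketName, Key: objKey });
  const response = await s3Client.send(command);
  // In SDK v3 the Body is a readable stream; read it into a string, then parse.
  const bodyText = await response.Body.transformToString();
  return JSON.parse(bodyText);
};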

Nodejs S3 - Delete Multiple Images

My code responds as if it succeeded, but the images are still in my bucket.
I'm trying to delete images from my S3 bucket (upload works perfectly), but the image delete process is not working properly.
Here's my code:
myRouter
router.delete("/:id", auth, async (req, res) => {
  const productSelected = await Product.findById(req.params.id);
  const result = await drop(productSelected);
  console.log('>>>>>>> ', result);
  res.send(result);
});
myS3Class
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const AWS_CONSTANTS = require('../constants');
const config = require('config');

aws.config.update({
  secretAccessKey: config.get('AWS_SECRET_ACCESS_KEY'),
  accessKeyId: config.get('AWS_ACCESS_KEY_ID'),
  region: AWS_CONSTANTS.region
})
const s3 = new aws.S3();

const drop = async (data) => {
  let obj = [];
  let result = "";
  data.images.map((image) => {
    obj.push({ Key: `${AWS_CONSTANTS.bucket}/${image}` }); // This is the full path
  });
  const options = {
    Bucket: AWS_CONSTANTS.bucket,
    Delete: {
      Objects: obj,
      Quiet: false,
    },
  }
  try {
    await s3.deleteObjects(
      options,
      function (err, data) {
        if (err) console.log('err ==>', err);
        console.log('delete successfully', data);
        result = data;
      }
    ).promise();
  } catch (error) {
    return { success: false, data: null }
  }
  return result;
}

module.exports.drop = drop;
This is the response from my code:
>>>>>>> delete successfully {
  Deleted: [ { Key: 'my-bucketfolder/1655743085375Python.png' } ],
  Errors: []
}
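One likely cause, judging from that response (a guess, not a confirmed answer from the thread): the Key sent to deleteObjects includes the bucket name as a prefix ('my-bucketfolder/...'), so S3 "deletes" a key that doesn't exist, and deleteObjects still reports success for non-existent keys. A minimal sketch of the drop function with keys relative to the bucket, and without mixing the callback and .promise() styles (it assumes the objects were uploaded under the bare image names):
const drop = async (data) => {
  // Keys are relative to the bucket, so don't prefix them with the bucket name.
  const objects = data.images.map((image) => ({ Key: image }));
  const options = {
    Bucket: AWS_CONSTANTS.bucket,
    Delete: { Objects: objects, Quiet: false },
  };
  try {
    // Either pass a callback or call .promise(), not both.
    return await s3.deleteObjects(options).promise();
  } catch (error) {
    return { success: false, data: null };
  }
};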

AWS Lambda Custom Nodejs Container Shows Runtime Error

I have built an AWS Lambda function with a custom container image. I am trying to convert an Excel file to PDF with LibreOffice: getting the file from S3, saving it locally, converting it to PDF, and then uploading it back to S3.
Here is the code.
const fs = require('fs');
const getStream = require('get-stream');
const { Readable } = require('stream');
const { S3Client, GetObjectCommand, PutObjectCommand } = require("@aws-sdk/client-s3");
const libre = require('libreoffice-convert');
const path = require('path');

exports.handler = async (event) => {
  const bucket = event.queryStringParameters.bucket;
  const file = event.queryStringParameters.file;
  const convertedFile = event.queryStringParameters.convertedFile;
  if (event.queryStringParameters['warmup'] !== undefined) {
    return {
      result: true,
      message: 'warmed up'
    }
  }
  const client = new S3Client({ region: "ap-south-1" });
  const command = new GetObjectCommand({ Bucket: bucket, Key: file });
  const response = await client.send(command);
  const objectData = response.Body;
  const writeStream = fs.createWriteStream("/tmp/sample.xlsx");
  objectData.pipe(writeStream);
  var end = new Promise((resolve, reject) => {
    objectData.on('close', resolve(true));
    objectData.on('end', resolve(true));
    objectData.on('error', reject(false));
  });
  let completed = await end;
  if (completed) {
    const extend = '.pdf';
    const outputPath = `/tmp/sample${extend}`;
    const enterPath = '/tmp/sample.xlsx';
    var readingFile = new Promise((resolve, reject) => {
      fs.readFile(enterPath, (err, data) => {
        if (err) {
          reject(false);
        }
        resolve(data);
      });
    });
    var fileData = await readingFile;
    var converting = new Promise((resolve, reject) => {
      libre.convert(fileData, extend, undefined, (err, done) => {
        if (err) {
          reject(false);
        }
        fs.writeFileSync(outputPath, done);
        resolve(true);
      });
    });
    var converted = await converting;
    if (converted) {
      var convertedFileStream = fs.createReadStream(outputPath);
      const uploadCommand = new PutObjectCommand({ Bucket: bucket, Key: convertedFile, Body: convertedFileStream });
      const lastResponse = await client.send(uploadCommand);
      const returnResponse = {
        result: true,
        message: 'success',
        bucket: event.queryStringParameters.bucket,
        file: event.queryStringParameters.file,
        convertedFile: event.queryStringParameters.convertedFile
      };
      if (event.queryStringParameters['returnEvent'] !== undefined) {
        returnResponse['returnEvent'] = event;
      }
      return returnResponse;
    }
  }
  return completed;
};
However, I am getting this error at times. Sometimes it succeeds, but sometimes it throws this error.
{
  "errorType": "Error",
  "errorMessage": "false",
  "stack": [
    "Error: false",
    " at _homogeneousError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:56:16)",
    " at postError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:72:34)",
    " at done (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:99:13)",
    " at fail (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:113:13)",
    " at /function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:148:24",
    " at processTicksAndRejections (internal/process/task_queues.js:97:5)"
  ]
}
I don't know Node.js in great detail, so I suspect the code is not written the correct way. Any ideas what I am doing wrong here?
Like @hoangdv, when I logged errors I came to know that the file was not being saved to disk correctly. So I changed that part of the code to the following, and then it worked.
const client = new S3Client({ region: "ap-south-1" });
const command = new GetObjectCommand({ Bucket: bucket, Key: file });
const { Body } = await client.send(command);

// Wait until the whole object has actually been written to disk
// (filePath is the local destination, e.g. /tmp/sample.xlsx in the original handler)
await new Promise((resolve, reject) => {
  Body.pipe(fs.createWriteStream(filePath))
    .on('error', err => reject(err))
    .on('close', () => resolve());
});
const excelFile = fs.readFileSync(filePath);
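A plausible reason the original version only worked intermittently (my reading of the question code, not something the answer states) is that objectData.on('close', resolve(true)) invokes resolve(true) immediately and passes its return value as the listener, so the handler moves on before the write stream has flushed. A small sketch of the difference, using a hypothetical waitForWrite helper:
const fs = require('fs');

// Hypothetical helper: resolves only after `readable` has been fully written to filePath.
function waitForWrite(readable, filePath) {
  return new Promise((resolve, reject) => {
    const writeStream = fs.createWriteStream(filePath);
    readable.pipe(writeStream);
    // Wrong: writeStream.on('close', resolve(true)) would call resolve right now.
    // Right: pass a function so resolve only runs when the stream actually closes.
    writeStream.on('close', () => resolve(true));
    writeStream.on('error', (err) => reject(err));
  });
}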

AWS S3 Loading many files

I need to upload a lot of files (about 65,000) split across subdirectories.
I tried to iterate and upload every single file like this:
const fs = require("fs");
const path = require("path");
const async = require("async");
const AWS = require("aws-sdk");
const readdir = require("recursive-readdir");
const slash = require("slash");

const { BUCKET, KEY, SECRET } = process.env;
const rootFolder = path.resolve(__dirname, "./");
const uploadFolder = "./test_files/15";

const s3 = new AWS.S3({
  signatureVersion: "v4",
  accessKeyId: KEY,
  secretAccessKey: SECRET,
});

function getFiles(dirPath) {
  return fs.existsSync(dirPath) ? readdir(dirPath) : [];
}

async function deploy(upload) {
  if (!BUCKET || !KEY || !SECRET) {
    throw new Error("you must provide env. variables: [BUCKET, KEY, SECRET]");
  }
  const filesToUpload = await getFiles(path.resolve(__dirname, upload));
  return new Promise((resolve, reject) => {
    async.eachOfLimit(
      filesToUpload,
      10,
      async.asyncify(async (file) => {
        const Key = file.replace(rootFolder + path.sep, "");
        console.log(`uploading: [${slash(Key)}]`);
        var options = { partSize: 5 * 1024 * 1024, queueSize: 4 };
        return new Promise((res, rej) => {
          s3.upload(
            {
              Key: slash(Key),
              Bucket: BUCKET,
              Body: fs.readFileSync(file),
            },
            (err) => {
              if (err) {
                return rej(new Error(err));
              }
              res({ result: true });
            }
          );
        });
      }),
      (err) => {
        if (err) {
          return reject(new Error(err));
        }
        resolve({ result: true });
      }
    );
  });
}

deploy(uploadFolder)
  .then(() => {
    console.log("task complete");
    process.exit(0);
  })
  .catch((err) => {
    console.error(err);
    process.exit(1);
  });
but after a considerable number of uploads I get this:
Error: Error: NetworkingError: connect ETIMEDOUT IP_S3_AWS
I need to upload this set of files from an EC2 instance (because it is the result of image processing). I see this behavior from my PC; I don't know if EC2 would have the same problem.
I have considered zipping everything and uploading that, but I need to keep the original directory structure.
I am also open to a different way of solving the problem.
Sorry for my bad English.
It would probably be much simpler to use the AWS CLI aws s3 sync command instead of building this yourself.
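For example (bucket name and paths are placeholders), a single command like this mirrors the local directory structure into the bucket and handles parallelism and transient network errors on its own:
aws s3 sync ./test_files s3://my-bucket/test_files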

In NodeJS, how to download files from S3

In ExpressJS, I would like to download files previously uploaded to an Amazon S3 bucket.
Here is my current route:
const express = require('express');
const AWS = require('aws-sdk');
const mammoth = require('mammoth');
const fs = require('fs').promises;
const path = require('path');

const router = express.Router();

router.put('/:id/download', async (req, res, next) => {
  console.log('hitting download route');
  var id = req.params.id;
  let upload = await Upload.query().findById(id).eager('user');
  console.log("file to download is: ", upload.name);

  AWS.config.update({
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  });
  const s3 = new AWS.S3();

  // var fileStream = fs.createWriteStream('/tmp/file.docx');
  // var s3Stream = s3.getObject(params).createReadStream();

  const downloadFromS3 = async () => {
    const params = {
      Bucket: process.env.AWS_BUCKET,
      Key: upload.file_url.split("com/").reverse()[0]
    };
    const { Body } = await s3.getObject(params).promise();
    await fs.writeFile(`${__dirname}/download.docx`, Body);
    return Body;
  }

  // mammoth.convertToHtml({ path: '/Users/dariusgoore/Downloads/1585930968750.docx' })
  //   .then(async function(result) {
  //     await Upload.query().findById( id )
  //       .patch({
  //         html: result.value,
  //         conversion_messages: result.messages
  //       })
  //     res.json(result);
  //   })
  //   .done();

  res.send(downloadFromS3)
});
I get no errors, but the file is not created, or if I manually create the file, it remains empty.
If I've understood you correctly, the issue is that you're not waiting for the file to be written to the local file system; you're returning it in the response via Express.
Give this code a go.
const express = require('express')
const AWS = require('aws-sdk')
const mammoth = require('mammoth')
const fs = require('fs').promises
const path = require('path')

const router = express.Router()

const s3 = new AWS.S3()
AWS.config.update({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
})

const downloadFromS3 = async (key, location) => {
  const params = {
    Bucket: process.env.AWS_BUCKET,
    Key: key,
  }
  const { Body } = await s3.getObject(params).promise()
  await fs.writeFile(location, Body)
  return true
}

router.put('/:id/download', async (req, res, next) => {
  console.log('hitting download route')
  const upload = await Upload.query()
    .findById(req.params.id)
    .eager('user')
  console.log('file to download is: ', upload.name)
  const key = upload.file_url.split('com/').reverse()[0]
  const location = `${__dirname}/${key}.docx`
  await downloadFromS3(key, location)
  res.send({ key, location })
})
import { S3 } from 'aws-sdk';
import fs from 'fs';

export default class S3Service {
  s3: S3;

  constructor() {
    this.s3 = new S3({
      apiVersion: *****,
      region: ********
    });
  }

  // Download File
  async download(bucketName: string, keyName: string, localDest?: string): Promise<any> {
    if (typeof localDest == 'undefined') {
      localDest = keyName;
    }
    const params = {
      Bucket: bucketName,
      Key: keyName
    };
    console.log("params: ", params);
    let writeStream = fs.createWriteStream(localDest);
    return new Promise<any>((resolve, reject) => {
      const readStream = this.s3.getObject(params).createReadStream();
      // Error handling in read stream
      readStream.on("error", (e) => {
        console.error(e);
        reject(e);
      });
      // Resolve only if we are done writing
      writeStream.once('finish', () => {
        resolve(keyName);
      });
      // pipe will automatically finish the write stream once done
      readStream.pipe(writeStream);
    });
  }
}
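A short usage sketch of the class above (the bucket and key names are placeholders, and it assumes the class compiles with real apiVersion/region values filled in):
// Hypothetical usage of the S3Service class above
import S3Service from './S3Service';

const main = async () => {
  const s3Service = new S3Service();
  // Streams s3://my-bucket/reports/file.docx to ./file.docx on disk
  const savedKey = await s3Service.download('my-bucket', 'reports/file.docx', './file.docx');
  console.log('downloaded:', savedKey);
};

main().catch(console.error);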
