Can't upload files from Lambda to S3 - node.js

I tested on my localhost, then checked on s3 and saw that there was a new file created.
But when testing on Lambda, although there is no error, there is no file on S3. The log of s3.upload(params).promise() is also not displayed.
var fs = require('fs');
var AWS = require('aws-sdk');
exports.handler = async (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false
try {
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
var path = 'myfile.txt';
var file_buffer = fs.readFileSync(path);
console.log(file_buffer);
var params = {
Bucket: 'bucket-dev',
Key: '2222.txt',
Body: file_buffer
};
console.log("1111");
s3.upload(params).promise()
.then(function(data) {
console.log("Successfully uploaded to");
callback(null, "All Good");
})
.catch(function(err) {
console.error(err, err.stack);
callback(err);
});
console.log("2222");
return context.logStreamName
} catch (err) {
console.log(err);
callback(err);
}
}
Thanks

Try not to mix and match async and callback. Something like this might be closer to what you want...
var fs = require("fs");
var AWS = require("aws-sdk");
exports.handler = async (event, context) => {
AWS.config.update({
accessKeyId,
secretAccessKey,
});
const s3 = new AWS.S3();
const path = "myfile.txt";
const file_buffer = fs.readFileSync(path);
const params = {
Bucket: "bucket-dev",
Key: "2222.txt",
Body: file_buffer,
};
console.log("1111");
const res = await s3.upload(params).promise();
console.log("Successfully uploaded", res);
return "All good";
};

Related

How can I upload multiple images to an s3 bucket in a lambda function using node.js?

I am not very familiar with node and trying to upload an array of media objects to an s3 bucket using an AWS Lambda node function.
The payload has an album, which is an array of key/data dictionaries. My code is as below, but I'm certain this is wrong.
const awsServerlessExpress = require('aws-serverless-express');
const app = require('./app');
const server = awsServerlessExpress.createServer(app);
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient();
var s3 = new AWS.S3();
var s3Params = {
Bucket: 'bucketid',
ContentEncoding: 'base64',
ContentType: 'image/jpeg'
};
exports.handler = async (event, context) => {
console.log(event);
var body = JSON.parse(event.body);
if (typeof body.album !== 'undefined' && body.album) {
body.album.forEach(function (value) {
var data = body.album.mediaString;
let mediaData = new Buffer(data, 'base64');
var mediaKey = body.album.mediaKey;
try {
s3Params = {
Bucket: 'bucketID',
Key: mediaKey,
Body: mediaData
};
try {
const stored = await s3.upload(s3Params).promise();
console.log("stored successfully");
return { body: JSON.stringify(data) };
} catch (err) {
console.log("error storing");
console.log(err);
return { error: err };
}
} catch (err) {
return { error: err };
}
});
return { body: JSON.stringify(data) };
} else {
return { error: 'error'};
}
};
I have an error that s3 not found. Just wondering if I'm going about this all wrong.
When I only upload one image with the following code everything works fine:
const awsServerlessExpress = require('aws-serverless-express');
const app = require('./app');
const server = awsServerlessExpress.createServer(app);
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient();
var s3 = new AWS.S3();
var s3Params = {
Bucket: 'bucketID',
ContentEncoding: 'base64',
ContentType: 'image/jpeg'
};
exports.handler = async (event, context) => {
var body = JSON.parse(event.body);
var data = body.mediaString;
let mediaData = new Buffer(data, 'base64');
var mediaKey = body.mediaKey;
try {
s3Params = {
Bucket: 'bucketID',
Key: mediaKey,
Body: mediaData
};
try {
const stored = await s3.upload(s3Params).promise();
console.log("stored successfully");
return { body: JSON.stringify(data) };
} catch (err) {
console.log("error storing");
console.log(err);
return { error: err };
}
} catch (err) {
return { error: err };
}
};

Is there any way to upload fluent-ffmpeg converted videos directly to s3 without storing them on local?

Is it possible to store ffmpeg output directly to s3 without downloading it in local or any other storage?
Below is my understanding of ffmpeg, which converts the format of a video. I have done the conversion part, but I need to store its output directly to an S3 bucket — does anyone have an idea regarding this problem?
const AWS = require('aws-sdk');
const fs = require('fs');
const ffmpeg = require('fluent-ffmpeg');
const axios = require('axios');

const s3 = new AWS.S3({
  endpoint: 's3-ap-south-1.amazonaws.com', // Put your region's endpoint
  accessKeyId: S3_ACCESS_KEY_ID,
  secretAccessKey: S3_ACCESS_SECRET_KEY,
  Bucket: S3_BUCKET_NAME,
  signatureVersion: 'v4',
  region: 'ap-south-1'
});

var params = {
  Bucket: S3_BUCKET_NAME,
  Delimiter: '',
  Prefix: S3_STORE_PATH
};

// Lists objects under the prefix, converts each to mp4 via ffmpeg, and
// re-uploads the result.
// Bug fixes vs. the original:
//  * putObject's Body was the output file PATH (a string), so S3 stored the
//    literal path text, not the video — the file contents must be read first.
//  * Mangled `$(unknown)` placeholders reconstructed as `${filename}`.
//  * Added an 'error' handler so a failed conversion rejects instead of
//    hanging the promise forever; the floating promise now has a .catch.
// NOTE(review): ffmpeg cannot stream directly into putObject; writing to
// Lambda's /tmp and uploading afterwards is the standard workaround.
s3.listObjects(params, function (err, data) {
  if (err) throw err;
  console.log(data);
  data.Contents.forEach(function (obj) {
    const file_name = obj.Key;
    const type = "mp4";
    console.log(obj.Key);
    const url = s3.getSignedUrl('getObject', {
      Bucket: S3_BUCKET_NAME,
      Key: obj.Key,
      Expires: signedUrlExpireSeconds
    });
    console.log("SIGNED URL= ", url);
    const filename = file_name.split('.').slice(0, -1).join('.');
    const localFileOutput = `/tmp/${filename}.${type}`;
    console.log(localFileOutput);
    const key = `${filename}.${type}`;

    const convert_video = async () => {
      await new Promise((resolve, reject) => {
        ffmpeg().input(url)
          .toFormat('mp4')
          .output(localFileOutput)
          .on('error', reject)
          .on('end', () => resolve())
          .run();
      });
      // Upload the converted file's CONTENTS, not its path string.
      const fileContent = fs.readFileSync(localFileOutput);
      await s3.putObject({
        Bucket: S3_BUCKET_NAME,
        Key: key,
        Body: fileContent
      }).promise();
    };

    convert_video().catch((e) => console.error(e));
  });
});

Unable to fetch list of all S3 objects using NodeJs

Kindly excuse my knowledge with NodeJs, as I've just started with it. The following Lambda function isn't fetching the list of objects (more than 1000) in S3 and gets stuck in an infinite loop, resulting in the Lambda timing out. Not sure what's wrong here.
Code:
console.log('Loading');
const AWS = require('aws-sdk');
var request=true;
const awsOptions = {
region: "us-east-1"
};
const s3 = new AWS.S3(awsOptions);
var list = [];
exports.handler = async (event, context, callback) => {
const SrcBucket = event.Records[0].s3.bucket.name;
const trigger_file = event.Records[0].s3.object.key;
var bucketParams = {
Bucket: SrcBucket,
Prefix: 'Test/'
};
do
{
s3.listObjects(bucketParams, (err, data) => {
if (err)
console.log("Error", err);
else
{
list.push(data.Contents);
if (data.IsTruncated)
bucketParams.Marker = data.NextMarker;
else
request = false;
}
});
} while (request);
callback(null, {
listLen: list.length
});

s3.listObjectsV2 is not a function NodeJs

I have the following Lambda function for fetching the list of all the keys from S3; however, it fails with the error message "s3.listObjectsV2 is not a function". Not sure what's wrong. The code is taken from another SO post, but it doesn't seem to be working.
Code:
const AWS = require('aws-sdk');
var request=true;
const awsOptions = {
region: "us-east-1"
};
const s3 = new AWS.S3(awsOptions);
exports.handler = async (event, context, callback) => {
const SrcBucket = event.Records[0].s3.bucket.name;
const trigger_file = event.Records[0].s3.object.key;
var bucketParams = {
Bucket: SrcBucket,
Prefix: 'Temp/',
};
var allKeys = [];
listAllKeys();
function listAllKeys()
{
s3.listObjectsV2(bucketParams, function (err, data)
{
if (err)
{
console.log(err, err.stack); // an error occurred
}
else
{
var contents = data.Contents;
contents.forEach(function (content) {
allKeys.push(content.Key);
});
if (data.IsTruncated) {
bucketParams.ContinuationToken = data.NextContinuationToken;
console.log("get further list...");
listAllKeys();
}
}
});
}
console.log(allKeys.length);
}

In NodeJS, how to download files from S3

In ExpressJS, I would like to download files previously uploaded to an Amazon S3 bucket.
Here is my current route:
const express = require('express');
const AWS = require('aws-sdk');
const mammoth = require('mammoth');
const fs = require('fs').promises;
const path = require('path');
const router = express.Router();

// PUT /:id/download — fetches the upload's backing S3 object and writes it
// to disk, then responds with the file body.
// Bug fix: the original ended with `res.send(downloadFromS3)` — sending the
// async FUNCTION OBJECT itself without invoking or awaiting it, so the S3
// call never ran and no file was ever written. The download is now awaited
// before responding, and errors are forwarded to Express via next().
router.put('/:id/download', async (req, res, next) => {
  console.log('hitting download route');
  try {
    const id = req.params.id;
    const upload = await Upload.query().findById(id).eager('user');
    console.log("file to download is: ", upload.name);
    AWS.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    });
    const s3 = new AWS.S3();
    const params = {
      Bucket: process.env.AWS_BUCKET,
      // the key is everything after ".com/" in the stored URL
      Key: upload.file_url.split("com/").reverse()[0]
    };
    const { Body } = await s3.getObject(params).promise();
    await fs.writeFile(`${__dirname}/download.docx`, Body);
    res.send(Body);
  } catch (err) {
    next(err);
  }
});
I get no errors, but the file is not created, or if I manually create the file, it remains empty.
If I've understood you correctly the issue is that you're not waiting for the file to be written to the local file system, you're returning it in the response via express.
Give this code a go.
const express = require('express')
const AWS = require('aws-sdk')
const mammoth = require('mammoth')
const fs = require('fs').promises
const path = require('path')

const router = express.Router()
const s3 = new AWS.S3()

AWS.config.update({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
})

// Fetch an object from S3 and persist it at `location` on the local
// filesystem; resolves true once the write completes.
async function downloadFromS3(key, location) {
  const response = await s3
    .getObject({ Bucket: process.env.AWS_BUCKET, Key: key })
    .promise()
  await fs.writeFile(location, response.Body)
  return true
}

// PUT /:id/download — look up the upload record, pull its S3 object down
// to disk, then report the key and local path back to the client.
router.put('/:id/download', async (req, res, next) => {
  console.log('hitting download route')
  const upload = await Upload.query().findById(req.params.id).eager('user')
  console.log('file to download is: ', upload.name)
  const key = upload.file_url.split('com/').reverse()[0]
  const location = `${__dirname}/${key}.docx`
  await downloadFromS3(key, location)
  res.send({ key, location })
})
import { S3 } from 'aws-sdk';
import fs from 'fs';
// S3Service: thin wrapper around the AWS SDK v2 S3 client for file transfer.
export default class S3Service {
s3: S3;
constructor() {
// NOTE(review): apiVersion/region values were redacted in the original
// post (*****) — fill in real values before use.
this.s3 = new S3({
apiVersion: *****,
region: ********
});
}
//Download File
// Streams an S3 object to the local filesystem.
//   bucketName - source bucket
//   keyName    - object key to fetch
//   localDest  - local path to write; defaults to keyName when omitted
// Resolves with keyName once the write stream emits 'finish'; rejects if
// the S3 read stream errors. pipe() closes the write stream automatically.
async download(bucketName: string, keyName: string, localDest?: string): Promise<any> {
if (typeof localDest == 'undefined') {
localDest = keyName;
}
const params = {
Bucket: bucketName,
Key: keyName
};
console.log("params: ", params);
let writeStream = fs.createWriteStream(localDest);
return new Promise<any>((resolve, reject) => {
const readStream = this.s3.getObject(params).createReadStream();
// Error handling in read stream
readStream.on("error", (e) => {
console.error(e);
reject(e);
});
// Resolve only if we are done writing
writeStream.once('finish', () => {
resolve(keyName);
});
// pipe will automatically finish the write stream once done
readStream.pipe(writeStream);
});
}
}

Resources