errorMessage: "event is not defined" in Lambda function (Node.js)

I am trying to run a Lambda function attached to an API Gateway GET request, and below is the code:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const bucketName = "dhaval-upload";

let params = {
    Bucket: bucketName,
    Key: event.fileName
};

exports.handler = async (event, context, callback) => {
    return await s3.getObject(params).promise()
        .then((res) => {
            return "abcd";
            // return res.Body.toString('utf-8');
        })
        .catch((err) => {
            return err;
        });
};
but I am getting the below error:
errorMessage: "event is not defined"
errorType: "ReferenceError"
But I don't understand the reason for this, as I have another POST request running perfectly.
Any help will be highly appreciated.

You need to place params inside your handler. event only exists as a parameter of the handler; code at module scope runs once when the Lambda container initializes, before any request has arrived, hence the ReferenceError. Like this:
exports.handler = async (event, context, callback) => {
    let params = {
        Bucket: bucketName,
        Key: event.fileName
    };
    return await s3.getObject(params).promise()
        .then((res) => {
            return "abcd";
            // return res.Body.toString('utf-8');
        })
        .catch((err) => {
            return err;
        });
};
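One caveat beyond the fix: where fileName lives depends on your integration mapping. With an API Gateway proxy integration, a GET request's parameters typically arrive in the query string rather than as a top-level event.fileName field. A minimal sketch, assuming a hypothetical ?fileName=... query parameter:

exports.handler = async (event) => {
    // With a proxy integration, GET parameters arrive under
    // event.queryStringParameters (assumption: the client sends ?fileName=...).
    const fileName = event.queryStringParameters && event.queryStringParameters.fileName;
    const res = await s3.getObject({ Bucket: bucketName, Key: fileName }).promise();
    return res.Body.toString('utf-8');
};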

Related

How to read CSV data from S3 using Node.js AWS Lambda function

I have a Node.js AWS Lambda function and I am trying to read records from a CSV file in S3 and print its contents.
Below is my code to achieve this; however, I am getting null as output.
Code:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const csv = require('csv-parser');

const bucket = 'awslambdabuckets';
const objectkey = 'record.csv';
const params = { Bucket: bucket, Key: objectkey };
const results = [];

exports.handler = async function (event, ctx, callback) {
    try {
        const file = s3.getObject(params).createReadStream();
        file
            .pipe(csv())
            .on('data', function (data) {
                results.push;
            })
            .on('end', () => {
                console.log(results);
                callback(null, results);
            });
    } catch (err) {
        console.log(err);
        callback(Error(err));
    }
};
Output: null
Can someone help me point out the problem and how to fix it?
You are not pushing the data into the results array; make the change below:
exports.handler = async function (event, ctx, callback) {
    try {
        const file = s3.getObject(params).createReadStream();
        file
            .pipe(csv())
            .on('data', function (data) {
                results.push(data);
            })
            .on('end', () => {
                console.log(results);
                callback(null, results);
            });
    } catch (err) {
        console.log(err);
        callback(Error(err));
    }
};
You are not pushing data to the array:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const csv = require('csv-parser');

const bucket = 'awslambdabuckets';
const objectkey = 'record.csv';
const params = { Bucket: bucket, Key: objectkey };
const results = [];

exports.handler = function (event, ctx, callback) {
    try {
        const file = s3.getObject(params).createReadStream();
        file
            .pipe(csv())
            .on('data', function (data) {
                results.push(data); // --> here
            })
            .on('end', () => {
                console.log(results);
                callback(null, results);
            });
    } catch (err) {
        console.log(err);
        callback(Error(err));
    }
};
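If you would rather keep the handler async, note that an async handler finishes as soon as its returned promise settles, so the safest pattern is to return a promise that resolves only when the stream ends. A minimal sketch, assuming the same s3, csv, and params declarations as above:

exports.handler = async function (event) {
    // Returning this promise makes the async handler wait for the stream to finish.
    return new Promise((resolve, reject) => {
        const rows = [];
        s3.getObject(params).createReadStream()
            .on('error', reject)                 // S3 errors, e.g. NoSuchKey
            .pipe(csv())
            .on('data', (data) => rows.push(data))
            .on('end', () => resolve(rows))
            .on('error', reject);                // parser errors
    });
};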

Lambda: Unable to stream and upload data in S3

Using the below code, I'm trying to download a file from one S3 bucket and upload it to another S3 bucket programmatically. The code executes without any issues or exceptions, but the file is not getting processed.
const AWSXRay = require('aws-xray-sdk');
const AWS = AWSXRay.captureAWS(require('aws-sdk'));
const S3 = new AWS.S3();
const fs = require('fs');

exports.index = async (event, context) => {
    var getParams = {
        Bucket: 'my-s3-test-bucket1',
        Key: 'SampleVideo.mp4'
    };
    const inputFilename = '/tmp/SampleVideo.mp4';
    const writeStream = fs.createWriteStream(inputFilename);

    new Promise((resolve, reject) => {
        S3.getObject(getParams).createReadStream()
            .pipe(writeStream)
            .on('end', () => { console.log('end'); return resolve(); })
            .on('error', (error) => { console.log('error'); return reject(error); });
    });

    writeStream.on('finish', function () {
        var putParams = {
            Body: fs.createReadStream(inputFilename),
            Bucket: 'my-s3-test-bucket2',
            Key: 'transfer-' + 'OutputVideo.mp4',
        };
        S3.upload(putParams, function (err, data) {
            if (err) console.log(err, err.stack);
            else console.log('logging data' + data);
        });
    });
};
It seems that because you are using an async handler, your function completes prematurely, before its body has a chance to fully execute.
You can wrap your code in a promise, as shown in the AWS docs, to actually tell your function to wait for its entire body to execute:
const AWSXRay = require('aws-xray-sdk');
const AWS = AWSXRay.captureAWS(require('aws-sdk'));
const S3 = new AWS.S3();
const fs = require('fs');

exports.index = async (event, context) => {
    const promise = new Promise(function (resolve, reject) {
        var getParams = {
            Bucket: 'my-s3-test-bucket1',
            Key: 'SampleVideo.mp4'
        };
        const inputFilename = '/tmp/SampleVideo.mp4';
        const writeStream = fs.createWriteStream(inputFilename);

        S3.getObject(getParams).createReadStream()
            .on('error', (error) => { console.log('error'); reject(error); })
            .pipe(writeStream);

        writeStream.on('finish', function () {
            var putParams = {
                Body: fs.createReadStream(inputFilename),
                Bucket: 'my-s3-test-bucket2',
                Key: 'transfer-' + 'OutputVideo.mp4',
            };
            S3.upload(putParams, function (err, data) {
                // Settle the outer promise here so the handler waits for the upload.
                if (err) { console.log(err, err.stack); reject(err); }
                else { console.log('logging data ' + data); resolve(data); }
            });
        });
    });
    return promise;
};
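For comparison, the same flow can be expressed without the explicit promise wrapper by awaiting each step. A minimal sketch, assuming the v2 aws-sdk and a runtime that ships stream/promises (Node.js 16+); X-Ray instrumentation is omitted here:

const AWS = require('aws-sdk');
const S3 = new AWS.S3();
const fs = require('fs');
const { pipeline } = require('stream/promises');

exports.index = async (event, context) => {
    const inputFilename = '/tmp/SampleVideo.mp4';

    // pipeline() resolves once the object has been fully written to /tmp.
    await pipeline(
        S3.getObject({ Bucket: 'my-s3-test-bucket1', Key: 'SampleVideo.mp4' }).createReadStream(),
        fs.createWriteStream(inputFilename)
    );

    // .promise() resolves when the managed upload completes.
    return S3.upload({
        Body: fs.createReadStream(inputFilename),
        Bucket: 'my-s3-test-bucket2',
        Key: 'transfer-OutputVideo.mp4',
    }).promise();
};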

Node.js upload Image Stream.Readable to S3

My lambda is triggered by a request from the browser. The browser sends an image as multipart/form-data.
The lambda uses busboy to parse the request:
function parseForm(event: IHttpEvent) {
    return new Promise((resolve, reject) => {
        const busboy = new Busboy({
            headers: event.headers,
            limits: { files: 10 },
        });
        const imageResponse = new Map<string, IImageParseResponse>();
        busboy.on("file", (id, file, filename, encoding, mimeType) => {
            imageResponse.set(id, { file, filename, mimeType });
        });
        busboy.on("error", (error) => reject(`Parse error: ${error}`));
        busboy.on("finish", () => resolve(imageResponse));
        busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
        busboy.end();
    });
}
Once the request is parsed, I want to upload the file to AWS S3.
export async function handler(event: IHttpEvent) {
    var res = await parseForm(event);
    const s3 = new S3Client({ region: "eu-central-1" });
    for (const [k, v] of res) {
        console.log(`File ${v.filename} ${v.mimeType} streaming`);
        const stream = new Readable().wrap(v.file);
        const upload = new Upload({
            client: s3,
            params: {
                Key: v.filename,
                Bucket: "my-image-bucket",
                Body: stream,
                ContentType: v.mimeType,
            },
        });
        upload.on("httpUploadProgress", (p) => console.log(p));
        const result = await upload.done();
        console.log(result);
        return result;
    }
}
This does not work; the browser receives a 200 OK with a null body. What confuses me even more is that console.log(result); does not log anything to the console.
Where is my mistake? I don't fully understand the mechanics of streams, but as far as I understand they should be more memory-efficient. In the future I plan to upload multiple images at once, and in order to save cost I want my method to be as efficient as possible.
In general, I made two mistakes:
1. I tried to upload the stream when it had already been read to the end by busboy.
2. I did not properly wait for the upload to S3 to complete before the function terminated.
In the end I ended up with the following:
const s3 = new S3Client({ region: "eu-central-1" });
const { BUCKET_NAME, MAX_IMAGE_SIZE } = process.env;

export async function handler(event: IHttpEvent) {
    const results = await parseForm(event);
    const response = [];
    for (const r of results) {
        if (r.status === "fulfilled") {
            const value: any = r.value.result;
            response.push({
                id: r.value.id,
                key: value.Key,
                url: value.Location,
            });
        }
        if (r.status === "rejected")
            response.push({ id: r.reason.id, reason: r.reason.error });
    }
    return response;
}

async function doneHandler(
    id: string,
    uploadMap: Map<string, Upload>
): Promise<{ id: string; result: ServiceOutputTypes }> {
    try {
        var result = await uploadMap.get(id).done();
    } catch (e: any) {
        var error = e;
    } finally {
        uploadMap.delete(id);
        if (error) throw { id, error };
        return { id, result };
    }
}

function parseForm(event: IHttpEvent) {
    return new Promise((resolve, reject) => {
        const busboy = new Busboy({
            headers: event.headers,
            limits: { files: 1, fileSize: parseInt(MAX_IMAGE_SIZE) },
        });
        const responses: Promise<{
            id: string;
            result: ServiceOutputTypes;
        }>[] = [];
        const uploads = new Map<string, Upload>();
        busboy.on("file", (id, file, filename, encoding, mimeType) => {
            uploads.set(
                id,
                new Upload({
                    client: s3,
                    params: {
                        Bucket: BUCKET_NAME,
                        Body: new Readable().wrap(file),
                        Key: filename,
                        ContentType: mimeType,
                        ContentEncoding: encoding,
                    },
                })
            );
            responses.push(doneHandler(id, uploads));
            file.on("limit", async () => {
                const aborts = [];
                for (const [k, upload] of uploads) {
                    aborts.push(upload.abort());
                }
                await Promise.all(aborts);
                return reject(new Error("File is too big."));
            });
        });
        busboy.on("error", (error: any) => {
            reject(new Error(`Parse error: ${error}`));
        });
        busboy.on("finish", async () => {
            const res = await Promise.allSettled(responses);
            resolve(res);
        });
        busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
        busboy.end();
    });
}
This solution also handles file size limits and tries to abort all pending uploads to S3 when a file exceeds the limit.
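For reference, the handler above branches on r.status because Promise.allSettled never rejects; it resolves to an array of status records (standard JavaScript, not specific to this code). A minimal sketch of the shapes involved, with illustrative values:

// Inside an async function; values here are illustrative only.
const settled = await Promise.allSettled([
    Promise.resolve({ id: "a", result: { Key: "k", Location: "url" } }),
    Promise.reject({ id: "b", error: new Error("failed") }),
]);
// settled[0] -> { status: "fulfilled", value: { id: "a", result: { ... } } }
// settled[1] -> { status: "rejected", reason: { id: "b", error: Error } }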

Upload files to AWS S3 from Node.js

I am using the AWS SDK to upload user input images, get the image links back from AWS, and store the links in MongoDB. The problem is that .upload() runs asynchronously.
const imgSRC = [];
for (let img of image) {
    console.log(img);
    const params = {
        Bucket: process.env.AWS_BUCKET,
        Key: `${img.originalname}_${userID}`,
        Body: img.buffer,
    };
    s3.upload(params, (error, data) => {
        if (error) {
            console.log(error);
            res.status(500).json({ msg: "server error" });
        }
        imgSRC.push(data.Location);
        console.log(imgSRC);
    });
}

const newPost = new Post({
    userID: userID,
    contentID: contentID,
    posts: [
        {
            caption: caption,
            data: imgSRC,
        },
    ],
});
const post = await newPost.save();
In that case, by the time the .save() to MongoDB runs, there are no image links from AWS yet. How can I fix this?
I've already tried async/await and it didn't work.
You need to use Promise.all(), in this manner:
const uploadImage = (obj) => {
    return new Promise((resolve, reject) => {
        const params = {
            Bucket: process.env.AWS_BUCKET,
            Key: obj.key,
            Body: obj.body,
        };
        s3.upload(params, (error, data) => {
            if (error) {
                console.log(error);
                return reject(error);
            }
            return resolve(data); // resolve, so Promise.all can collect the result
        });
    });
};
const mainFunction = async () => {
    const promises = [];
    for (let img of image) {
        const options = {
            key: `${img.originalname}_${userID}`,
            body: img.buffer
        };
        promises.push(uploadImage(options));
    }
    const result = await Promise.all(promises);
    const imgSRC = result.map((r) => { return r.Location });
    return imgSRC;
};
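Tying this back to the question, the MongoDB save should then run only after all uploads have resolved. A minimal sketch, reusing the question's Post model and its userID, contentID, and caption variables:

// Assumes image, userID, contentID, and caption from the question's scope.
const imgSRC = await mainFunction();
const newPost = new Post({
    userID: userID,
    contentID: contentID,
    posts: [{ caption: caption, data: imgSRC }],
});
const post = await newPost.save(); // imgSRC is fully populated at this point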
If you want to use await with the s3.upload method, you should remove the callback and chain .promise() instead (in the v2 SDK, upload without a callback returns a ManagedUpload, not a promise):
try {
    const data = await s3.upload(params).promise();
    imgSRC.push(data.Location);
    console.log(imgSRC);
} catch (e) {
    console.log(e);
    res.status(500).json({ msg: "server error" });
}
Let me know if it works.

Query an S3 JSON file in AWS

I have a JSON file uploaded to S3, and I wrote the following code to query this file:
const aws = require('aws-sdk');
const s3 = new aws.S3();

const bucket = 'hotels.mserver.online';
const objectKey = 'hotelsrates.json';

exports.handler = (event, context, callback) => {
    // TODO implement
    const response = getS3Objects(bucket, objectKey); // s3.listObjectsV2({}).promise();
    console.log(response);
};

function getS3Objects(bucket, key) {
    return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
        .promise().then(file => { return file })
        .catch(error => { return error });
}
but the result is null.
I understand what you are trying to accomplish here but that is not the right way to do it.
function getS3Objects(bucket, key) {
    return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
        .promise().then(file => { return file })
        .catch(error => { return error });
}
The part above will still return a promise object, which means you need to handle it accordingly. Instead of const response = getS3Objects(bucket, objectKey); you want to do
getS3Objects(bucket, objectKey).then(response => console.log(response));
inside your handler function.
Furthermore, your usage of the s3.getObject function is incorrect: the first argument is a parameters object, and the second argument is a callback function.
s3.getObject(params, function (err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log(data);               // successful response
});
Therefore in your case, you want to modify your getS3Objects function a bit. If you want to use promises, then you can do it like this.
function getS3Objects(bucket, key) {
    return new Promise((resolve, reject) => {
        s3.getObject(
            {
                Bucket: bucket,
                Key: key,
                ResponseContentType: 'application/json'
            },
            (err, data) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(data);
                }
            }
        );
    });
}
Another way that you can do this is as follows:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function readFile(Bucket, Key) {
    const params = {
        Bucket,
        Key,
        ResponseContentType: 'application/json',
    };
    const f = await s3.getObject(params).promise();
    return f.Body.toString('utf-8');
}

readFile('mybucket', 'xyz.json').then(console.log);
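Since the goal is to query the file rather than just print it, the returned string can then be parsed. A minimal sketch, reusing readFile with the bucket and key from the question and assuming the object contains valid JSON:

readFile('hotels.mserver.online', 'hotelsrates.json')
    .then((text) => {
        const rates = JSON.parse(text); // raw string -> JavaScript object
        console.log(rates);
    })
    .catch(console.error);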
