Let me apologize for this abysmal code ahead of time. I have almost zero Node experience; all of my JS is written for React apps, with Elixir on the back end. I am struggling to write a correct Lambda function in NodeJS and have basically cobbled something together from Googling/SO/trial and error, etc.
What I'm doing is the following:
User wants to upload a file so they send some info to back end.
Back end generates a presigned key.
Front end sends file to S3.
S3 fires an event and the Lambda executes.
The Lambda checks the MIME type; if it's a bad file, it deletes the file from the S3 bucket and makes a DELETE API call to my backend to tell it to delete the row the photo upload belongs to.
Where I'm struggling is when I make the API call to my backend inside of the s3.deleteObject call, I am getting wildly inconsistent results. A lot of the time it sends two delete requests back to back in the same Lambda execution. Sometimes it seems like it never even calls the backend at all: the function just runs and shows complete without really logging anything to CloudWatch.
My code is as follows:
const aws = require('aws-sdk');
const s3 = new aws.S3({apiVersion: '2006-03-01'});
const fileType = require('file-type');
const imageTypes = ['image/gif', 'image/jpeg', 'image/png'];
const request = require('request-promise');
exports.handler = async (event, context) => {
  // Get the object from the event and show its content type
  const bucket = event.Records[0].s3.bucket.name;
  const key = decodeURIComponent(
    event.Records[0].s3.object.key.replace(/\+/g, ' ')
  );
  const params = {
    Bucket: bucket,
    Key: key,
  };
  try {
    const {Body} = await s3.getObject(params).promise();
    const fileBuffer = new Buffer(Body, 'base64');
    const fileTypeInfo = fileType(fileBuffer);
    if (
      typeof fileTypeInfo !== 'undefined' &&
      fileTypeInfo &&
      imageTypes.includes(fileTypeInfo.mime)
    ) {
      console.log('FILE IS OKAY.');
    } else {
      await s3
        .deleteObject(params, function(err, data) {
          console.log('FILE IS NOT AN IMAGE.');
          if (err) {
            console.log('FAILED TO DELETE.');
          } else {
            console.log('DELETED ON S3. ATTEMPTING TO DELETE ON SERVER.');
            const url = `http://MYSERVERHERE:4000/api/event/${params.Key.split('.')[0]}`;
            const options = {
              method: 'DELETE',
              uri: url,
            };
            request(options)
              .then(function(response) {
                console.log('RESPONSE: ', response);
              })
              .catch(function(err) {
                console.log('ERROR: ', err);
              });
          }
        })
        .promise();
    }
    return Body;
  } catch (err) {
    const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`;
    console.log(message);
    throw new Error(message);
  }
};
This has been driving me mad for days. Any help explaining why I'd get such unexpected results from a Lambda function like this is appreciated.
The problem is in your else branch. You pass a callback to s3.deleteObject and also call .promise() on the same request, which can send the delete twice, hence the back-to-back delete requests you're seeing. On top of that, the request() call to your backend is never awaited, so the handler can finish before the HTTP request completes; Lambda then freezes the execution environment, which is why sometimes nothing shows up in CloudWatch. Please check the updated else branch below with proper await use.
Please try the code below.
exports.handler = async (event, context) => {
  // Get the object from the event and show its content type
  const bucket = event.Records[0].s3.bucket.name;
  const key = decodeURIComponent(
    event.Records[0].s3.object.key.replace(/\+/g, ' ')
  );
  const params = {
    Bucket: bucket,
    Key: key,
  };
  try {
    const {Body} = await s3.getObject(params).promise();
    const fileBuffer = Buffer.from(Body); // Body is already a Buffer; new Buffer() is deprecated
    const fileTypeInfo = fileType(fileBuffer);
    if (
      typeof fileTypeInfo !== 'undefined' &&
      fileTypeInfo &&
      imageTypes.includes(fileTypeInfo.mime)
    ) {
      console.log('FILE IS OKAY.');
    } else {
      await s3.deleteObject(params).promise(); // if this fails, the catch block executes
      console.log('DELETED ON S3. ATTEMPTING TO DELETE ON SERVER.');
      const url = `http://MYSERVERHERE:4000/api/event/${params.Key.split('.')[0]}`;
      const options = {
        method: 'DELETE',
        uri: url,
      };
      let response = await request(options); // if this fails, the catch block executes
      console.log(response);
    }
    return Body;
  } catch (err) {
    console.log(err);
    const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`;
    console.log(message);
    throw new Error(message);
  }
};
S3 delete operations are eventually consistent in all regions.
Hence, per the AWS documentation (relevant excerpts):
A process deletes an existing object and immediately attempts to read it. Until the deletion is fully propagated, Amazon S3 might return the deleted data.
A process deletes an existing object and immediately lists keys within its bucket. Until the deletion is fully propagated, Amazon S3 might list the deleted object.
Ref: https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel
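If a follow-up read or list depends on the delete having propagated, one option is to poll with the SDK's built-in waiter. A minimal sketch, assuming the aws-sdk v2 client and the same {Bucket, Key} params shape as above:
const aws = require('aws-sdk');
const s3 = new aws.S3({apiVersion: '2006-03-01'});

async function deleteAndConfirm(params) {
  await s3.deleteObject(params).promise();
  // waitFor polls headObject until the key returns 404
  // (or rejects if the waiter gives up).
  await s3.waitFor('objectNotExists', params).promise();
  console.log('Delete propagated for', params.Key);
}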
I am trying to write a function in Lambda that requires a file from S3 to be read into a buffer. I have seen multiple examples of them being read into streams but none with buffers. My current code for getting the object is
exports.handler = async (event, context, callback) => {
    //console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
    const srcBucket = event.Records[0].s3.bucket.name;
    const srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
    const params =
    {
        Bucket: srcBucket,
        Key: srcKey
    };
    try
    {
        var slippiGame = s3.getObject(params, function(error, data)
        {
            if (error)
            {
                console.log(error);
            }
            else
            {
                const game = new SlippiGame(slippiGame);
            }
        });
    }
    catch (e)
    {
        console.log("fail");
        console.log(e);
    }
    console.log(slippiGame);
    return("success");
};
I know that this approach is wrong since my slippiGame variable isn't a buffer and the SlippiGame constructor only takes a local file or a buffer. Is there a way to either store the file from S3 temporarily in the Lambda directory or create a buffer to hold the S3 file in?
The Body attribute of data is already a Buffer.
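For example, a minimal sketch inside an async handler, reusing params from the question and the SlippiGame constructor (which, per the question, accepts a Buffer):
const data = await s3.getObject(params).promise();
console.log(Buffer.isBuffer(data.Body)); // true
const game = new SlippiGame(data.Body);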
If you prefer a file download, create a read stream from the response and pipe it to a write stream:
const s3 = new AWS.S3({apiVersion: '2006-03-01'});
const params = {Bucket: 'myBucket', Key: 'myImageFile.jpg'};
const file = require('fs').createWriteStream('/path/to/file.jpg');
s3.getObject(params).createReadStream().pipe(file);
One additional problem with your current code: you are mixing the async and the callback variants of the Lambda function handler. It's better to just use the async variant and get rid of callback, as in the sketch below.
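A hedged sketch of the handler in pure async style (s3 and SlippiGame assumed to be required as in the question):
exports.handler = async (event) => {
    const srcBucket = event.Records[0].s3.bucket.name;
    const srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
    try
    {
        const data = await s3.getObject({ Bucket: srcBucket, Key: srcKey }).promise();
        const game = new SlippiGame(data.Body); // Body is already a Buffer
        console.log(game);
        return "success";
    }
    catch (e)
    {
        console.log("fail");
        console.log(e);
        throw e;
    }
};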
I would like to mimic the following AWS call using the google-cloud/storage package
const params = {
    Body: data,
    Key: key,
    ContentType: type
};
return new Promise(function (resolve, reject) {
    bucket.putObject(params, function(error, data) {
        if (error) {
            console.log('ERROR: ', error);
            reject(error);
        }
        resolve(data);
    });
});
In the above call, if I pass some directory hierarchy in the Key param, the folder structure would be created and the file correctly placed.
For instance, if I pass the Key as
root/test_folder/input_file.json
Then the file would be placed as
s3://<bucket>/root/test_folder/input_file.json
I am unable to find a similar call in google-cloud/storage.
If I use the
<bucket>.upload()
method, I can place the file under a directory, but I can ONLY upload files!
await storage.bucket(bucketName).upload(filename, {
    destination: 'abc/xyz',
});
If I use the
file.save()
method, I can put data into storage, but now I cannot put this under a specific directory!
await file.save(contents);
I need some way of putting content into a directory structure in google-storage and the directory structure may not exist.
Sorry, I was wrong. This can simply be done with the file.save() method.
We just need to specify the path along with the filename.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('bucket');
const file = myBucket.file('xxx/yyy/my-file', { generation: 0 });
const contents = 'This is the contents of the file.';
file.save(contents, function(err) {
    if (err) {
        file.deleteResumableCache();
    }
});
The above would store the file under
bucket/xxx/yyy
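Note that Cloud Storage has no real directories; the slashes are simply part of the object name, so the xxx/yyy/ prefix does not need to exist beforehand. Also, file.save() returns a promise when called without a callback, so an await-based sketch of the same call would be:
try {
    await myBucket.file('xxx/yyy/my-file').save(contents);
} catch (err) {
    console.error('Save failed:', err);
}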
I am trying to access a file in a private S3 bucket from a lambda function identified by Cognito.
Reading the stream works outside a lambda but not inside a lambda
Creating a pre-signed url works inside a lambda
Waiting for the content to be ready as a string works inside a lambda
I've managed to get a pre-signed url to download the file. Using the same parameters, I've tried to write the read stream to a local file. A file gets created but it's empty. I couldn't catch any error in the process.
const s3 = new AWS.S3({ apiVersion: 'latest' });
const file = 's3Filename.csv'
const userId = event.requestContext.identity.cognitoIdentityId;
const s3Params = {
    Bucket: 'MY_BUCKET',
    Key: `private/${userId}/${file}`,
};
var fileStream = require('fs').createWriteStream('/path/to/my/file.csv');
var s3Stream = s3.getObject(s3Params).createReadStream();
// Try to print s3 stream errors
s3Stream
    .on('error', function (err) {
        console.error(err); // prints nothing
    });
// Try to print fs errors
s3Stream
    .pipe(fileStream)
    .on('error', function (err) {
        console.error('File Stream:', err); // prints nothing
    })
    .on('data', function (chunk) {
        console.log(chunk); // prints nothing
    })
    .on('end', function () {
        console.log('All the data in the file has been read'); // prints nothing
    })
    .on('close', function (err) {
        console.log('Stream has been Closed'); // prints nothing
    });
I am quite confident that my parameters are correct because I can get a pre-signed url that allows me to download the file.
console.log(s3.getSignedUrl('getObject', s3Params));
I can also read the file content using getObject().promise(). This could work but I'm parsing a CSV file and I'd rather go easy on the memory and parse the stream.
try
{
    const s3Response = await s3.getObject(s3Params).promise();
    let objectData = s3Response.Body.toString('utf-8');
    console.log(objectData);
}
catch (ex)
{
    console.error(ex);
}
Why is the file created from S3 stream empty? And why is there nothing that prints?
Could it be an access policy issue? If that's the case, why didn't I get any error when executing?
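One possibility worth checking (an educated guess, not something the question confirms): with an async handler, Lambda can freeze the execution environment as soon as the handler's promise resolves, so a pipe that was started but never awaited may be suspended before any data or error events fire. That would explain both the empty file and the silent logs. A sketch that keeps the handler alive until the stream finishes, using Node's built-in stream.pipeline (Node 10+):
const { pipeline } = require('stream');
const { promisify } = require('util');
const pipelineAsync = promisify(pipeline);

// inside the async handler:
await pipelineAsync(
    s3.getObject(s3Params).createReadStream(),
    require('fs').createWriteStream('/path/to/my/file.csv')
);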
I am playing with AWS Mobile Hub and React Native, and I was hoping it could speed up the backend hosting for me.
However, I cannot get its backend API working. After a long run through their docs, I've pinned the problem down to the interaction between its Lambda function and the DynamoDB service.
Any thoughts are greatly appreciated!
Problem#1
As the title says: my AWS Lambda function can send requests to DynamoDB but gets no response.
What went wrong here?
Or how can I get debug info from DynamoDB? (I googled around and enabled CloudTrail, but it doesn't seem to capture DynamoDB operation logs either.)
Lambda side
Here is the simplest Node.js 6.10 code:
const AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-2'});
const dynamodb = new AWS.DynamoDB.DocumentClient();
exports.handler = function(event, context, callback) {
    var responseCode = 200;
    var requestBody, httpMethod, res;
    console.log("request: " + JSON.stringify(event));
    // Request Body
    requestBody = event.body;
    /*testing dynamodb with put*/
    console.log("PUT begins");
    let putItemParams2 = {
        TableName: "xxxx-mobilehub-xxxx-Test", //tableName
        Item: {businessId:'putItemParams2r3', test:'yooo', hmm:'hhhh'}
    };
    console.log("putItemParams2: ", putItemParams2);
    dynamodb.put(putItemParams2, (err, data) => {
        console.log("putItemParams2");
        if (err) console.log("dynamodb err: ", err, err.stack); // an error occurred
        else console.log("dynamodb data: ", data); // successful response
    });
    console.log("PUT end");
    var response = {
        statusCode: responseCode
        //....
    };
    ...
    //comment out context.succeed here to avoid program termination before intended.
    //console.log("response: " + JSON.stringify(response))
    //context.succeed(response);
};
Logs
When the previous code is triggered, I can see these logs in AWS CloudWatch:
START RequestId: 3d7c5f7f-1b98-11e8-ad00-93a6d10c8f4e Version: $LATEST
[timestamp] PUT begins
[timestamp] putItemParams2: { TableName: 'xxx-mobilehub-xxxx-Test',
Item: { businessId: 'putItemParams2r3', test: 'yooo', hmm: 'hhhh'}}
[timestamp] put end
END RequestId: 3d7c5f7f-1b98-11e8-ad00-93a6d10c8f4e
So: no err, no data, no response. I checked my DynamoDB table and nothing was inserted.
Extra info
condition#1: this DynamoDB table has public access, since I want to rule out auth problems.
condition#2: I ensured that my Lambda function has access to these tables, e.g. arn:aws:dynamodb::xxxx:table/xxxx-mobilehub-xxxx- allows everything.
condition#3: I built myself a simple Node.js server to exercise the aws-sdk, and it works perfectly fine with the same code:
I am able to "get" and "put" items into and out of my DynamoDB table.
Problem#2
My react-native code uses 'aws-amplify-react-native'. API.put works fine, and the Lambda function is at least receiving that API call (from problem#1).
However, API.get returns a 403 error, and the Lambda function doesn't even log anything for this operation.
async function getBusiness(){
    const path = "/Test";
    const api = "TestCRUD";
    let queryGetBusiness = {body: {userId: "hmmm"}};
    try {
        let apiResponse = await API.get(api, path, queryGetBusiness)//.then((data)=>{console.log(data)});
        let apiResponseJson = await JSON.stringify(apiResponse);
        console.log("response from saving Business: " + apiResponseJson);
    }
    catch (e) {console.log(e);}
}
P.S. AWS could do much better with Mobile Hub; the documentation is lacking in detail, and awsmobile cloud-api invoke seems to have some problems as well.
const AWS = require('aws-sdk');
AWS.config.update({ region: 'us-east-2' });
const dynamodb = new AWS.DynamoDB.DocumentClient();
exports.handler = function (event, context, callback) {
    var responseCode = 200;
    var requestBody, httpMethod, res;
    console.log("request: " + JSON.stringify(event));
    // Request Body
    requestBody = event.body;
    /*testing dynamodb with put*/
    console.log("PUT begins");
    let putItemParams2 = {
        TableName: "xxxx-mobilehub-xxxx-Test", //tableName
        Item: { businessId: 'putItemParams2r3', test: 'yooo', hmm: 'hhhh' }
    };
    var response = {
        statusCode: responseCode // define the response before it is used below
    };
    console.log("putItemParams2: ", putItemParams2);
    dynamodb.put(putItemParams2, (err, data) => {
        console.log("putItemParams2");
        if (err) console.log("dynamodb err: ", err, err.stack); // an error occurred
        else {
            console.log("dynamodb data: ", data);
            context.succeed(response);
        }
        // Call these here
        console.log("PUT end");
    });
    //console.log("response: " + JSON.stringify(response))
};
Make sure you call context.succeed inside the callback function, as shown above.
You can also just use the third argument to the handler function, callback, instead of context.succeed, like callback(null, response); see the sketch below.
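A minimal sketch of that callback variant (reusing putItemParams2 and response from above):
dynamodb.put(putItemParams2, (err, data) => {
    if (err) {
        console.log("dynamodb err: ", err, err.stack);
        return callback(err);
    }
    console.log("dynamodb data: ", data);
    callback(null, response);
});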
Good day guys.
I have a simple question: How do I download an image from a S3 bucket to Lambda function temp folder for processing? Basically, I need to attach it to an email (this I can do when testing locally).
I have tried:
s3.download_file(bucket, key, '/tmp/image.png')
as well as (not sure which parameters will help me get the job done):
s3.getObject(params, (err, data) => {
    if (err) {
        console.log(err);
        const message = `Error getting object ${key} from bucket ${bucket}.`;
        console.log(message);
        callback(message);
    } else {
        console.log('CONTENT TYPE:', data.ContentType);
        callback(null, data.ContentType);
    }
});
Like I said, simple question, which for some reason I can't find a solution for.
Thanks!
You can get the image using the aws s3 api, then write it to the tmp folder using fs.
var fs = require('fs');
var params = { Bucket: "BUCKET_NAME", Key: "OBJECT_KEY" };
s3.getObject(params, function(err, data) {
    if (err) {
        console.error(err.code, "-", err.message);
        return callback(err);
    }
    fs.writeFile('/tmp/filename', data.Body, function(err) {
        if (err) {
            console.log(err.code, "-", err.message);
            return callback(err);
        }
        callback(null, '/tmp/filename'); // file is now on disk
    });
});
Out of curiosity, why do you need to write the file in order to attach it? It seems kind of redundant to write the file to disk so that you can then read it from disk.
If you're writing it straight to the filesystem you can also do it with streams. It may be a little faster/more memory friendly, especially in a memory-constrained environment like Lambda.
var fs = require('fs');
var path = require('path');
var params = {
Bucket: "mybucket",
Key: "image.png"
};
var tempFileName = path.join('/tmp', 'downloadedimage.png');
var tempFile = fs.createWriteStream(tempFileName);
s3.getObject(params).createReadStream().pipe(tempFile);
// Note: in an async handler, wait for the write stream's 'finish' event
// before returning, or the download may be cut off.
// Using NodeJS version 10.0 or later and promises (run inside an async handler)
const fsPromise = require('fs').promises;
try {
    const params = {
        Bucket: 's3Bucket',
        Key: 'file.txt',
    };
    const data = await s3.getObject(params).promise();
    await fsPromise.writeFile('/tmp/file.txt', data.Body);
} catch(err) {
    console.log(err);
}
I was having the same problem, and the issue was that I was using Runtime.NODEJS_12_X in my AWS lambda.
When I switched over to NODEJS_14_X it started working for me :').
Also note that the /tmp prefix is required; this writes directly to /tmp/file.ext.