I am new to AWS Lambda. I have a working Lambda function that logs the JSON data to CloudWatch and also writes it to an S3 bucket.
This is the function:
var AWS = require('aws-sdk');

exports.handler = function(event, context) {
    var s3 = new AWS.S3();
    // Only event.name is written to S3 at the moment
    var param = {Bucket: 'test', Key: 'test123', Body: event.name};
    console.log("EVENT DATA: " + param.Body);
    s3.upload(param, function(err, data) {
        if (err) console.log(err, err.stack); // an error occurred
        else console.log(data);               // successful response
        console.log('actually done!');
        context.done();
    });
    console.log('done?');
};
This is my JSON data:
{
"name": "XYZ ABC",
"value": 123
}
How should I push the whole JSON data given above to S3 and the CloudWatch logs, rather than just event.name?
Thanks.
Change event.name to JSON.stringify(event). If you get [object Object] somewhere, change it to JSON.stringify(event, null, 2).
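For example, a minimal sketch of that change applied to the params in the handler above (bucket and key are the same placeholders as in the question):
var param = {
    Bucket: 'test',
    Key: 'test123',
    Body: JSON.stringify(event, null, 2) // serialize the whole incoming event, not just event.name
};
console.log("EVENT DATA: " + param.Body); // the same string shows up in the CloudWatch log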
Related
I have created an API that, when called, triggers a Lambda function (written in Node.js) which takes a JSON array of objects and inserts the data into DynamoDB. For each object in the array, the function creates a PutRequest object, and when finished it calls batchWriteItem. When I test in the AWS console everything works fine, but when I try in Postman I get a 500 error. I know the event is different when it comes from Postman versus the console, and that you are supposed to reference event.body if you want to access the JSON. However, when I do that, event.body.forEach fails with "Cannot read property 'forEach' of undefined" in the console and a 500 error in Postman. Below is the code that works in the console.
var AWS = require('aws-sdk');
var dynamo = new AWS.DynamoDB({region: 'us-east-1'});

exports.handler = (event, context, callback) => {
    // Wrap callback so API Gateway gets a status code and body
    const done = (err, res) => callback(null, {
        statusCode: err ? '400' : '200',
        body: err ? err.message : res,
    });

    var params = {
        RequestItems: {
            "Lead": []
        }
    };

    // Build one PutRequest per object in the incoming array
    event.forEach(x => {
        params.RequestItems.Lead.push({
            PutRequest: {
                Item: {
                    "Address": {S: x.Address},
                    "City": {S: x.City},
                    "State": {S: x.State},
                    "Zipcode": {S: x.Zipcode},
                    "Owner_First_Name": {S: x.Owner_First_Name},
                    "Owner_Last_Name": {S: x.Owner_Last_Name}
                }
            }
        });
    });

    dynamo.batchWriteItem(params, done);
};
When the Lambda receives the JSON body from API Gateway, it is passed in as a JSON string.
To turn that string back into an object, you need to parse event.body:
const body = JSON.parse(event.body)
Then you can call body.forEach.
Hope this helps.
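For reference, a minimal sketch of that change applied to the handler above (same field names as in the question):
exports.handler = (event, context, callback) => {
    // With the API Gateway proxy integration, event.body arrives as a string
    const body = JSON.parse(event.body);
    body.forEach(x => {
        // build the PutRequest entries exactly as in the original code
    });
    // ... then call dynamo.batchWriteItem(params, done) as before
};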
Here's the section of the Node Lambda function that gets the email stored in S3. How do I just get the 'text/plain' content from the returned data object?
Do I need to include an NPM email-parsing dependency with the Lambda function (uploaded as a .zip), or should I use some regex in the Lambda to get the section I want? If so, what would that look like?
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var bucketName = '<your-ses-bucket>'; // placeholder: the bucket your SES receipt rule writes to

exports.handler = function(event, context, callback) {
    var sesNotification = event.Records[0].ses;
    // Retrieve the raw email from your bucket
    s3.getObject({
        Bucket: bucketName,
        Key: "ses/" + sesNotification.mail.messageId
    }, function(err, data) {
        if (err) {
            console.log(err, err.stack);
            callback(err);
        } else {
            // data.Body is the raw MIME message; the 'text/plain' part
            // still needs to be extracted here
        }
    });
};
It would be safer to use the mailparser package for the parsing:
const simpleParser = require('mailparser').simpleParser;

// data is the object returned by s3.getObject; its Body holds the raw email
simpleParser(data.Body, (err, mail) => {
    console.log(mail.text); // the 'text/plain' part of the message
});
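For context, a sketch of how this might sit inside the getObject callback from the question (assuming the SES receipt rule stores the raw email, as above):
s3.getObject({
    Bucket: bucketName,
    Key: "ses/" + sesNotification.mail.messageId
}, function(err, data) {
    if (err) { return callback(err); }
    simpleParser(data.Body, function(parseErr, mail) {
        if (parseErr) { return callback(parseErr); }
        callback(null, mail.text); // hand back just the plain-text body
    });
});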
I'm trying to make a call to the Amazon Rekognition service with NodeJS. The call is going through, but I receive an InvalidImageFormatException error that says:
Invalid Input, input image shouldn't be empty.
I'm basing my code off an S3 example:
var AWS = require('aws-sdk');
var rekognition = new AWS.Rekognition({region: 'us-east-1'});
//Create a bucket and upload something into it
var params = {
Image: {
S3Object: {
Bucket: "MY-BUCKET-NAME",
Name: "coffee.jpg"
}
},
MaxLabels: 10,
MinConfidence: 70.0
};
var request = rekognition.detectLabels(params, function(err, data) {
if(err){
console.log(err, err.stack); // an error occurred
}
else{
console.log(data); // successful response
}
});
The documentation states that the service only accepts PNG or JPEG images but I can't figure out what is going on.
I'm trying to upload a PDF to an S3 bucket using the Knox library, but I keep getting 505 errors and the PDFs won't save. My code:
// all of this works well
var knox = require('knox');
var client = knox.createClient(require('../path/to/config.js').knox);
client.putFile('tmp/file', '/prefix/key',
function(err, res) {
if (err) {
console.log("Error PUTing file in S3:", err);
}
console.log("S3 RESPONSE:", res.statusCode); // returns 505
}
);
Anyone have any insight into what I'm doing wrong? I've also tried setting my own headers using client.put(..), but I got the same 505 response.
Two possible reasons:
1) If this is your complete code, then you forgot to supply the key, secret, and bucket:
var client = knox.createClient({
key: '<api-key-here>'
, secret: '<secret-here>'
, bucket: 'learnboost'
});
2) There is a space in the file name that you are trying to upload.
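If the second case applies, a hypothetical sketch of stripping spaces out of the key before calling putFile (fileName stands in for however you derive the key):
var key = '/prefix/' + fileName.replace(/\s+/g, '_'); // replace spaces, or use encodeURIComponent(fileName)
client.putFile('tmp/file', key, function(err, res) {
    if (err) console.log("Error PUTing file in S3:", err);
    else console.log("S3 RESPONSE:", res.statusCode);
});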
This isn't an answer per se, and I'm still unsure about the 505 response above, but the AWS SDK that Amazon puts out works great if anyone is having similar issues with Knox. The above just becomes:
var aws = require('aws-sdk');
var fs = require('fs');

aws.config.loadFromPath('./path/to/config.json');
var s3 = new aws.S3();

var params = {
    Bucket: 'your-bucket',
    Key: 'your-key',
    Body: fs.readFileSync('/path/to/file.pdf')
};
s3.putObject(params, function(err, data) {
    if (err) {
        console.log("Error PUTing file:", err);
    }
    console.log("S3 RESPONSE:", data);
});
I've tried using both aws-sdk and knox, and I get a status code 301 when trying to upload images, with the message 'The bucket you are attempting to access must be addressed using the specified endpoint. Please send all future requests to this endpoint.' The same upload works in PHP.
var AWS = require('aws-sdk');
var fs = require('fs');

AWS.config.loadFromPath(__dirname + '/config/config.json');

// source and callback come from the surrounding code
fs.readFile(source, function (err, data) {
    var s3 = new AWS.S3();
    s3.client.createBucket({Bucket: 'mystuff'}, function() {
        var d = {
            Bucket: 'mystuff',
            Key: 'img/test.jpg',
            Body: data,
            ACL: 'public-read'
        };
        s3.client.putObject(d, function(err, res) {
            if (err) {
                console.log("Error uploading data: ", err);
                callback(err);
            } else {
                console.log("Successfully uploaded data to mystuff/img/test.jpg");
                callback(res);
            }
        });
    });
});
I actually solved this problem. In your config you have to have a region; since my bucket was "US Standard", I left the region blank and it worked.
config.json:
{ "accessKeyId": "<access-key-id>", "secretAccessKey": "<secret-access-key>", "region": "" }
Go to the S3 management console, select one of your files, and click on Properties -> look at the file link.

US Standard:
https://s3.amazonaws.com/yourbucket/
host in your console window: yourbucket.s3.amazonaws.com/

us-west-1:
https://s3-us-west-1.amazonaws.com/yourbucket/
host in your console window: yourbucket.s3-us-west-1.amazonaws.com/
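Once you know which region the bucket lives in, a minimal sketch of pointing the SDK at it (the region value here is just an example):
var AWS = require('aws-sdk');
var s3 = new AWS.S3({ region: 'us-west-1' }); // match the region you saw in the console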
Did you try .send()?
I can upload to S3 with the code below.
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/AWSRequest.html
var s3object = {Bucket: 'mystuff', Key: name, Body : data['data']};
s3.client.putObject(s3object).done(function(resp){
console.log("Successfully uploaded data");
}).fail(function(resp){
console.log(resp);
}).send();
I had the same problem with the new SDK and solved it by setting the endpoint option explicitly.
Reference: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#constructor_details
Snippet:
var AWS = require('aws-sdk');
var s3 = new AWS.S3({ endpoint :'https://s3-your-region-varies.amazonaws.com' }),
myBucket = 'your-bucket-name';
var params = {Bucket: myBucket, Key: 'myUpload', Body: "Test"};
s3.putObject(params, function(err, data) {
    if (err) {
        console.log(err);
    } else {
        console.log("Successfully uploaded data to " + myBucket + "/" + params.Key);
    }
});
Alternatively, you can solve this by setting the region in your config file; you just have to be precise about the region name.
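For example, a minimal config.json sketch with the region set explicitly (credential values are placeholders):
{ "accessKeyId": "<access-key-id>", "secretAccessKey": "<secret-access-key>", "region": "us-west-1" }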