I'm trying to put data into Dynamodb using serverless deployment. I have added the permission to write to Dynamodb.
The API always responds with {"message": "Internal server error"}, but it manages to put the data into the DB roughly once out of every 5–6 attempts.
Following is the serverless.yaml config
# serverless.yml fragment: wires the Lambda handler to an API Gateway HTTP event.
handler: dynamoUpdate.handler
events:
- http:
path: /createdbentry
# NOTE(review): GET on an endpoint that creates a DB entry is unusual —
# presumably this should be POST; confirm the intended REST semantics.
method: get
cors: true
Following is the code:
// Question code: AWS SDK setup and the Lambda entry point.
const AWS = require('aws-sdk')
AWS.config.update({ region: process.env.REGION || 'us-east-1' })
// Low-level DynamoDB client — items use explicit attribute types ({S: ...}).
var ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});
// Async handler: the resolved value becomes the API Gateway response.
exports.handler = async (event) => {
return await createDBEntry("i123","Working");
}
// Build an API Gateway proxy response carrying `body` with a JSON content type.
const sendRes = (status, body) => ({
  statusCode: status,
  headers: { "Content-Type": "application/json" },
  body,
});
// Writes one item to the 'emplist' table.
// BUG: this async function never awaits (or returns) the putItem call — the
// callback-style ddb.putItem(...) is fired and the function resolves to
// undefined immediately, so the Lambda environment is frozen before the
// network request completes (hence the intermittent "Internal server error").
const createDBEntry = async function(id,result) {
var params = {
TableName: 'emplist',
Item: {
// Low-level client requires explicit attribute types ({S: ...} = string).
'ID' : {S:id},
'Summary':{S: result},
}
};
ddb.putItem(params, function(err, data) {
console.log("Here comes me.")
if (err) {
console.log("Opps Error");
// NOTE(review): returning from inside this callback does not propagate
// to the Lambda handler — the value is discarded.
return sendRes(403,err);
} else {
console.log("Complete")
return sendRes(200,data);
}
});
}
How can it be resolved ?
The problem is you are not returning any promise or awaiting anything async in your function called createDBEntry. Thus your handler returns undefined which makes apigateway return 500 internal server error.
You are mixing callbacks and async/await.
Your createDBEntry function should look like this.
// Persist one record to the 'emplist' table and translate the outcome into
// an API Gateway response object. Awaiting the SDK's promise form guarantees
// the write has finished before the handler returns.
const createDBEntry = async function(id, result) {
  const item = {
    'ID': { S: id },
    'Summary': { S: result },
  };
  try {
    const data = await ddb.putItem({ TableName: 'emplist', Item: item }).promise();
    return sendRes(200, JSON.stringify(data));
  } catch (err) {
    console.log("Oops Error");
    return sendRes(403, err.toString());
  }
};
When you return from an async handler in Lambda, the current execution environment is frozen immediately (unlike when you use a non-async handler with a callback). This is why, most of the time, the writes to the database do not succeed.
Related
We have the following code used as a Lambda function in the Serverless Framework, triggered every 2 minutes with cron. The issue we are facing is that the writing to DynamoDB is inconsistent: we expect 3 writes, but instead we receive only 1 or 2 writes every 2 minutes.
DynamoDB has a HASH key of HOUR and a SORT key of DATE, and billing mode PROVISIONED. Has anyone faced the same behavior from DynamoDB, or can anyone share how they solved this issue? Thanks
"use strict";
const AWS = require("aws-sdk");
const axios = require("axios");
// DocumentClient marshals plain JS values (no {S: ...} wrappers needed).
const dynamoDb = new AWS.DynamoDB.DocumentClient();
// Lambda client used by the cron entry point below to fan out invocations.
const lambda = new AWS.Lambda({
region: "us-east-1",
});
// Fetches current weather for one OpenWeatherMap city id (passed as the raw
// event payload) and stores a reading in DynamoDB.
// BUG: the DynamoDB write is deferred inside setTimeout and never awaited, so
// callback(null, response) at the bottom ends the invocation — freezing the
// execution environment — before the timer fires. That is why only some of
// the expected writes land.
module.exports.getWeather = async (event, context, callback) => {
const openWeatherMapAPIURL = `http://api.openweathermap.org/data/2.5/weather?id=${event}&appid=XXXXXXXXXXXXXXXXXXXXXXX&units=metric`;
const currentWeather = await axios
.get(openWeatherMapAPIURL)
.then((records) => {
console.log(records);
// Hour/date come from the HTTP response headers, not the weather payload.
const d = new Date(records.headers.date);
let hour = d.getHours();
const params = {
TableName: process.env.DYNAMODB_TABLE_NAME,
Item: {
hour: hour,
date: records.headers.date,
city: records.data.name,
temp: records.data.main.temp,
feelsLike: records.data.main.feels_like,
description: records.data.weather[0].description,
},
};
// NOTE(review): the 3s delay serves no purpose, and the put's outcome is
// discarded — this should be `await dynamoDb.put(params).promise()`.
setTimeout(function () {
dynamoDb.put(params, (error) => {
// handle potential errors
console.log(`zapis na: ${records.data.name} ${records.headers.date}`);
if (error) {
console.log(error);
console.error(error);
return;
}
});
}, 3000);
})
.catch((error) => {
console.log(error);
return;
});
const response = {
statusCode: 200,
body: JSON.stringify({
message: `Weather from ${event} was requested!`,
}),
};
callback(null, response);
};
// Cron entry point: invokes the getWeather function once per city id.
// NOTE(review): the invoke results are never awaited and `callback` is never
// called, so this handler can end before all three RequestResponse
// invocations are actually dispatched.
module.exports.cron_launcher = (event, context, callback) => {
const requestedID = ["786735", "792578", "785842"];
// The forEach parameter shadows the `requestedID` array above.
requestedID.forEach((requestedID) => {
const params = {
FunctionName: process.env.HANDLER_LOCATION + "-getWeather",
InvocationType: "RequestResponse",
Payload: JSON.stringify(requestedID),
};
// `return` inside forEach is discarded — it does not wait for anything.
return lambda.invoke(params, function (error, data) {
if (error) {
console.error(JSON.stringify(error));
return new Error(`Error printing messages: ${JSON.stringify(error)}`);
} else if (data) {
console.log(data);
}
});
});
};
You are not waiting for the dynamodb.put operation to finish. Additionally, you are wrapping the call in a setTimeout. Your lambda function is returning before the network operation can be made. Make sure the put operation succeeds before returning a result from your lambda.
I see no reason for you to use a setTimeout here.
You can call dynamodb.put(...).promise() to get a promise from the dynamodb SDK and await that promise.
2.a Or you can continue using a callback, but wrap the entire section of code in a new promise object, calling the resolve method after the dynamodb.put call finishes.
I am trying to write an async lambda function which is calling a function for sign up a user in cognito.
My problem is that my Lambda function does not wait for the result and finishes execution early. Would you mind checking what my issue is? I am new to RxJS. Please help me.
mylambda function
// Question code: the handler awaits signup, but signup (below) returns
// undefined, so there is nothing to wait on and `source` logs as undefined.
exports.handler = async (event, context) => {
//poolData and params will fetch from event
let source = await signup(poolData, params);
console.log(source);
}); // NOTE(review): stray ")" — an assigned arrow function should end "};"
my signup function
// Registers a user in the Cognito user pool described by poolData.
// BUG: this function returns undefined — the observable work is started, but
// neither the observable nor a promise is returned, so callers awaiting
// signup(...) get nothing to wait on.
function signup(poolData, body) {
const userPool = new AmazonCognitoIdentity.CognitoUserPool(poolData);
const { username, password, attributes } = body;
const attributesList = [];
if (Array.isArray(attributes)) {
attributesList.push(
...attributes.map(item => new AmazonCognitoIdentity.CognitoUserAttribute(item))
);
}
// Wrap the callback-style userPool.signUp in an observable.
let source = Observable.create(observer => {
let output = (err, res) => {
if (err)
{
observer.error(err);
}
else
{
const cognitoUser = res.user;
const data = {
username: cognitoUser.getUsername(),
};
observer.next(data);
}
// NOTE(review): complete() after error() is a no-op in RxJS.
observer.complete();
}
userPool.signUp(username, password, attributesList, null, output);
});
let respond;
let subscriber = {
next(value) {
console.log('Subscriber - next: ', value);
respond = {
'statusCode': 200,
'body': JSON.stringify({
"username": value.username,
})
}
}, error(err) {
console.log('Subscriber - err: ', err);
respond = err;
},
complete() {
console.log('Subscriber - complete');
// NOTE(review): `response` is undefined — likely meant `respond`; the
// return value of complete() is ignored by RxJS regardless.
return response;
}
};
// Kicks off the request; nothing is returned to the caller.
source.subscribe(subscriber);
}
module.exports = signup;
This behavior is totally normal.
First things first: an observable is not a promise, which means you cannot await it with the await keyword. Also, I don't see anything being returned from the signup function, which will probably lead to undefined being logged anyway.
So how to fix that, one way to fix this issue is to use toPromise() which will turn your observable into a promise which then can be awaited wherever needed.
The other way (which is the rxjs way) will be to return from the signup function the observable and inside your handler function to subscribe for the response.
// Answer snippet: subscriber used by the RxJS-style approach — signup must
// be changed to `return source;` for this to work.
let subscriber = {
next(value) {
console.log('Subscriber - next: ', value);
respond = {
'statusCode': 200,
'body': JSON.stringify({
"username": value.username,
})
}
}, error(err) {
console.log('Subscriber - err: ', err);
respond = err;
},
complete() {
console.log('Subscriber - complete');
// NOTE(review): `response` is undefined here (likely meant `respond`);
// RxJS ignores complete()'s return value in any case.
return response;
}
};
exports.handler = (event, context) => {
//poolData and params will fetch from event
signup(poolData, params).subscribe(subscriber);
}) // NOTE(review): stray ")" — should be "};"
I'm trying to mock a call for putObject in a local Node.JS AWS Lambda function using Jest but for some reason I keep getting 0 number of calls in my expect.
Here is my main func(index.js):
const S3 = require("aws-sdk/clients/s3");
const s3 = new S3();
// Question code: uploads a fixed object to S3.
exports.handler = async (event) => {
// NOTE(review): assignment without const/let makes putFunction an implicit
// global (and throws in strict mode / ES modules).
putFunction = async (params, callback) => {
// Passing a callback AND calling .promise() registers two completion
// paths; the promise form alone (no callback) is the intended usage.
await s3.putObject(params, callback).promise();
};
const params = {
Bucket: "some value",
Key: "some key value",
ContentType: "application/json",
Body: "some body value",
};
const callback = {
function(err, data) {
console.log(JSON.stringify(err) + " " + JSON.stringify(data));
},
};
// NOTE(review): not awaited — the handler may return before the upload runs.
putFunction(params, callback);
}
I've tried adding async to my test function as I thought it was an asynchronous issue but that I still seem to be receiving the same error. Here is my test code(index.test.js):
let myHandler = require("../../src/lambda/index");
const mockedPutObject = jest.fn();
// Replace the S3 client class so putObject records its arguments.
jest.mock("aws-sdk/clients/s3", () => {
return class S3 {
putObject(params, cb) {
mockedPutObject(params, cb);
// NOTE(review): returns undefined — the handler's .promise() call will
// throw "Cannot read property 'promise' of undefined".
}
};
});
it("has to mock s3#putObject", () => {
const params = {
Bucket: "test1",
Key: "test2",
ContentType: "application/json",
Body: "test3",
};
const callback = {
function(err, data) {
console.log(JSON.stringify(err) + " " + JSON.stringify(data));
},
};
// NOTE(review): myHandler.handler.putFunction is undefined — putFunction is
// a local inside the handler, so nothing here invokes the mock (hence "0
// number of calls"). The handler itself is never called either, and
// `putFunc;` on its own is a no-op expression.
const putFunc = myHandler.handler.putFunction;
putFunc;
expect(mockedPutObject).toHaveBeenCalledWith(params, callback);
});
Any help would be great.
This is a Jest/Node only answer for those that don't wish to pull in any third-party mocking libraries like aws-sdk-mock.
The problem (without seeing the error itself in your question) is very likely related to the .promise() in your implementation code.
You have added this in the implementation to tell the SDK to return you a promise for whatever operation was called.
await s3.putObject(params, callback).promise();
That returned promise will either reject with an error or resolve with the data.
This means in the promise-based approach you can omit the callback completely.
await s3.putObject(params).promise();
(taken from this AWS blog post)
Fixing the handler...
You can either:
Put that callback logic in subsequent promise chain blocks:
.then((data) => {
// ... do stuff
})
.catch((err) => {
// ... handle error
}
or better still (as it looks like you're already embracing) the
more modern ES6 approach of
Awaiting the putObject promise within in a try-catch block:
try {
const data = await s3.putObject(params).promise()
// ... do things with data on successful response
} catch (err) {
// ... handle error
}
Putting those together
Your handler should look something like this:
const { S3 } = require("aws-sdk");
const s3 = new S3();
exports.handler = async (event) => {
const params = {
Bucket: "some value",
Key: "some key value",
ContentType: "application/json",
Body: "some body value",
};
try {
const data = await s3.putObject(params).promise();
// ... do stuff with data
return {
statusCode: 200,
body: JSON.stringify(data),
// ... etc.
}
} catch (err) {
// ... handle error
return {
statusCode: 400, // or any 4XX, 5XX
body: '...', // whatever you wish to return on error
// ... etc.
}
}
}
Fixing the tests...
Bearing in mind that you can omit the callback, the test code needs to reflect the extra .promise() in the call chain of the putObject in the handler.
In the test file, the SDK mock needs to be configured to:
a) return the top-level S3 constructor
b) have this S3 constructor itself return an object containing the putObject function
c) have this putObject itself return an object containing the promise function
So that it can be invoked as the real SDK would:
const { S3 } = require("aws-sdk"); // require("aws-sdk") returns { S3 }
const s3 = new S3() // returns { putObject }
await s3.putObject(params) // returns { promise }
.promise(); // returns ...your_mock_response
// You need to return the { promise } here even if you don't care about
// mock calls beyond the putObject, because the handler itself calls .promise()
// and will throw "TypeError: Cannot read property 'promise' of undefined".
//
// The variable name MUST begin with "mock" (case-insensitive): jest.mock()
// calls are hoisted above this declaration, and Jest only allows the hoisted
// factory to reference out-of-scope variables whose names start with "mock".
const mockPutObject = jest.fn(() => ({
  promise: jest.fn()
}));

jest.mock('aws-sdk', () => ({
  S3: jest.fn(() => ({
    putObject: mockPutObject,
  })),
}));
// S3 must be jest.fn(...) rather than an ordinary function, otherwise
// the `new S3()` in the handler will fail.
// Jest does its magic with the function you provide to make it callable as a constructor.

const myHandler = require("../../src/lambda/index");

// Don't forget to add the "async" before the "it" callback as your handler is async.
it("has to mock s3#putObject", async () => {
  const params = {
    Bucket: "test1",
    Key: "test2",
    ContentType: "application/json",
    Body: "test3",
  };

  // Invoke the handler via the imported module object (a bare `handler` is
  // undefined here), then assert on the params the mock recorded.
  await myHandler.handler();
  expect(mockPutObject).toHaveBeenCalledWith(params);
});
Final note - add your handler import after the mock setup to prevent a "Cannot access 'putObject' before initialization" error (caused by the handler's require of the SDK).
Hope this helps!
I am writing node js 10.x lambda function to put details into DynamoDB table.
Below is code
const AWS = require('aws-sdk');
var db = new AWS.DynamoDB.DocumentClient();
var tableName="xyz";
// Question code: stores event.userid and returns a CORS-enabled 200 response.
exports.handler = async (event) => {
// TODO implement
console.log("Event: "+ JSON.stringify(event));
var response = {
statusCode: 200,
"headers": {
"Access-Control-Allow-Origin" : "*",
"Access-Control-Allow-Credentials" : true
},
};
// NOTE(review): a callback AND .promise() are attached to the same request —
// the SDK registers both completion paths, which appears to be what produces
// the varying number of "Success" logs across invocations. Use the promise
// form only (drop the callback).
await db.put({
TableName: tableName,
Item: {
userid: event.userid,
}
}, (error, data) => {
if (error) {
console.log("error:"+ error);
}
else{
console.log("Success");
}
}).promise();
return response;
};
I am getting kind on random number of success return
Output execution 1
2019-11-07T07:03:45.388Z f451dfc1-01ea-41d0-a998-945cb0f18be1 INFO Success
2019-11-07T07:03:45.510Z f451dfc1-01ea-41d0-a998-945cb0f18be1 INFO Success
2019-11-07T07:03:45.511Z f451dfc1-01ea-41d0-a998-945cb0f18be1 INFO Success
Output execution 2
2019-11-07T07:08:19.270Z 3ce51f5d-bbbc-4dd6-b46f-2149ee9bb9cf INFO Success
Output execution 3
2019-11-07T07:08:27.410Z 2625bba5-b8e1-40e4-8704-7c0d486f6dff INFO Success
2019-11-07T07:08:27.431Z 2625bba5-b8e1-40e4-8704-7c0d486f6dff INFO Success
**
does anyone know the cause of this problem?
I am relatively new to node js 10.x. so please help me if I have missed something in code
**
you are using a callback and promise at the same time, remove the callback.
You can try something like
exports.handler = async (event, context) => {
const params = {
TableName: tableName,
Item: {
userid: event.userid,
}
};
try {
const data = await dynamoDB.put(params).promise();
console.log("Data: ", data);
} catch(error) {
console.error("Error:", error);
}
}
I have a lambda function written in node.js that returns a QRCode Image. I am also trying to read a value from the Dynamodb. However, the console logs inside it do not seem to be executed which makes me think the code is not being run.
I suspect this is due to so synchronization issues. But I am not sure what to do to fix it. The code is below:
var qrImage = require('qr-image');
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region:'us-west-2'});
// Question code: returns an SVG QR code for the id taken from the request path.
exports.handler = async(event, context, callback) => {
var path = event.path;
// Strip every "/" from the path to get the raw id.
var drugId = path.replace(/\//g, '');
var params = {
TableName: 'QRCodeInfo',
Key: {
"DrugId" : "1234"
}
};
// BUG: this callback-style get() is never awaited — the handler returns on
// the next statement and the execution environment is frozen before the
// DynamoDB response arrives, so these logs rarely (if ever) run. Use
// `await docClient.get(params).promise()` instead.
docClient.get(params, function(err,data) { //does not get executed
if (err) {
console.log(err);
} else {
console.log(data);
}
});
return sendRes(200,drugId); //this works. Image is seen.
};
// Render `body` as an SVG QR code and wrap it in an API Gateway response.
const sendRes = (status, body) => ({
  statusCode: status,
  headers: { "Content-Type": "image/svg+xml" },
  body: qrImage.imageSync(body, { type: 'svg', size: 10 }),
});
You are probably exiting the lambda before the callback of the dynamodb call has had a chance to execute.
Try calling callback(null, data) in the callback of the dynamo call, after your console.log and similar in the err scenario e.g. callback(err)
You do not exit a lambda by calling return, you should be calling callback() (that's why it's available as the 3rd argument of the lambda) see https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html#nodejs-prog-model-handler-callback