AWS.DynamoDB.DocumentClient.get always returns null - node.js

I want to retrieve an item from a DynamoDB table through a Lambda function. However, the following code always returns null.
async function getData(userId) {
  let documentClient = new AWS.DynamoDB.DocumentClient();
  let params = {
    TableName: 'mytable',
    Key: {
      user_id: userId
    }
  };
  let result = await documentClient.get(params).promise();
  console.log(result);
  return result;
}
module.exports.handler = async event => {
  let test = getData('test');
  return { statusCode: 200, body: test };
};
Any idea?

Based on the comments, the solution was to await the getData call:
module.exports.handler = async event => {
  let test = await getData('test');
  return { statusCode: 200, body: test };
};
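Note also that get() resolves to an envelope of the form { Item: {...} }, and, if this function sits behind API Gateway, body must be a string. A sketch of the handler with both points applied (table and key names taken from the question):

const AWS = require('aws-sdk');
const documentClient = new AWS.DynamoDB.DocumentClient();

async function getData(userId) {
  const params = {
    TableName: 'mytable', // table name from the question
    Key: { user_id: userId }
  };
  // get() resolves to { Item: {...} } when found, or {} when not
  const result = await documentClient.get(params).promise();
  return result.Item;
}

module.exports.handler = async event => {
  const test = await getData('test');
  // API Gateway expects body to be a string
  return { statusCode: 200, body: JSON.stringify(test) };
};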

Related

Call DynamoDb scan recursively when Promisified

I need to get some data from DynamoDB using the scan() method. I have implemented some basic pagination by calling my function recursively n times to get the correct page.
Currently, I call my function and, inside the scan() callback, if the data can be sent back, I use the handler callback to return the data.
CURRENT CODE
const AWS = require('aws-sdk')
const docClient = new AWS.DynamoDB.DocumentClient()
const TABLE_NAME = process.env.TABLE_NAME
const DEFAULT_PAGE_SIZE = 500
const DEFAULT_PAGE_NUMBER = 1

const self = {
  handler: (event, context, callback) => {
    const {pageNumber, pageSize} = event.queryStringParameters ? event.queryStringParameters : {pageNumber: DEFAULT_PAGE_NUMBER, pageSize: DEFAULT_PAGE_SIZE}
    const params = {
      TableName: TABLE_NAME,
      Limit: pageSize ? pageSize : DEFAULT_PAGE_SIZE
    }
    return self.scan(params, pageNumber, 1, callback)
  },
  scan: (params, pageNumber, pageCount, callback) => {
    docClient.scan(params, (err, data) => {
      if (err) {
        return callback(null, {
          statusCode: 500,
          body: JSON.stringify(err)
        })
      }
      if (data.LastEvaluatedKey && pageCount < pageNumber) {
        pageCount += 1
        params.ExclusiveStartKey = data.LastEvaluatedKey
        self.scan(params, pageNumber, pageCount, callback)
      } else {
        callback(null, {
          statusCode: 200,
          body: JSON.stringify(data)
        })
      }
    })
  }
}

module.exports = self
The above code does work, allowing me to specify a pageSize and pageNumber query parameter.
However, I want to Promisify self.scan.
I tried the following, but it results in the response being undefined:
DESIRED CODE
const AWS = require('aws-sdk')
const docClient = new AWS.DynamoDB.DocumentClient()
const ORGANISATION_TYPES_TABLE_NAME = process.env.ORGANISATION_TYPES_TABLE_NAME
const DEFAULT_PAGE_SIZE = 500
const DEFAULT_PAGE_NUMBER = 1

const self = {
  handler: (event, context, callback) => {
    const {pageNumber, pageSize} = event.queryStringParameters ? event.queryStringParameters : {pageNumber: DEFAULT_PAGE_NUMBER, pageSize: DEFAULT_PAGE_SIZE}
    const params = {
      TableName: ORGANISATION_TYPES_TABLE_NAME,
      Limit: pageSize ? pageSize : DEFAULT_PAGE_SIZE
    }
    return self.scan(params, pageNumber, 1).then((response) => {
      callback(null, {
        statusCode: 200,
        body: JSON.stringify(response)
      })
    }).catch((err) => {
      callback(null, {
        statusCode: 500,
        body: JSON.stringify(err)
      })
    })
  },
  scan: (params, pageNumber, pageCount) => {
    return new Promise((resolve, reject) => {
      docClient.scan(params, (err, data) => {
        if (err) {
          reject(err)
        }
        if (data.LastEvaluatedKey && pageCount < pageNumber) {
          pageCount += 1
          params.ExclusiveStartKey = data.LastEvaluatedKey
          self.scan(params, pageNumber, pageCount, callback)
        } else {
          resolve(data)
        }
      })
    })
  }
}

module.exports = self
I also tried just doing return Promise.resolve(data) inside the docClient.scan() callback, but that doesn't work either. It's as if promises cannot be resolved inside a callback?
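Promises can in fact be resolved inside a callback. The snag in the DESIRED CODE is that the recursive branch neither resolves nor rejects the outer promise (and it passes a callback that no longer exists), so the promise returned to the handler never settles with the data. A minimal sketch of a fix that keeps the same structure, using the fact that resolve() accepts a promise:

scan: (params, pageNumber, pageCount) => {
  return new Promise((resolve, reject) => {
    docClient.scan(params, (err, data) => {
      if (err) {
        return reject(err)
      }
      if (data.LastEvaluatedKey && pageCount < pageNumber) {
        params.ExclusiveStartKey = data.LastEvaluatedKey
        // resolve with the recursive call's promise; the outer promise
        // settles when the inner one does
        resolve(self.scan(params, pageNumber, pageCount + 1))
      } else {
        resolve(data)
      }
    })
  })
}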
I have recently helped someone with this problem; there's actually quite an elegant solution that we hit upon that uses the hasNextPage() method on the response you get from the SDK. The key is to have your recursive function pass an array that holds your results through the recursive calls, concatenating until you run out of pages, and then just return the array.
const scan = async params => {
  function scanRec(promise, xs) {
    return promise.then(async result => {
      const response = result.$response;
      const items = xs.concat(result.Items);
      return response.hasNextPage()
        ? scanRec(response.nextPage().promise(), items)
        : items;
    });
  }
  return scanRec(docClient.query(params).promise(), []);
};
You'd then use the function in the normal way:

const params = { /** params **/ };

scan(params).then(x => {
  // ...
})
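Note that the helper above is named scan but actually calls docClient.query() under the hood; if you need a full table scan rather than a query, the same pattern should work with docClient.scan(params).promise() as the starting promise.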

Lambda Invoke not triggering second lambda

I have gone through similar threads to fix this issue but I have had no luck. Both lambdas can be triggered independently of one another, and I am able to invoke the second Lambda through the command line, but my code does not work.
'use strict'
/* eslint max-statements: ['error', 100, { 'ignoreTopLevelFunctions': true }] */
const RespHelper = require('../../lib/response')
const { uuid } = require('uuidv4')
const AWS = require('aws-sdk')
const DB = require('./dynamo')

const respHelper = new RespHelper()
const Dynamo = new DB()
const lambda = new AWS.Lambda({
  region: 'us-west-2'
})

const secondLambda = async (lambdaData) => {
  var params = {
    LogType: 'Tail',
    FunctionName: 'second_lambda_name',
    InvocationType: 'RequestResponse',
    Payload: JSON.stringify(lambdaData)
  }
  lambda.invoke(params, function (err, data) {
    if (err) {
      console.log(err)
    } else {
      console.log(`Success: ${data.Payload}`)
    }
  })
}

exports.handler = async event => {
  const id = uuid()
  let bodyData = {
    uuid: id,
    user: 'owner#email.com',
    processingStatus: 'IN_PROGRESS'
  }
  let payloadData = {
    uuid: id,
    user: 'owner#email.com',
    processingStatus: 'COMPLETE'
  }
  try {
    await Dynamo.writeRecordToDB(bodyData)
    await secondLambda(payloadData)
    return respHelper.sendResponse(200, { message: bodyData })
  } catch (err) {
    console.log(`Failure: ${err}`)
    return respHelper.sendResponse(400, { message: 'ERROR' })
  }
}
I have double checked the lambda role and it has the Invoke Lambda and Invoke Asynchronous Invoke permission on all resources. Console outputs don't give me any indication of why this is not working. Any help is appreciated.
You're awaiting a callback when you need to await a promise:
const secondLambda = async lambdaData =>
  lambda
    .invoke({
      LogType: 'Tail',
      FunctionName: 'second_lambda_name',
      InvocationType: 'RequestResponse',
      Payload: JSON.stringify(lambdaData),
    })
    .promise()
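With that change, the handler's await secondLambda(payloadData) actually waits for the invocation to complete, and the resolved value carries the response; for example:

const { StatusCode, Payload } = await secondLambda(payloadData)
console.log(`Invoked with status ${StatusCode}: ${Payload}`)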

Cannot return array if item not present in dynamodb

I have a function that takes an array of jobs as a parameter. The function checks the existence of each job in the database by its id.
If a job is not present in the database, that particular job needs to be pushed into an array called latestJobs. I'm calling this function in my main.js file, but the code breaks and stops.
Below is my main.js code:
module.exports.app = async () => {
  try {
    ...
    const jobs = await getJobsForCountries(body);
    const latestJobs = await filterPreDraftedJobs(jobs);
    console.log('latestJobs', latestJobs);
  } catch (e) {
    console.error('Error:- ', e); // Comes to here
  }
};
My checker function looks like:
module.exports = async (jobs) => {
  let latestJobs = [];
  for (const job of jobs) {
    const params = {
      TableName: process.env.DYNAMODB_TABLE,
      Key: {
        id: job.Id
      }
    };
    await dynamoDb.get(params, (err, data) => {
      if (err) {
        latestJobs.push(job);
        console.log('Job not found in DB');
      }
    }).promise();
  }
  return latestJobs;
};
How can I fix this issue? I want latestJobs to contain only the jobs that are not present in the database. Is there a function for DynamoDB which can do this for me?
You are mixing callback, promise, and await styles. I would do it like this:
module.exports = async (jobs) => {
  let latestJobs = [];
  for (const job of jobs) {
    const params = {
      TableName: process.env.DYNAMODB_TABLE,
      Key: {
        id: job.Id
      }
    };
    try {
      const result = await dynamoDb.get(params).promise();
      // a missing item resolves with an empty object rather than an error,
      // so check for the Item property instead of relying on catch
      if (!result.Item) {
        latestJobs.push(job);
      }
    } catch (err) {
      console.error('Error fetching job', job.Id, err);
    }
  }
  return latestJobs;
};
Also, make sure the table is created and that the region and table name you are passing are correct.
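For instance, a minimal sketch of pinning the configuration explicitly (the region here is an assumption; match it to where the table actually lives):

const AWS = require('aws-sdk');

// region is an assumption; use the region your table is in
const dynamoDb = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });
console.log('Using table:', process.env.DYNAMODB_TABLE);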
I am not very familiar with DynamoDB, but based on the conversation above the code should be something like this. I have tried to improve performance while keeping the code modular and readable.
async function addUpdateJobs(jobs) {
  let paramsArray = [];
  for (const job of jobs) {
    const jobParams = {
      params: {
        TableName: process.env.DYNAMODB_TABLE,
        Key: {
          id: job.Id
        }
      },
      job: job
    };
    paramsArray.push(jobParams);
  }
  return await getJobs(paramsArray);
}

async function getJobs(paramsArray) {
  const latestJobs = [];
  // run the lookups in parallel and wait for all of them to finish
  await Promise.all(paramsArray.map(async (jobParam) => {
    try {
      const result = await dynamoDb.get(jobParam.params).promise();
      // a missing item resolves with an empty object, not an error
      if (!result.Item) {
        latestJobs.push(jobParam.job);
      }
    } catch (err) {
      console.error('Error fetching job', err);
    }
  }));
  return latestJobs;
}
PS: I was also going through error handling in Amazon DynamoDB.

Chained AWS Lambda calls in an orchestration

I am attempting to create an orchestration AWS lambda that calls two other AWS lambdas. These two other AWS lambdas can be invoked in their own right but in certain cases, there is a need for orchestration.
My orchestration lambda looks like this:
module.exports.orchestration = async (event, context, callback) => {
  const lambdaAPromise = lambdaA();
  const lambdaBPromise = lambdaB();
  const lambdaAResponse = await lambdaAPromise;
  const lambdaBResponse = await lambdaBPromise;
  if (lambdaAResponse && lambdaBResponse) {
    console.log(
      "Both streams responded with: ",
      lambdaAResponse,
      lambdaBResponse
    );
    var orchestrationResponse = [];
    orchestrationResponse.push(lambdaAResponse);
    orchestrationResponse.push(lambdaBResponse);
    const orchestrationSucceeded = {
      statusCode: 200,
      isBase64Encoded: false,
      body: orchestrationResponse
    };
    callback(null, orchestrationSucceeded);
  } else {
    console.log(
      "At least one stream has not responded: ",
      lambdaAResponse,
      lambdaBResponse
    );
    const orchestrationFailed = {
      statusCode: 400,
      isBase64Encoded: false,
      body: someresponse
    };
    callback(null, orchestrationFailed);
  }
};
function lambdaA() {
  var payload = {
    groupNumber: requestBody.groupNumber
  };
  var params = {
    FunctionName: process.env.CCE_FUNCTION_NAME,
    InvocationType: "RequestResponse",
    LogType: "Tail",
    Payload: JSON.stringify(payload)
  };
  return lambda
    .invoke(params)
    .promise()
    .then(({ Payload }) => {
      var payload = JSON.parse(Payload);
      return payload.body;
    });
}

function lambdaB() {
  var payload = {
    groupNumber: requestBody.groupNumber
  };
  var params = {
    FunctionName: process.env.CCE_FUNCTION_NAME,
    InvocationType: "RequestResponse",
    LogType: "Tail",
    Payload: JSON.stringify(payload)
  };
  return lambda
    .invoke(params)
    .promise()
    .then(({ Payload }) => {
      var payload = JSON.parse(Payload);
      return payload.body;
    });
}
Both lambdaA and lambdaB functions look like this:
module.exports.lambdaA = (event) => {
  return new Promise((resolve) => {
    // do something ...
    resolve(/* boolean value */);
  });
};
My issue was that the await never took effect because I had an incorrect signature (I was still using callbacks rather than promises). I have updated the code snippets above; they are now working correctly.
Just wrapping up from the comments:
The issue was that lambdaA and lambdaB used callbacks, hence you could not await them. [From Snippet #1]
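As an aside, the two invocations above already run concurrently, because both promises are created before either is awaited. A slightly more compact sketch of the same pattern with Promise.all (note it rejects as soon as either invocation fails):

module.exports.orchestration = async () => {
  // start both invocations, then wait for both to settle
  const [lambdaAResponse, lambdaBResponse] = await Promise.all([
    lambdaA(),
    lambdaB()
  ]);
  if (lambdaAResponse && lambdaBResponse) {
    return {
      statusCode: 200,
      isBase64Encoded: false,
      body: [lambdaAResponse, lambdaBResponse]
    };
  }
  return { statusCode: 400, isBase64Encoded: false, body: "orchestration failed" };
};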

How do I batch delete with DynamoDB?

I am getting an error that "The provided key element does not match the schema". uuid is my primary partition key. I also have a primary sort key for version. I figured I could use batchWrite (docs) to delete all items with the same uuid.
My ES6 code is as follows:
delete(uuid) {
  const promise = new Promise();
  const params = {
    RequestItems: {
      [this.TABLE]: [
        {
          DeleteRequest: {
            Key: { uuid: uuid }
          }
        }
      ]
    }
  };
  // this._client references the DocumentClient
  this._client.batchWrite(params, function(err, data) {
    if (err) {
      // this gets hit with error
      console.log(err);
      return promise.reject(err);
    }
    console.log(result);
    return promise.resolve(result);
  });
  return promise;
}
Not sure why it is erroring on the key that is the primary. I have seen posts about needing other indexes for times when I am searching by something that isn't a key. But I don't believe that's the case here.
Here is a batch write delete request sample. This code has been tested and works fine. If you adapt it to your requirements, it should work.
Table Definition:-
Bag - Table Name
bag - Hash Key
No sort key in 'Bag' table
Batch Write Code:-
var AWS = require("aws-sdk");

AWS.config.update({
  region : "us-west-2",
  endpoint : "http://localhost:8000"
});

var documentclient = new AWS.DynamoDB.DocumentClient();

var itemsArray = [];
var item1 = {
  DeleteRequest : {
    Key : {
      'bag' : 'b1'
    }
  }
};
itemsArray.push(item1);

var item2 = {
  DeleteRequest : {
    Key : {
      'bag' : 'b2'
    }
  }
};
itemsArray.push(item2);

var params = {
  RequestItems : {
    'Bag' : itemsArray
  }
};

documentclient.batchWrite(params, function(err, data) {
  if (err) {
    console.log('Batch delete unsuccessful ...');
    console.log(err, err.stack); // an error occurred
  } else {
    console.log('Batch delete successful ...');
    console.log(data); // successful response
  }
});
Output:-
Batch delete successful ...
{ UnprocessedItems: {} }
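Note the UnprocessedItems map in the response: BatchWriteItem can return items it did not process when throughput is exceeded, and you are expected to retry them. A minimal sketch of such a retry loop (the delay length is an assumption):

async function batchWriteWithRetry(documentclient, params) {
  let request = params;
  do {
    const data = await documentclient.batchWrite(request).promise();
    if (!data.UnprocessedItems || Object.keys(data.UnprocessedItems).length === 0) {
      return;
    }
    // back off briefly, then resubmit only the unprocessed items
    await new Promise((resolve) => setTimeout(resolve, 1000));
    request = { RequestItems: data.UnprocessedItems };
  } while (true);
}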
This is doable with Node lambda, but there are a few things you need to consider to address concurrency while processing large databases:
Handle paging while querying all of the matching elements from a secondary index
Split into chunks of 25 requests as per BatchWrite/Delete requirements https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
Above 40,000 matches you might need a 1 second delay between cycles https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html
Here is a snippet that I wrote:
const AWS = require("aws-sdk");
const dynamodb = new AWS.DynamoDB.DocumentClient();
const log = console.log;

exports.handler = async (event) => {
  log(event);
  let TableName = event.tableName;
  let params = {
    TableName,
    FilterExpression: "userId = :uid",
    ExpressionAttributeValues: {
      ":uid": event.userId,
    },
  };
  let getItems = async (lastKey, items) => {
    if (lastKey) params.ExclusiveStartKey = lastKey;
    let resp = await dynamodb.scan(params).promise();
    let newItems = resp.Items.length
      ? items.concat(resp.Items.map((x) => x.id))
      : items;
    if (resp.LastEvaluatedKey)
      return await getItems(resp.LastEvaluatedKey, newItems);
    else return newItems;
  };
  let ids = await getItems(null, []);
  let idGroups = [];
  for (let i = 0; i < ids.length; i += 25) {
    idGroups.push(ids.slice(i, i + 25));
  }
  for (const gs of idGroups) {
    let delReqs = [];
    for (let id of gs) {
      delReqs.push({ DeleteRequest: { Key: { id } } });
    }
    let RequestItems = {};
    RequestItems[TableName] = delReqs;
    let d = await dynamodb
      .batchWrite({ RequestItems })
      .promise()
      .catch((e) => log(e));
  }
  log(ids.length + " items processed");
  return {};
};
Not sure why nobody provided a proper answer.
Here's a lambda I wrote in Node.js. It performs a full scan of the table, then batch-deletes the items, 25 per request.
Remember to change TABLE_NAME.
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({ apiVersion: '2012-08-10' });
//const { TABLE_NAME } = process.env;
const TABLE_NAME = "CHANGE ME PLEASE";

exports.handler = async (event) => {
  let params = {
    TableName: TABLE_NAME,
  };
  let items = [];
  let data = await docClient.scan(params).promise();
  items = [...items, ...data.Items];
  while (typeof data.LastEvaluatedKey != 'undefined') {
    params.ExclusiveStartKey = data.LastEvaluatedKey;
    data = await docClient.scan(params).promise();
    items = [...items, ...data.Items];
  }
  let leftItems = items.length;
  let group = [];
  let groupNumber = 0;
  console.log('Total items to be deleted', leftItems);
  for (const i of items) {
    const deleteReq = {
      DeleteRequest: {
        Key: {
          id: i.id,
        },
      },
    };
    group.push(deleteReq);
    leftItems--;
    if (group.length === 25 || leftItems < 1) {
      groupNumber++;
      console.log(`Batch ${groupNumber} to be deleted.`);
      const params = {
        RequestItems: {
          [TABLE_NAME]: group,
        },
      };
      await docClient.batchWrite(params).promise();
      console.log(
        `Batch ${groupNumber} processed. Left items: ${leftItems}`
      );
      // reset
      group = [];
    }
  }
  const response = {
    statusCode: 200,
    // Uncomment below to enable CORS requests
    // headers: {
    //   "Access-Control-Allow-Origin": "*"
    // },
    body: JSON.stringify('Hello from Lambda!'),
  };
  return response;
};
Be aware that you need to follow the instructions in the docs:
src: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
DeleteRequest - Perform a DeleteItem operation on the specified item. The item to be deleted is identified by a Key subelement:
Key - A map of primary key attribute values that uniquely identify the item. Each entry in this map consists of an attribute name and an attribute value. For each primary key, you must provide all of the key attributes. For example, with a simple primary key, you only need to provide a value for the partition key. For a composite primary key, you must provide values for both the partition key and the sort key.
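That last point is exactly what triggers the asker's error: the table has uuid as the partition key and version as the sort key, so each DeleteRequest must name both. A minimal sketch, assuming the version values to delete are already known (the table name is a placeholder):

// both key attributes are required for a table with a composite primary key
const params = {
  RequestItems: {
    'myTable': [ // table name is an assumption
      { DeleteRequest: { Key: { uuid: 'some-uuid', version: 1 } } },
      { DeleteRequest: { Key: { uuid: 'some-uuid', version: 2 } } }
    ]
  }
};
// this._client.batchWrite(params).promise()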
For batch delete, we can use batchWrite with DeleteRequest. Here is an example: we provide the tableName whose data is to be deleted, and the payload is an array of ids which we need to remove.
In a single request, up to 25 items can be deleted.
const AWS = require('aws-sdk');
const dynamodb = new AWS.DynamoDB.DocumentClient({ apiVersion: '2012-08-10' });

const tableName = "PlayerData";
const payload = [{ id: 101 }, { id: 105 }, { id: 106 }];

const deleteBatchData = async (tableName, payload, dynamodb) => {
  try {
    const response = await dynamodb.batchWrite({
      RequestItems: {
        [tableName]: payload.map(item => {
          return {
            DeleteRequest: {
              Key: {
                id: item.id
              }
            }
          };
        })
      }
    }).promise();
    return response;
  } catch (err) {
    console.log('Error in deleteBatchData ', err);
  }
};
Why not use PartiQL? This approach is much more readable. (This too has a limit of 25 items per request, just like BatchWriteItem.)
// Import required AWS SDK clients and commands for Node.js.
import { BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb";
import { ddbDocClient } from "../libs/ddbDocClient.js";

const tableName = process.argv[2];
const movieYear1 = process.argv[3];
const movieTitle1 = process.argv[4];
const movieYear2 = process.argv[5];
const movieTitle2 = process.argv[6];

export const run = async (
  tableName,
  movieYear1,
  movieTitle1,
  movieYear2,
  movieTitle2
) => {
  try {
    const params = {
      Statements: [
        {
          Statement: "DELETE FROM " + tableName + " where year=? and title=?",
          Parameters: [{ N: movieYear1 }, { S: movieTitle1 }],
        },
        {
          Statement: "DELETE FROM " + tableName + " where year=? and title=?",
          Parameters: [{ N: movieYear2 }, { S: movieTitle2 }],
        },
      ],
    };
    const data = await ddbDocClient.send(
      new BatchExecuteStatementCommand(params)
    );
    console.log("Success. Items deleted.", data);
    return "Run successfully"; // For unit tests.
  } catch (err) {
    console.error(err);
  }
};
run(tableName, movieYear1, movieTitle1, movieYear2, movieTitle2);
