How do I use batchWriteItem with Node.js to query AWS DynamoDB?

I'm new to AWS and I'm having problems trying to develop a simple Lambda function with Node.js. In DynamoDB, I have a table named Game with 3 attributes: gamepk, user, result.
In a single execution of the Lambda function, I want to insert a collection of game elements (the number of elements in the collection can vary).
I have been reading some tutorials which say I should use batchWriteItem, but because the collection of Game elements is variable I don't know how to proceed.
Could somebody write a function in Node.js that solves my problem?
An example of the JSON that the Lambda function receives is this one:
{
    "games": [
        {
            "gamepk": "1",
            "user": "rolo",
            "result": "1-0"
        },
        {
            "gamepk": "2",
            "user": "jhon",
            "result": "1-1"
        }
    ]
}

This should get you what you need.
Create a new Lambda function
Select Node version 6
Select a role, or create a new one, that has write access to DynamoDB
Open the created function in the web console
Paste the snippet below into the Cloud9 editor
// Load the full SDK; 'aws-sdk/global' alone does not include the DynamoDB DocumentClient
const AWS = require('aws-sdk');

exports.handler = (event, context, callback) => {
    // The event parameter is the input to your lambda function
    console.log(JSON.stringify(event));

    let lambdaInput = event['PROPERTY_NAME_DEFINED_IN_POST']; // e.g. event['games']
    let games = [];
    let documentClient = new AWS.DynamoDB.DocumentClient();

    // Build one PutRequest per game, however many the input contains
    lambdaInput.forEach(item => {
        games.push({
            PutRequest: {
                Item: {
                    gamepk: item['gamepk'],
                    user: item['user'],
                    result: item['result']
                }
            }
        });
    });

    let params = {
        RequestItems: {
            'TABLE_NAME': games
        }
    };

    documentClient.batchWrite(params, function(err, data) {
        if (err) {
            callback(err);
        } else {
            callback(null, data);
        }
    });
};
Save the function, then select the drop-down at the top that says 'Select a test event' and choose 'Configure test events'.
This opens a new dialog; put the JSON you want to test with in the main text area, give the test a name, and save it.
Now select the test you just created from the 'Select a test event' drop-down and click 'Test' in the top right.
This documentation can be found in the AWS JavaScript SDK.
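One caveat worth noting: BatchWriteItem accepts at most 25 put or delete requests per call, so if the games collection can exceed that you will need to split it into batches. A minimal sketch, assuming the same documentClient, games array, and callback from the snippet above:
// Hedged sketch: split the PutRequest array into batches of 25, because
// a single BatchWriteItem call rejects more than 25 request items.
const chunks = [];
for (let i = 0; i < games.length; i += 25) {
    chunks.push(games.slice(i, i + 25));
}

// Issue one batchWrite per chunk and wait for all of them to finish.
Promise.all(
    chunks.map(chunk =>
        documentClient.batchWrite({
            RequestItems: { 'TABLE_NAME': chunk }
        }).promise()
    )
)
    .then(data => callback(null, data))
    .catch(err => callback(err));
Note that this sketch does not retry UnprocessedItems, which batchWrite can return even on a successful call.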

As an additional note on the answer above, you can put the table name in an environment variable and use it in the following way:
const MI_TABLE = process.env.MI_TABLE;

let params = {
    RequestItems: {
        [MI_TABLE]: games
    }
};

// assuming an async wrapper around documentClient.batchWrite(params).promise()
return await batchWrite(params);

Related

Query DynamoDB to check if a particular item exists or not

I just started out with Node.js and AWS DynamoDB and I am stuck with a very basic problem. I would like to query my DynamoDB which has a "Users" table with "Username"(PKey) and "JoinedOn" columns. Now I would like to write a Lambda function to check if a particular Username exists or not and return a response on the basis of that. Currently I am using the below code:
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();

const params = {
    TableName: 'Users',
    Key: {
        Username: 'user1'
    }
};

async function getItem() {
    try {
        const data = await docClient.get(params).promise();
        return data;
    } catch (err) {
        return err;
    }
}

exports.handler = async (event, context) => {
    try {
        const data = await getItem();
        return { body: JSON.stringify(data) };
    } catch (err) {
        return { error: err };
    }
};
I am currently hardcoding the key (Username: 'user1') in my code, but I want to avoid that so I can use the same code to check whether any given username exists in the "Users" table.
I want to use SAM. My event.json should look like the below, as I want to check whether user1 exists in "Users" or not:
{
    "Username": "user1"
}
On running "sam local invoke TestFunction -e events/event.json", I should get a response of "User exists" or "User does not exist".
I am new to AWS and development. Any help is really appreciated.
Thank you in advance!
You need to define that username in the environment variables section of the Lambda function.
Then, inside the Lambda code, you can simply read process.env.username.
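A minimal sketch of that approach, assuming an environment variable named username has been configured on the Lambda function:
// Hedged sketch: read the key from the (assumed) "username" environment variable
// instead of hardcoding 'user1'.
const params = {
    TableName: 'Users',
    Key: {
        Username: process.env.username
    }
};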

How to create a share link in Buildfire

I'm attempting to create a share link that users within a plugin can send to friends via email or sms. If their friend has the app, the goal would be to open the app to the plugin with a query string similar to navigation.navigateTo does so that it would open to specific content within the plugin. If they don't, the goal would be to send them to the app's webpage where they could download the app from the appropriate store (android or apple). Is this possible or at least a portion of it?
I've generated a share link using buildfire.deeplink.generateUrl but can't find the appropriate steps from there in the API documentation.
Yes, the steps are as follows:
Generate the deep link URL:
buildfire.deeplink.generateUrl(options, callback)
Make sure to pass a data property in options; it represents the deep link data that the plugin needs once the user opens the share link. For more information on how to read this data from the plugin, see buildfire.deeplink.getData.
buildfire.deeplink.generateUrl(
    {
        data: { videoId: "9Q-4sZF0_CE" },
    },
    (err, result) => {
        if (err) {
            console.error(err);
        } else {
            console.log(result.url);
        }
    }
);
After generating the deep link, use the following function to open the device share options:
buildfire.device.share({ link: deepLinkUrl }, callback);
Finally, you have to handle the deep link data in your plugin so you can open the desired content based on the data you passed when generating the deep link URL; see buildfire.deeplink.getData.
For more details, check the docs.
Example
// share function
const share = () => {
    let deeplinkOptions = {};
    deeplinkOptions.title = 'Hello world';
    deeplinkOptions.type = "website";
    deeplinkOptions.description = 'First program';
    deeplinkOptions.imageUrl = '<IMAGE URL>';
    deeplinkOptions.data = {
        "link": vidId
    };
    buildfire.deeplink.generateUrl(deeplinkOptions, function (err, result) {
        if (err) {
            console.error(err);
        } else {
            let options = {
                link: result.url
            };
            let callback = function (err, result) {
                if (err) {
                    console.warn(err);
                }
            };
            buildfire.device.share(options, callback);
        }
    });
};
// Handle deep link data in your plugin
const handleDeepLinkData = () => {
    buildfire.deeplink.getData(function (data) {
        if (data && data.link) {
            let vidId = data.link;
            // Do what you want
        }
        // Do what you want
    });
};
Yeah, just share the result URL.

Facing this error "ValidationException: The parameter cannot be converted to a numeric value: NaN"

I am trying to insert data into AWS DynamoDB through an AWS Lambda function using an AWS HTTP API. FYI: the data type of the Id parameter in DynamoDB is originally Number, but it is treated as a String while parsing the JSON data, so I wrapped the Id parameter in Number() to convert it. When I try to run this Lambda function I get this error. Please help, thanks!
Lambda function:
payload: { "Id": $input.json('$.Id'), "name": $input.json('$.name') }
console.log('starting function');

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: 'us-east-1'});

exports.handler = function(event, ctx, callback) {
    var params = {
        Item: {
            Id: Number(event.Id),
            name: event.name
        },
        TableName: 'Test'
    };
    console.log(params);
    docClient.put(params, function(err, data) {
        if (err) {
            callback(err, null);
        } else {
            callback(null, data);
        }
    });
};
Error log:
Look at the logs.
Your event.Id value is "NaN", which means "not a number".
Also, event.name is "undefined".
So your problem is occurring here:
exports.handler = function(event, ctx, callback) {
Your event object is not populated with the values you are expecting.
The payload should be proper JSON and look something like:
{
    "id": "6",
    "name": "Test Name"
}
To achieve this, in your POST from your front-end code you could use something like:
data: JSON.stringify({"id": $('#Id').val(), "name": $('#name').val()})
Make sure that $('#Id').val() and $('#name').val() actually have proper values.
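For context, a hedged sketch of what the full front-end request might look like, assuming jQuery and a hypothetical API Gateway endpoint URL (the URL and handlers are placeholders, not from the question):
// Hedged sketch: a jQuery POST to a hypothetical API Gateway endpoint.
$.ajax({
    url: 'https://YOUR_API_ID.execute-api.us-east-1.amazonaws.com/prod/test',
    type: 'POST',
    contentType: 'application/json',
    data: JSON.stringify({ "Id": $('#Id').val(), "name": $('#name').val() }),
    success: function (response) { console.log(response); },
    error: function (xhr) { console.error(xhr.responseText); }
});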

DynamoDB getItem returns empty item

I am using the aws-sdk within Node and trying to get an item from a DynamoDB table but it prints just Success {} and not the actual content of the item. Does anyone know how to get the actual content of the item?
The Node script I am using is the following:
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'ap-southeast-2'});

// Create the DynamoDB service object
var ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});

var params = {
    TableName: 'test_table',
    Key: {
        'TWEET_KEY': {S: 'Test'}
    },
    ProjectionExpression: 'ATTRIBUTE_NAME'
};

// Call DynamoDB to read the item from the table
ddb.getItem(params, function(err, data) {
    if (err) {
        console.log("Error", err);
    } else {
        console.log("Success", data.Item);
    }
});
and the actual data within the DynamoDB test_table which I'm trying to get is as follows:
Thank you very much!
Here, in ProjectionExpression: 'ATTRIBUTE_NAME', you need to set the actual attribute names that you want to get, or just remove the ProjectionExpression entirely to get the whole record content.
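A minimal sketch of the second option, assuming the same table and key from the question; with ProjectionExpression removed, getItem returns every attribute of the item:
// Hedged sketch: the same request without ProjectionExpression,
// so the full item is returned instead of an empty projection.
var params = {
    TableName: 'test_table',
    Key: {
        'TWEET_KEY': {S: 'Test'}
    }
};

ddb.getItem(params, function(err, data) {
    if (err) {
        console.log("Error", err);
    } else {
        console.log("Success", data.Item);
    }
});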

Looking for a mainstream way to manage asynchronous flow in a nodejs application

I have this simple Node.js application, which generates dummy data for my web application.
All it does is:
Drops the dummy database
Populates the inventory collection
Populates the invoices collection
Populates the const data collection
Of course, all the actions are asynchronous and I want to execute them sequentially, one after another. It was simpler for me to write something to manage this kind of flow myself; however, I would like a mainstream solution that also supports other kinds of flows, for instance running in parallel and stopping everything on the first failure.
For your reference, please, find below the skeleton, depicting my solution:
/*global require, console, process*/
var mongo, db, inventory, createChain;

function generateInventory(count) {
    // returns the generated inventory
}

function generateInvoices(count, inventory) {
    // returns the generated invoices
}

function generateConst() {
    // returns the generated const data
}

mongo = require('mongojs');
db = mongo.connect('dummy', ['invoices', 'const', 'inventory']);

createChain = function () {
    "use strict";
    var chain = [false], i = 0;
    return {
        add: function (action, errMsg, resultCallback) {
            chain[chain.length - 1] = {action: action, errMsg: errMsg, resultCallback: resultCallback};
            chain.push(false);
            return this;
        },
        invoke: function (exit) {
            var str, that = this;
            if (chain[i]) {
                chain[i].action(function (err, o) {
                    if (err || !o) {
                        str = chain[i].errMsg;
                        if (err && err.message) {
                            str = str + ": " + err.message;
                        }
                        console.log(str);
                    } else {
                        if (chain[i].resultCallback) {
                            chain[i].resultCallback(o);
                        }
                        i += 1;
                        that.invoke(exit);
                    }
                });
            } else {
                console.log("done.");
                if (exit) {
                    process.exit();
                }
            }
        }
    };
};

createChain()
    .add(function (callback) {
        "use strict";
        console.log("Dropping the dummy database.");
        db.dropDatabase(callback);
    }, "Failed to drop the dummy database")
    .add(function (callback) {
        "use strict";
        console.log("Populating the inventory.");
        db.inventory.insert(generateInventory(100), callback);
    }, "Failed to populate the inventory collection", function (res) {
        "use strict";
        inventory = res;
    })
    .add(function (callback) {
        "use strict";
        console.log("Populating the invoices.");
        db.invoices.insert(generateInvoices(10, inventory), callback);
    }, "Failed to populate the invoices collection")
    .add(function (callback) {
        "use strict";
        console.log("Populating the const.");
        db["const"].insert(generateConst(), callback);
    }, "Failed to populate the const collection")
    .invoke(true);
Can anyone suggest a relevant nodejs package, which would also be easy to use?
Thank you very much.
Use the async module to provide just about any type of flow control you're ever likely to need. In particular, the series method provides sequential flow control.
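For illustration, here is a minimal sketch of async.series applied to the flow from the question, assuming the same mongojs db handle and generator functions; the shared inventory variable still has to be captured manually, which is one reason the waterfall suggestion below can be a better fit:
// Hedged sketch: the question's steps run sequentially with async.series.
var async = require('async');

async.series([
    function (callback) {
        console.log("Dropping the dummy database.");
        db.dropDatabase(callback);
    },
    function (callback) {
        console.log("Populating the inventory.");
        db.inventory.insert(generateInventory(100), function (err, res) {
            inventory = res; // keep the result for the invoices step
            callback(err, res);
        });
    },
    function (callback) {
        console.log("Populating the invoices.");
        db.invoices.insert(generateInvoices(10, inventory), callback);
    },
    function (callback) {
        console.log("Populating the const.");
        db["const"].insert(generateConst(), callback);
    }
], function (err) {
    if (err) {
        console.log(err.message || err);
    } else {
        console.log("done.");
    }
    process.exit();
});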
Actually, for sequential flow control where each step's result is passed to the next, you should use waterfall.
As an example:
async.waterfall([
    function(cb){
        cb(null, 1);
    },
    function(r, cb){
        // r=1
        cb(null, 2);
    },
    function(r, cb){
        // r=2
        cb(null, 3);
    }
], function(e, r){
    // e=null
    // r=3
});
This will execute sequentially.
If you call back an error early (i.e. cb("error")), it will go directly to the final function(e,r), with e="error" and r=undefined.
Notice how the function(r,cb){} steps can be precomposed in a util library to handle commonly reused blocks and make things easier in the future (see the sketch below).
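As a hedged sketch of that precomposition idea, using the db handle and generator functions from the question (the insertStep helper is hypothetical, not part of async):
// Hedged sketch: a helper that returns a waterfall-compatible step
// for inserting documents into a collection.
function insertStep(collection, makeDocs) {
    return function (previousResult, cb) {
        collection.insert(makeDocs(previousResult), cb);
    };
}

// Used in a waterfall, for example:
// async.waterfall([
//     function (cb) { db.dropDatabase(cb); },
//     insertStep(db.inventory, function () { return generateInventory(100); }),
//     insertStep(db.invoices, function (inventory) { return generateInvoices(10, inventory); })
// ], function (e, r) { /* handle errors / exit */ });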

Resources