I am creating a weather station using the Particle Electron and AWS. I have managed to get the returned data sent to a DynamoDB table "weather" which contains all of the weather data with the following schema (with included sample values):
Item{13}
deviceId: 540056000a51343334363138 (String) (Primary Partition Key)
tm: 1458754711 (Number) (Primary Sort Key)
batSoC: 89 (String)
batV: 4.01 (String)
hum: 27.9 (String)
lat: 41.2083 (String)
lon: -73.3439 (String)
pres: 968.4 (String)
temp: 19.8 (String)
uvI: 0.1 (String)
wDir: 0 (String)
wGst: 0.0 (String)
wSpd: 0.0 (String)
as well as a separate "weather_index" table which contains only the deviceId and tm attributes of the most recent data written to the main table (kind of like an atomic counter, but for a periodically updated unix timestamp). So if the "weather" item above were the most recent entry, the item in the "weather_index" table would look like this:
Item{2}
deviceId: 540056000a51343334363138 (String) (Primary Partition Key)
tm: 1458754711 (Number)
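(For context, that index row is simply overwritten each time a new observation is written. A minimal sketch of what the ingest-side write might look like, assuming an AWS.DynamoDB client named db and hypothetical deviceId and observationTime variables:)
// Hypothetical ingest-side write: putItem on the single index row
// replaces it, so the table always points at the newest observation.
db.putItem({
    TableName: 'weather_index',
    Item: {
        deviceId: {S: deviceId},
        tm: {N: String(observationTime)} // DynamoDB numbers are sent as strings
    }
}, function(err) {
    if (err) console.log(err);
});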
I am currently trying to write a very basic web frontend in Node.js (which, prior to this project, I have had no experience with, so I am still learning) and can't figure out how to:
Perform a DynamoDB getItem whose key uses a value retrieved by a previous getItem, like:
latestTime = getItem(weather_index, deviceId) // Gets the time "tm" of the most recent weather observation and stores it in "latestTime"
// Where "weather_index" is the table name
currentWeather = getItem(deviceId, tm) // Gets the weather observation for the specified "tm" value and stores it in "currentWeather"
// Where "tm" is the unix time-stamp of the most recent observation
I then want to be able to print the individual values to the terminal/webpage/carrier pigeon/etc. (something along the lines of currentWeather.deviceId, currentWeather.tm, currentWeather.batSoC, etc.).
I have the following code that I can't really make work properly:
/*
* Module dependencies
*/
var AWS = require('aws-sdk')
// weathermon_dev credentials
AWS.config.update({accessKeyId: 'REDACTED for obvious reasons', secretAccessKey: 'This bit too'});
// Select AWS region
AWS.config.update({region: 'us-east-1'});
var db = new AWS.DynamoDB();
// db.listTables(function(err,data) {
// console.log(data.TableNames);
// });
var time = Date.now() / 1000;
time = Math.round(time);
//console.log("Time: ");
//console.log(time);
var deviceId = "540056000a51343334363138"
var params = {
Key: {
deviceId: {S: deviceId}
},
TableName: 'weather_index'
};
var timeJson;
db.getItem(params, function(err,data) {
if (err) console.log(err); // an error occurred
else console.log(data); // successful response
var timeJson = JSON.parse(data);
})
// var timeJson = JSON.parse(data);
// var itemTime = timeJson.item;
console.log("timeJSON: " + timeJson);
// console.log("itemTime: " + itemTime);
var params = {
Key: {
deviceId: {S: deviceId},
time: {N: 'tm'}
},
TableName: 'weather'
};
db.getItem(params, function(err, data) {
if (err) console.log(err); // an error occurred
else console.log(data); // successful response
})
Any help would be greatly appreciated.
You need to look into how NodeJS asynchronous calls work. You would need to wait until the callback from the first getItem() is called before you perform the second getItem().
I've rewritten the relevant part of your code here, to show you what I'm talking about, but I recommend you try to understand why the code needs to be written in this way instead of just copy/pasting it.
var deviceId = "540056000a51343334363138";
var params = {
    Key: {
        deviceId: {S: deviceId}
    },
    TableName: 'weather_index'
};
db.getItem(params, function(err, data) {
    if (err) console.log(err); // an error occurred
    else {
        console.log(data); // successful response
        // getItem hands the callback an already-parsed object, so no
        // JSON.parse is needed; the latest timestamp is in data.Item.
        var latestTime = data.Item.tm.N;
        console.log("latestTime: " + latestTime);
        // Inside this callback we have the weather_index tm value,
        // so query the weather table here.
        var weatherParams = {
            Key: {
                deviceId: {S: deviceId},
                tm: {N: latestTime}
            },
            TableName: 'weather'
        };
        db.getItem(weatherParams, function(err, data) {
            if (err) console.log(err); // an error occurred
            else {
                console.log(data); // successful response
                // TODO: Use the weather observation in data.Item here
            }
        });
    }
});
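As a side note, once this pattern nests more than a couple of levels deep, the same two-step lookup reads more cleanly with the SDK's .promise() support and async/await. A minimal sketch, assuming the same tables, key names, and an AWS.DynamoDB client named db as above:
async function getCurrentWeather() {
    // Step 1: look up the latest observation timestamp for this device.
    var index = await db.getItem({
        TableName: 'weather_index',
        Key: {deviceId: {S: deviceId}}
    }).promise();
    var latestTime = index.Item.tm.N;
    // Step 2: fetch the observation written at that timestamp.
    var weather = await db.getItem({
        TableName: 'weather',
        Key: {deviceId: {S: deviceId}, tm: {N: latestTime}}
    }).promise();
    return weather.Item;
}

getCurrentWeather()
    .then(function(item) { console.log(item.temp.S, item.hum.S); })
    .catch(function(err) { console.error(err); });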
Related
I am trying to create an eventlog-type field on MongoDB records where I can store a list of activity. The first time I run the function, it appends to the array correctly, but subsequent calls overwrite the last entry instead of appending. If I restart the server or refresh the page in the browser, it will append once again and then repeat the same behavior.
I'm learning Node and JavaScript, so I'm sure it's some mistake I've made, but I don't seem able to figure it out.
The JavaScript on the client is a Tabulator event handler:
cellEdited:function(cell){
//cell - cell component
const oldValue = cell.cell.oldValue;
const newValue = cell.cell.value;
const title = cell.cell.column.definition.title;
var report = cell.cell.row.data;
report.event = `Updated ${title} from ${oldValue} to ${newValue}`;
$.ajax({
type: 'POST',
url: '/api/update',
data: report,
dataType: 'json'
});
}
The route it's calling on the server:
app.post('/api/update', isAuthenticated, function(req, res) {
var report = req.body;
var reason = '';
if (typeof report.event !== 'undefined') {
reason = report.event;
delete report.event;
} else {
reason = 'Report updated';
}
db.DamageReport.findOneAndUpdate({ _id: report._id}, report, function (err, doc) {
if (err) {
console.log('Err updating report ', err);
return res.send(500, { error: err});
}
/*
* Write eventlog
*/
var event = {"date": new Date(), "user": req.user.email, "event": reason };
appendLog(doc._id, event);
return res.json(doc);
});
});
The appendLog function:
function appendLog(id, entry) {
/*
* entry format:
* date: Date
* user: String
* event: String
*/
if (typeof(entry.date) !== 'object') {
entry.date = new Date();
}
db.DamageReport.findByIdAndUpdate(id, {$push: {eventLog: entry}}, function(err, result) {
if (err) {
return console.log('Error writing eventLog: ', err);
}
return(result);
});
}
It wouldn't append more than one entry because the previous save contained the eventLog array in its original form, so every save reset the array to that original state and then appended only the latest update.
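One way to avoid that (a sketch, untested against this schema) is to strip the client's stale copy of eventLog out of the report and update only the submitted fields with $set:
app.post('/api/update', isAuthenticated, function(req, res) {
    var report = req.body;
    var reason = report.event || 'Report updated';
    delete report.event;
    // Drop the stale array that came back from the client so the
    // update can never clobber what appendLog wrote server-side.
    delete report.eventLog;
    db.DamageReport.findOneAndUpdate(
        { _id: report._id },
        { $set: report },
        { new: true },
        function(err, doc) {
            if (err) return res.status(500).send({ error: err });
            appendLog(doc._id, { date: new Date(), user: req.user.email, event: reason });
            return res.json(doc);
        });
});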
I am new to all this, so apologies in advance. I am trying to query a DynamoDB table using Node.js and Lambda so I can get Alexa to return the value. The DynamoDB table is set up to look something like this:
date         time   filmname
2018-01-04   13:00  Titanic
2018-01-04   15:30  Titanic
2018-01-04   18:30  Paddington
2018-01-05   12:00  Star Wars
My table is set up with the following:
Primary partition key = date (String)
Primary sort key = time (String)
Now what I want to do is query or get the information from DynamoDB for a particular date, so for 2018-01-04 three items should be returned. Is this possible using Node.js within Lambda, allowing Alexa to read back all the items?
I have already set up the following code within my intent and this works fine:
var params = {
TableName: 'Cinema',
Key:{ "date": "2018-01-04", "time" : "13:00" }
};
docClient.get(params, (err, data) => {
if (err) {
this.emit(':tell', 'Test Error');
} else {
this.emit(':tell', 'Test Working ' + data.Item.title);
}
});
The above code returns Titanic, as expected. However, I am stuck as to how to get it to return all the items for a given date rather than just for that particular time.
I am able to run this js code standalone (i.e not in lambda) and this works fine. Although I suspect this is not the best way of doing it.
var params = {
TableName: 'Cinema',
KeyConditionExpression: "#date = :yymmdd and #time between :time1 and :time2",
ExpressionAttributeNames:{
"#date": "date",
"#time": "time"
},
ExpressionAttributeValues: {
":yymmdd":"2018-01-04",
":time1":'0',
":time2":'2'
}
};
docClient.query(params, function(err, data) {
if (err) {
console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
} else {
console.log("Query succeeded.");
data.Items.forEach(function(item) {
console.log(" -", item.title + ": ");
});
}
});
Now if I run this code within Lambda and try to test the skill, I get 'The remote endpoint could not be called, or the response it returned was invalid.' Also, for some reason my CloudWatch logs are not updating for that particular Lambda function, so I am unable to get further information at this time.
If anyone is interested, I solved the problem. It was as simple as ensuring that the response that is sent to Alexa was only triggered once.
I am new to this so although the code works as required I am open to any suggestions on best practice.
function queryDynamoDate_single() {
const startdate = this.attributes['startdate'];
var say = '';
var params = {
TableName: 'Cinema',
KeyConditionExpression: "#date = :yymmdd and #time between :time1 and :time2",
ExpressionAttributeNames:{
"#date": "date",
"#time": "time"
},
ExpressionAttributeValues: {
":yymmdd":startdate,
":time1":'0',
":time2":'2'
}
};
readDynamoItem(params, myResult => {
    say = 'You asked. The answer is: ' + myResult;
    this.response.speak(say).listen('try again');
    this.emit(':responseReady');
});
}
and the readDynamoItem helper:
function readDynamoItem(params, callback) {
    var title = [];
    var time = [];
    var noofitems = 0;
    let speechOutput = "";
    docClient.query(params, (err, data) => {
        if (err) {
            console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
        } else {
            console.log("Query succeeded.");
            data.Items.forEach(function(item) {
                console.log(" -", item.title + ": ");
                noofitems = noofitems + 1;
                title[noofitems] = item.title;
                time[noofitems] = item.time;
            });
            for (var l = 1; l <= noofitems; l++) {
                speechOutput = speechOutput + title[l];
            }
            // Call the callback exactly once, after the loop has built
            // the full list, so Alexa gets a single response.
            callback(speechOutput);
        }
    });
}
Please bear in mind that I can, at best, be described as a rookie in both Node and Amazon S3. I have an app that writes to S3 in the background. I want to read from S3 when the file has been written, and only once it has been written. I attempt to check the number of objects and return the result:
function haveFilesBeenWrittenToBucket(bucketName, callback) {
s3.listObjects({ Bucket: bucketName }, function(err, data) {
const items = data.Contents;
callback(items);
});
}
and the readFile function:
OSClient.prototype.readFile = function(params, callback) {
haveFilesBeenWrittenToBucket(params.Bucket, items => {
console.log("Number of items " + items.length);
if (items.length > 0) {
const rl = readline.createInterface({
input: s3.getObject(params).createReadStream()
});
const myArray = [];
rl.on("line", function (line) {
const lineArray = line.split(",");
for (const value of lineArray) {
if (isNaN(value)) {
// line.split creates string elements, adding extraneous quotation marks in a string and converting
// number to string, so there is a need to reverse this process.
const slicedElement = value.slice(1, -1);
myArray.push(slicedElement);
} else {
const valueOfNumber = Number(value);
myArray.push(valueOfNumber);
}
}
})
.on("close", function () {
callback(myArray);
});
}
else{
var myfunction = this.readFile.bind(this, params, callback);
setTimeout(myfunction, 5000);
}
});
};
and lastly:
targetClient.readFile(params, function (arrayResult) {
logger.info("Read file:" + fileName + OS_FILE_SUFFIX);
readArray = arrayResult;
});
If I put a breakpoint on callback(items) (in haveFilesBeenWrittenToBucket), everything works fine and I get back the file written in the bucket; but without the breakpoint, nothing seems to get written to S3 until much later, in some other test, where it shouldn't be. It seems like some race condition, but I'm really clueless and would appreciate some help. Is there a conflict between listing objects and writing to S3? (This is part of a mocha test suite; the readFile call is inside an async.waterfall.) I have been on this for days and got nowhere. As I said, it's my first exposure to Node, so please be patient with me. Thanks.
S3 provides eventual consistency for list-after-write. So, you might observe the following:
A process writes a new object to Amazon S3 and immediately lists keys within its bucket. Until the change is fully propagated, the object might not appear in the list.
The only situation in which S3 provides immediate consistency is read-after-write for PUTS of new objects (with a minor caveat, documented here).
More details at S3 consistency model.
Here is an example of how you can use async retry to wait for an object and then retrieve its contents (assumed to be text in this example).
var aws = require("aws-sdk");
var async = require("async");
var s3 = new aws.S3();
var bucket = 'mybucket';
var iteration = 0;
function waitForObjects(bucket, callback) {
console.error(`Iteration: ${++iteration}`);
s3.listObjects({Bucket:bucket}, function(err, data) {
if (err) {
callback(err);
} else if (!data.Contents || !data.Contents.length) {
callback(new Error("No objects"))
} else {
callback(null, data);
}
});
}
// Try calling waitForObjects 10 times with exponential backoff
// (intervals of 100, 200, 400, 800, 1600, ... milliseconds)
async.retry({
times: 10,
interval: function(retryCount) {
return 50 * Math.pow(2, retryCount);
}
}, async.apply(waitForObjects, bucket), function(err, data) {
if (err) {
console.error(`Error waitForObjects: ${err}`);
} else {
console.log(`Object count: ${data.Contents.length}`);
data.Contents.forEach(function(item, index) {
console.log(`Object ${index+1} key: ${item.Key}`);
s3.getObject({Bucket:bucket, Key:item.Key}, function(err, data) {
console.log(`Object ${index+1} txt: ${data.Body.toString()}`);
});
});
}
});
Two things. Firstly, it turns out that my issue was not Node.js-related. Sigh.
Secondly, the API now provides a 'waitFor' method for polling whether a bucket or objects exists:
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#waitFor-property
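A minimal sketch of using that waiter (AWS SDK for JavaScript v2), with a hypothetical bucket and key; the objectExists waiter polls headObject under the hood (by default every 5 seconds, up to 20 attempts):
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

s3.waitFor('objectExists', {Bucket: 'mybucket', Key: 'myfile.txt'}, function(err) {
    if (err) return console.error('Object never appeared:', err);
    // The object is now listable and readable.
    s3.getObject({Bucket: 'mybucket', Key: 'myfile.txt'}, function(err, data) {
        if (err) return console.error(err);
        console.log(data.Body.toString());
    });
});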
I have a DynamoDB table holding around 1,500,000 entries of the same shape, with different timestamps, and 2 GSIs on it. (Screenshots of the table and index definitions are not reproduced here.)
I'm trying to retrieve all the rows in the table for a given day.
This is what my code looks like (NodeJS):
var AWS = require("aws-sdk");
AWS.config.update({accessKeyId: "", secretAccessKey: ""});
AWS.config.update({region: 'us-east-1'});
var docClient = new AWS.DynamoDB.DocumentClient();
var params = {
TableName: "QfGamingTransactionsProd",
IndexName: 'Result-RedeemedAt-index',
KeyConditionExpression: "#rs = :result and begins_with (#rat, :Rat)",
ExpressionAttributeNames: {
"#rs": "Result",
"#rat": "RedeemedAt"
},
ExpressionAttributeValues: {
":result": "SUCCESS",
":Rat": "2016-10-20"
}
};
docClient.query(params, function (err, data) {
if (err) {
console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
} else {
console.log("\nQuery succeeded. \n");
console.log("- Total", data.Count);
}
});
It seems to be working, but I'm getting (way) fewer results than expected. This same code works fine on a smaller table.
I get similar results with "Scan".
What am I missing?
Depending on the size of each record, the number of records retrieved will vary, because DynamoDB caps each query response at 1 MB.
A query will return at most 1 MB of data per call, but we can paginate through the results, which should solve your issue.
If the data satisfying the query was not fully retrieved, the response will contain a "LastEvaluatedKey". Set that "LastEvaluatedKey" as the "ExclusiveStartKey" of the next query to retrieve the remaining data. Repeating this until no "LastEvaluatedKey" is returned yields the complete data.
var data = [];
var scanComplete = false;

// Wrapped in a Promise so that resolve/reject below are defined.
new Promise(function (resolve, reject) {
    async.until(function () {
        return scanComplete;
    },
    function (callback) {
        docClient.query(params, function (err, result) {
            if (err) {
                console.log(err);
            } else {
                data = data.concat(result.Items); // collect this page's items
                if (typeof (result.LastEvaluatedKey) === 'undefined') {
                    scanComplete = true; // fully retrieved
                } else {
                    params.ExclusiveStartKey = result.LastEvaluatedKey;
                }
            }
            callback(err);
        });
    },
    // this runs when the loop is complete or returns an error
    function (err) {
        if (err) {
            console.log('error in processing query');
            console.log(err);
            reject(err);
        } else {
            resolve(data);
        }
    });
});
This is because, by default, DynamoDB returns only 1 MB of data at a time, but there is a way to solve this issue.
You need to change your implementation as follows:
Step 1: Query the DynamoDB table; it returns the first 1 MB of data, along with a "LastEvaluatedKey".
Step 2: Query the table again, this time passing the "LastEvaluatedKey" as the "ExclusiveStartKey", to get the next batch of data.
Step 3: If a "LastEvaluatedKey" is still returned, repeat Step 2; otherwise you have all the data for that key. A sketch of this loop follows.
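A minimal sketch of that loop with the DocumentClient (the queryAllPages helper name is mine; params is assumed to be a query like the one in the question):
// Hypothetical helper: keeps querying until no LastEvaluatedKey remains.
function queryAllPages(params, items, done) {
    docClient.query(params, function(err, data) {
        if (err) return done(err);
        items = items.concat(data.Items);
        if (data.LastEvaluatedKey) {
            // Step 2: feed LastEvaluatedKey back in as ExclusiveStartKey
            params.ExclusiveStartKey = data.LastEvaluatedKey;
            return queryAllPages(params, items, done);
        }
        done(null, items); // Step 3: no more pages left
    });
}

queryAllPages(params, [], function(err, items) {
    if (err) return console.error(err);
    console.log("- Total", items.length);
});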
Here are the references:
About query limits
Blog on how to implement this code
Hope that helps
I have the following code that scans the DynamoDB table and returns a count of the number of people who have the key value test = true. For some reason, this code is not scanning the whole table. Does anyone know why?
var aws = require('aws-sdk');
var config = require('./config.js');
aws.config.update({accessKeyId: config.key, secretAccessKey: config.secret});
aws.config.update({region: 'us-east-1'});
function getItems() {
var db = new aws.DynamoDB.DocumentClient();
db.scan({
TableName : config.db,
}, function(err, data) {
if (err) { console.log(err); return; }
var count = 0;
for (var ii in data.Items) {
ii = data.Items[ii];
if (ii.setRemoveBrandingEmailOptin) {
console.log(ii.test);
count += 1;
}
}
console.log(count);
});
}
getItems();
Per the documentation:
If the total number of scanned items exceeds the maximum data set size
limit of 1 MB, the scan stops and results are returned to the user as
a LastEvaluatedKey value to continue the scan in a subsequent
operation. The results also include the number of items exceeding the
limit.
You shouldn't need to dump the entire table into your application for a simple count anyway. You're doing this in the most inefficient way possible. Try something like this:
db.scan({
TableName : config.db,
Select: 'COUNT',
FilterExpression: "#emailOptInField = :emailOptInValue",
ExpressionAttributeNames: {
"#emailOptInField": "setRemoveBrandingEmailOptin",
},
ExpressionAttributeValues: {
":emailOptInValue": true
}
}, function(err, data) {
if (err) { console.log(err); return; }
var count = data.Count;
console.log(count);
});
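One caveat: even with Select: 'COUNT', a single scan request still reads at most 1 MB of data, so on a large table you still need to follow LastEvaluatedKey and sum the per-page Count values. A minimal sketch, reusing the filter above and assuming db is the DocumentClient from the question:
function countOptIns(startKey, total, done) {
    var params = {
        TableName: config.db,
        Select: 'COUNT',
        FilterExpression: "#emailOptInField = :emailOptInValue",
        ExpressionAttributeNames: { "#emailOptInField": "setRemoveBrandingEmailOptin" },
        ExpressionAttributeValues: { ":emailOptInValue": true }
    };
    if (startKey) params.ExclusiveStartKey = startKey; // resume where the last page stopped
    db.scan(params, function(err, data) {
        if (err) return done(err);
        total += data.Count;
        if (data.LastEvaluatedKey) return countOptIns(data.LastEvaluatedKey, total, done);
        done(null, total); // all pages scanned
    });
}

countOptIns(null, 0, function(err, total) {
    if (err) { console.log(err); return; }
    console.log(total);
});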