How to get aws dynamodb ConsumedCapacity in node.js?

I have read the AWS documentation and added the ReturnConsumedCapacity parameter, but the response contains no ConsumedCapacity object... (using DynamoDB Local)
What mistake did I make?
var params = {};
params.TableName = this.tableName;
params.Limit = 100;
params.ReturnConsumedCapacity = "TOTAL";
var items = [];

var scanExecute = function() {
  dynamo.scan(params, function(err, result) {
    console.log(result); // ConsumedCapacity is missing...
    if (err) {
      console.log(err);
      callback(new Error('dynamo scan error'));
    } else if (result.Count === 0) {
      callback(null, undefined);
    } else {
      items = items.concat(result.Items);
      if (result.LastEvaluatedKey) {
        params.ExclusiveStartKey = result.LastEvaluatedKey;
        scanExecute();
      } else {
        callback(null, items);
      }
    }
  });
};
scanExecute();

When you run against a DynamoDB Local instance, consumed capacity is not tracked. The DynamoDB Local documentation states:
Consumed capacity units are not tracked. In operation responses, nulls are returned instead of capacity units.
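Against the real DynamoDB service, the same parameter does populate the response. A minimal sketch (region and table name are placeholders):

var AWS = require('aws-sdk');
var dynamo = new AWS.DynamoDB({ region: 'us-east-1' }); // placeholder region

dynamo.scan({
  TableName: 'myTable', // placeholder table name
  Limit: 100,
  ReturnConsumedCapacity: 'TOTAL'
}, function (err, result) {
  if (err) return console.log(err);
  console.log(result.ConsumedCapacity); // e.g. { TableName: 'myTable', CapacityUnits: 12.5 }
});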

Related

Stop Lambda Triggers

I have some records in DynamoDB and a trigger associated with the table, configured via a serverless.yml file.
Below is the events configuration:
- stream:
    type: dynamodb
    arn:
      Fn::GetAtt:
        - myTable
        - StreamArn
    batchSize: 1
But I have a requirement that, based on some flag, I should stop the execution of the Lambda for all records if one record ends in failure, and subsequently all the records for that transaction have to be deleted from DynamoDB.
I noticed that even after deleting the records from DynamoDB, the trigger still continues. Is there a way to get the existing triggers for the context and stop them all?
P.S. I am using Node.js.
Code and Steps
Checker.js -- Talks to an external system, adds the records to the specified DynamoDB table, and exits. The ValidateData function below is invoked via events from DynamoDB. Its serverless configuration is below:
ValidateData:
  handler: ValidateData.handler
  memorySize: 1536
  timeout: 300
  events:
    - stream:
        type: dynamodb
        arn:
          Fn::GetAtt:
            - kpiTaskTable
            - StreamArn
        batchSize: 1
ValidateData --
async.waterfall([
  // go get kpis
  function getKPIs(next) {
    request({
      agent: agent,
      uri: getKPIsURL(endpoint, APIKey),
      maxAttempts: retryCount,
      retryDelay: retryDelayTime,
      retryStrategy: request.RetryStrategies.HTTPOrNetworkError
    }, function (error, response, body) {
      if (error) {
        console.log("ERROR: Error whilst fetching KPIs: " + error);
        next(error);
      } else {
        // need to add a check here that at least one KPI was returned, otherwise error
        kpis = JSON.parse(body);
        if (kpis.constructor != Array) {
          console.log("ERROR: KPIs are not of type Array");
          next(error);
        } else {
          next(null);
        }
      }
    });
  },
  // ... (remaining waterfall steps elided) ...
], function (err) {
  if (err) {
    console.log("ERROR: Something has gone wrong: " + err);
    var stopReportGen = process.env.STOP_REPORT_CREATION_ON_ERROR;
    if (stopReportGen === "true") {
      console.log("Deleting records from DynamoDB for report ID " + reportId);
      kpiUtil.deleteRecordsFromDynamoDB(reportId).then(function () {
        s3Api.deleteFile(reportBucket, reportName, retryCount, retryDelayTime).then(function () {
          console.log("INFO: The temp file is deleted from the S3 bucket");
          callback(null, "ERROR: " + sourceId + " Report ID: " + reportId);
        }).catch(function (err) {
          console.log("ERROR: Error deleting the temp file from the S3 bucket");
          callback(null, "ERROR: " + sourceId + " Report ID: " + reportId);
        });
      });
    }
  }
});
Delete from DynamoDB -- deletes the records from the DB:
var AWS = require('aws-sdk');
var fs = require('fs');
var path = require('path');
var zlib = require('zlib');

(function (exports) {
  var deleteRecordsFromDynamoDB = function (reportId) {
    return new Promise(function (resolve, reject) {
      var docClient = new AWS.DynamoDB.DocumentClient();
      var table = process.env.KPI_TASK_TABLE;
      var params = {
        TableName: table,
        FilterExpression: "#reportId = :reportId_val",
        ExpressionAttributeNames: {
          "#reportId": "reportId"
        },
        ExpressionAttributeValues: { ":reportId_val": parseInt(reportId) }
      };
      var count = 0;
      docClient.scan(params, onScan);

      function onScan(err, data) {
        if (err) {
          console.error("ERROR: Unable to scan the table. Error JSON:", JSON.stringify(err, null, 2));
          reject(err);
        } else {
          console.log("Scan succeeded for reportID :: " + reportId);
          data.Items.forEach(function (itemdata) {
            // Note: Key must contain the table's full primary key
            // (partition key plus sort key, if the table has one).
            var delParams = {
              TableName: table,
              Key: {
                "reportSource": itemdata.reportSource
              }
            };
            console.log("Attempting a conditional delete...");
            docClient.delete(delParams, function (err, data) {
              if (err) {
                console.error("ERROR: Unable to delete item. Error JSON:", JSON.stringify(err, null, 2));
                reject(err);
              } else {
                console.log("DeleteItem succeeded:", JSON.stringify(data, null, 2));
              }
            });
            console.log("INFO: Item :", ++count, JSON.stringify(itemdata));
          });
          // continue scanning if we have more items
          if (typeof data.LastEvaluatedKey != "undefined") {
            console.log("Scanning for more...");
            params.ExclusiveStartKey = data.LastEvaluatedKey;
            docClient.scan(params, onScan);
          } else {
            resolve("success");
          }
        }
      }
    });
  };
  exports.deleteRecordsFromDynamoDB = deleteRecordsFromDynamoDB;
}(typeof exports === 'undefined' ? this['deleteRecordsFromDynamoDB'] = {} : exports));
Based on the above description, my understanding is that deleting the items will publish stream records to Lambda as well. You can ignore the delete records in two ways:
1) Check the eventName in each Record. If the eventName is REMOVE, you can ignore that record in the Lambda function (see the sketch at the end of this answer).
2) Before deleting the items in DynamoDB, disable the stream on the table using the UpdateTable API.
Please note that UpdateTable is an asynchronous operation, so it will take a while for the change to take effect. The items should not be deleted until the stream is disabled. Alternatively, you can implement both options 1 and 2 to be on the safe side.
var params = {
  TableName: "SomeTableName",
  StreamSpecification: {
    StreamEnabled: false
  }
};
dynamodb.updateTable(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data);               // successful response
});
You may need to re-enable the stream when you want the Lambda trigger back in operation.
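For option 1, a minimal sketch of a Lambda handler that skips REMOVE records might look like this (the handler body around it is a placeholder):

exports.handler = function (event, context, callback) {
  event.Records.forEach(function (record) {
    // Stream records produced by deletes have eventName REMOVE
    if (record.eventName === 'REMOVE') {
      console.log('Ignoring REMOVE record');
      return;
    }
    // ... process INSERT / MODIFY records here ...
  });
  callback(null, 'done');
};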

Unhandled promise rejection firestore

I don't quite have promises down yet. Anyway, I am reading a list of rows from a MySQL table, then upon writing each record into Firestore I want to update a count on a "related" record; basically aggregate counts.
Here is the start:
request(options, function (error, response, body) {
  if (!error && response.statusCode == 200) {
    var data = JSON.parse(body);
    //console.log(data);
    var allObjects = data.resource;
    var obj = {};
    for (var j = 0; j < allObjects.length; j++) {
      var records = [];
      var record = allObjects[j];
      // FireStore
      var db = admin.firestore();
      var docRef = db.collection(toCollection).doc(record.nfCode);
      var rec = docRef.set(record);
      addCounts(docRef);
    }
    res.render('index', { title: 'Load Table' });
    offset = offset + limit;
  } else {
    return res.send(body);
    //res.render('index', { title: 'Error' });
  }
});
Then here is addCounts:
function addCounts(docRef) {
  // In a transaction, update the aggregate totals
  var db = admin.firestore();
  return db.runTransaction(transaction => {
    transaction.get(docRef).then(res => {
      var brandRef = db.collection('brands').doc(res.data().brandNFCode);
      var transaction = db.runTransaction(t => {
        return t.get(brandRef)
          .then(doc => {
            if (res.data().glutenFreeYN == "Y") {
              var glutenFreeCount = doc.data().glutenFreeCount + 1;
              var setWithOptions = transaction.set(brand, {
                glutenFreeProductCount: glutenFreeProductCount
              }, { merge: true });
              return setWithOptions;
            }
          });
      })
        .then(result => {
          console.log('Transaction success', result);
          // return nil
        })
        .catch(err => {
          // console.log('Transaction failure:', err);
          return nil;
        });
    });
  });
}
Is there a better way to do this? And where is my error coming from?
Auth error:Error: socket hang up
(node:90050) UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 2): Error: Getting metadata from plugin failed with error: socket hang up
There are a few things wrong with your addCounts function that may be causing this:
function addCounts(docRef) {
  // In a transaction, update the aggregate totals
  var db = admin.firestore();
  return db.runTransaction(transaction => {
    return transaction.get(docRef).then(res => {
      if (res.data().glutenFreeYN == "Y") {
        var glutenFreeProductCount = res.data().glutenFreeProductCount + 1;
      }
      if (res.data().vegetarianYN == "Y") {
        var vegetarianProductCount = res.data().vegetarianProductCount + 1;
      }
      if (res.data().dairyFreeYN == "Y") {
        var dairyFreeProductCount = res.data().dairyFreeProductCount + 1;
      }
      if (res.data().organicYN == "Y") {
        var organicProductCount = res.data().organicProductCount + 1;
      }
      // LOOK HERE!!! You had brand.set();
      // this version uses the transaction.
      // (brand must be a DocumentReference in scope here,
      // e.g. the 'brands' document from the question.)
      var setWithOptions = transaction.set(brand, {
        glutenFreeProductCount: glutenFreeProductCount,
        organicProductCount: organicProductCount,
        vegetarianProductCount: vegetarianProductCount,
        dairyFreeProductCount: dairyFreeProductCount
      }, { merge: true });
      // LOOK HERE!! Make sure to return the
      // set operation
      return setWithOptions;
    });
    // I removed the .catch() here as it will obscure
    // errors. You can catch the result of addCounts.
  });
};
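The caller can then handle failures itself, for example:

addCounts(docRef)
  .then(() => console.log('Counts updated'))
  .catch(err => console.error('Transaction failure:', err));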
I was running into this issue and spent over an hour trying to figure it out. Make sure your system time is synced properly. My time was correct, but my time zone wasn't, so the system UTC time was out of sync.
To correct this on Windows 10, go to Settings -> Time & Language -> Date and Time -> Sync Now.

AngularJs $resource save throwing ERR_CONTENT_LENGTH_MISMATCH

I am using an AngularJS $resource to update existing data, calling save on the resource.
This is my service:
app.factory('SubscriptionsService', function ($resource, $q) {
  // $resource(url, [paramDefaults], [actions], options);
  var resource = $resource('/api/subscriptions');
  var factory = {};
  factory.updateSubscriptions = function (updatedSubscriptions) {
    console.log("SubscriptionsService: In updateSubscriptions. " + JSON.stringify(updatedSubscriptions));
    var deferred = $q.defer();
    resource.save(updatedSubscriptions,
      function (response) {
        deferred.resolve(response);
      },
      function (response) {
        deferred.reject(response);
      }
    );
    return deferred.promise;
  };
  return factory;
});
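(As an aside, $resource actions already expose a $promise on the object they return, so the deferred wrapper above could arguably be dropped; a minimal sketch:)

factory.updateSubscriptions = function (updatedSubscriptions) {
  return resource.save(updatedSubscriptions).$promise;
};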
And the API looks like the following:
exports.update = function (req, res) {
  console.log("In Subscriptions API: Update invoked.");
  if (req.body == null || req.body.items == null || req.body.items == 'undefined') {
    return res.json({ "status": 0, "message": "no items present." });
  }
  var items = req.body.items;
  console.log("About to call update on db. " + items);
  db.update(items, function (error, result, failedItems) {
    if (error)
      return res.json({ "status": -1, "message": error.message, "failedItem": failedItems });
    return res.json({ "status": 1, "message": result, "failedItem": failedItems });
  });
};
And the controller as follows:
$scope.confirmUpdates = function () {
  if ($scope.updatedItems.length > 0) {
    var updatedSubscriptionLevels = { "items": $scope.updatedItems };
    var promise = SubscriptionsService.updateSubscriptions(updatedSubscriptionLevels);
    promise.then(
      function (response) {
        console.log("Response received from API.");
        var statusCode = response.status;
        console.log("statusCode: " + statusCode);
      },
      function (message) {
        console.log("Error message: " + message);
      }
    );
  }
};
The strange part is that the data is getting updated, but the promise receives an error:
POST xxxxxx/testapi/subscriptions net::ERR_CONTENT_LENGTH_MISMATCH
I suspect I am not implementing/consuming the resource in the right way. Can anybody advise?
Thanks.
I think I found the mistake.
There were a couple of things I was not handling in the right way.
I didn't post my db code in my question, and that is where the root cause was present:
if (itemsToUpdate != null && itemsToUpdate.length > 0) {
  var failedItems = []; // Holds failed items while updating.
  console.log("Items to update: " + itemsToUpdate);
  for (var index = 0; index < itemsToUpdate.length; index++) {
    var item = itemsToUpdate[index];
    console.log(item);
    subscriptionLevels.update(
      { _id: item._id },
      { $set: { title: item.title, daysValid: item.daysValid, subscriptionFee: item.subscriptionFee } },
      function (error, rowsAffected) {
        if (error) {
          console.log(error);
          // Error occurred. Push it to the failed items array.
          failedItems.push({ "title": item.title, "message": error.message });
        }
        else {
          console.log("Number of rows updated: " + rowsAffected);
        }
      }
    );
  }
  callback(null, "SUCCESS", failedItems);
}
else {
  callback(null, "NO_ITEMS", []);
}
I had missed the if-else condition and was invoking the callback multiple times, which was causing the issue.
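For what it's worth, a rough sketch of deferring the callback until every update has completed (same hypothetical subscriptionLevels API as above, and assuming a non-empty itemsToUpdate as in the branch shown):

var failedItems = [];
var pending = itemsToUpdate.length;

itemsToUpdate.forEach(function (item) {
  subscriptionLevels.update(
    { _id: item._id },
    { $set: { title: item.title, daysValid: item.daysValid, subscriptionFee: item.subscriptionFee } },
    function (error, rowsAffected) {
      if (error) {
        failedItems.push({ "title": item.title, "message": error.message });
      }
      // Fire the callback exactly once, after the last update returns.
      if (--pending === 0) {
        callback(null, "SUCCESS", failedItems);
      }
    }
  );
});

Using forEach also keeps each item correctly bound in its callback's closure, which the for loop above does not.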

Retrieve the last 3200 tweets of a specific user in node.js

I am new to JavaScript and Node.js and this is my first post, so please bear with me.
I am using ntwitter to get all previous tweets of a specific user.
My problem is that if the user has more than 200 tweets, I need to create a loop, and I am not sure if I am doing it right.
This is the async function that gets the 200 latest tweets:
exports.getUserTimeline = function(user, callback) {
  twit.getUserTimeline({ screen_name: user, count: 200 }, function(err, data) {
    if (err) {
      return callback(err);
    }
    callback(err, data);
  });
};
I found a solution using a recursive function, but it's quite ugly. How can I improve it?
exports.getUserHistory = function(user, callback) {
  recursiveSearch(user, callback);

  function recursiveSearch(user, callback, lastId, data) {
    var data = data || [],
        args = { screen_name: user, count: 200 };
    if (typeof lastId != "undefined") args.max_id = lastId;
    twit.getUserTimeline(args, function(err, subdata) {
      if (err) {
        console.log('Twitter search failed!');
        return callback(err);
      }
      if (data.length !== 0) subdata.shift();
      data = data.concat(subdata);
      var lastId = parseInt(data[data.length - 1].id_str);
      if (subdata.length !== 0) {
        recursiveSearch(user, callback, lastId, data);
      } else {
        callback(err, data);
      }
    });
  }
};
Thanks a lot!
Update: This is the improved (refactored) function suggested by hunterloftis, with two modifications:
1) the max_id property should not be specified on the first iteration
2) the case where the user exists but has not tweeted yet must be handled
Code:
function getUserHistory(user, done) {
  var data = [];
  search();

  function search(lastId) {
    var args = {
      screen_name: user,
      count: 200,
      include_rts: 1
    };
    // max_id is inclusive, so each page repeats the last tweet of
    // the previous page; it gets shifted off below.
    if (lastId) args.max_id = lastId;
    twit.getUserTimeline(args, onTimeline);

    function onTimeline(err, chunk) {
      if (err) {
        console.log('Twitter search failed!');
        return done(err);
      }
      if (!chunk.length) {
        console.log('User has not tweeted yet');
        return done(err);
      }
      // Get rid of the first element of each iteration (except the first time)
      if (data.length) chunk.shift();
      data = data.concat(chunk);
      var thisId = parseInt(data[data.length - 1].id_str);
      if (chunk.length) return search(thisId);
      console.log(data.length + ' tweets imported');
      return done(undefined, data);
    }
  }
}
When retrieving tweets I noticed that my tweet count wasn't always the same as the user's statuses_count property. It took me some time to figure out that the difference corresponds to the number of deleted tweets :)
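For reference, a call site for this function might look like this (the screen name is just an example):

getUserHistory('some_user', function (err, tweets) {
  if (err) return console.error(err);
  console.log('Fetched ' + tweets.length + ' tweets');
});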
Does your recursive function work? It doesn't look too bad to me. I might refactor it just a little, into something more like this:
function getUserHistory(user, done) {
  var data = [];
  search();

  function search(lastId) {
    var args = {
      screen_name: user,
      count: 200,
      max_id: lastId
    };
    twit.getUserTimeline(args, onTimeline);

    function onTimeline(err, chunk) {
      if (err) {
        console.log('Twitter search failed!');
        return done(err);
      }
      if (data.length) chunk.shift(); // What is this for?
      data = data.concat(chunk);
      var thisId = parseInt(data[data.length - 1].id_str);
      if (chunk.length) return search(thisId);
      return done(undefined, data);
    }
  }
}

node.js fall through data cache pattern

I am looking for a clean way to structure my Node.js code for the following situation. I thought of using EventEmitters to create a "workflow" type of thing. I also thought of using one of the async libraries out there, though I have given that less thought.
Problem:
Looking for a piece of data
check cache, if found return
check db, if found return (store in cache)
get live data and return (store in db and cache)
I mocked something up quickly using event emitters below.
var util = require("util");
var events = require('events');

var CheckForData = function() {
  events.EventEmitter.call(this);

  this.checkForData = function(key) {
    this.emit("checkForDataRequest", key);
  };

  var _checkCache = function(key) {
    if (key === 'cache') {
      this.emit("found", { data: 'cached data' });
    }
    else {
      console.log("not found in cache " + key);
      this.emit("checkDatastore", key);
    }
  };

  var _checkDatastore = function(key) {
    if (key === 'db') {
      this.emit("found", { data: 'db data' });
      this.emit("storeCache", key, { data: 'db data' });
    }
    else {
      console.log("not found in db " + key);
      this.emit("getData", key);
    }
  };

  var _getData = function(key) {
    if (key === 'live') {
      this.emit("found", { data: 'live data' });
      this.emit("storeData", key, { data: 'live data' });
    }
    else {
      console.log("not found in live " + key);
      this.emit("notFound", key);
    }
  };

  var _storeData = function(key, data) {
    this.emit("storeDb", key, data);
    this.emit("storeCache", key, data);
  };

  var _storeDb = function(key, data) {
    console.log("storing data in db for " + key);
    console.log(data);
  };

  var _storeCache = function(key, data) {
    console.log("storing data in cache for " + key);
    console.log(data);
  };

  var _found = function(data) {
    return data;
  };

  var _notFound = function(key) {
    return key;
  };

  this.on("checkForDataRequest", _checkCache);
  this.on("checkDatastore", _checkDatastore);
  this.on("getData", _getData);
  this.on("found", _found);
  this.on("notFound", _notFound);
  this.on("storeData", _storeData);
  this.on("storeDb", _storeDb);
  this.on("storeCache", _storeCache);
};

util.inherits(CheckForData, events.EventEmitter);
module.exports = new CheckForData();
To test it...
var checkForData = require('./check-for-data');

checkForData.on("found", function(data) {
  console.log("Found data");
  console.log(data);
});
checkForData.on("notFound", function(key) {
  console.log("No data found for " + key);
});

console.log("-------");
checkForData.checkForData('cache');
console.log("-------");
checkForData.checkForData('db');
console.log("-------");
checkForData.checkForData('live');
console.log("-------");
checkForData.checkForData('other');
console.log("-------");
Then for async.js, I made a quick checkSeries, which is basically async.detectSeries except that instead of returning the matching item in the collection it returns that item's result. See the example below; a sketch of the helper itself follows it.
var async = require('async');

function check_cache(key) {
  console.log('checking cache');
  return null;
}

function check_datastore(key) {
  console.log('checking datastore');
  return null; // {data: "db data"};
}

function check_api(options) {
  console.log('calling api');
  return { data: "live data" };
}

function exists(item, callback) {
  callback(item());
}

async.checkSeries([check_cache, check_datastore, check_api], exists, function(result) {
  // result now equals the value returned by the first function that produced data
  console.log(result);
});
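checkSeries is not part of async itself; for completeness, a rough sketch of what such a helper could look like (my own implementation, not the library's):

async.checkSeries = function (items, test, done) {
  var i = 0;
  (function next() {
    if (i >= items.length) return done(null); // nothing produced data
    test(items[i++], function (result) {
      if (result) return done(result); // first truthy result wins
      next();
    });
  })();
};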
Any suggestions, hints, tips? Is there a pattern or library that I am missing? Do you think it would be possible/easier to do in Step, flow, ...? Memoize?
That seems like a lot of work for the caller and a lot of extra code that doesn't seem to add much value. I have something like this in my model:
Foo.get = function (id, cb) {
  var self = this;
  // check the cache first
  cache.get('foo:' + id, function (err, cacheFoo) {
    // if found, just return it
    if (!err && cacheFoo) {
      return cb(null, cacheFoo);
    }
    // otherwise get from db
    self.findOne({ id: id }, function (err, dbFoo) {
      if (err || !dbFoo) {
        return cb(new Error('Foo not found', 404));
        // you could do the live call here
      }
      // and store in cache
      cache.store('foo:' + id, dbFoo);
      return cb(null, dbFoo);
    });
  });
};
Callers can then always just call Foo.get(id, callback) and they don't have to care how it is actually retrieved. If it gets more complicated, you could use an async library (such as the aptly named async) to make the code more readable, but this should still be completely hidden from the caller.
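For comparison, the same fall-through also reads naturally with promises (hypothetical checkCache/checkDb/fetchLive helpers, each returning a promise that resolves to the data or null):

function getData(key) {
  return checkCache(key).then(function (cached) {
    if (cached) return cached;
    return checkDb(key).then(function (fromDb) {
      if (fromDb) {
        storeCache(key, fromDb);
        return fromDb;
      }
      return fetchLive(key).then(function (live) {
        storeDb(key, live);
        storeCache(key, live);
        return live;
      });
    });
  });
}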
