Empty data object when deleting item from dynamodb - node.js

According to the docs I should get a data structure with the item as it was prior to the deletion (in case there was no error)
I do check there was no error but I get an empty object for data:
docClient.delete(params, (err, data) => {
if (err) {
console.error('Error tring to delete item:' + err);
callback(err, null); // error
} else if (!data.Items || data.Items.length == 0) {
console.info(JSON.stringify(data));
callback(null, null); // no items
} else {
console.info(JSON.stringify(data));
callback(null, data.Items[0].ExposeStartTimestamp);
}
});
Both prints empty json: {}

In order for the deleted data to appear in the response, the request should contain the attribute ReturnValues with value ALL_OLD.
var params = {
TableName: 'TableName',
Key: {
HouseId: houseId
},
ReturnValues: 'ALL_OLD'
};

Related

How do I update (not overwrite) a record in Elasticsearch using Node.js?

Writing a Lambda, I am grabbing a file from S3 and sending it to Elasticsearch. When I attached the file using pipeline-attachment, it attaches correctly but overwrites all my fields. How do I just attach the file to my record while maintaining my previous fields in my response? Can't find the documentation for this action.
I am using the Elasticsearch.index to initially attach the file, which works but overwrites all my other fields. I have tried the Elasticsearch.update, but cant get it to work.
s3.getObject({Bucket: bucket, Key: key}, function (err, data) {
if (err) {
console.log(err, err.stack); // an error occurred
context.fail();
} else {
console.log('data: ', data); // successful response
var attachment = data.Body.toString('base64');
console.log('attachment: ', attachment);
elasticsearch.index(
{
index: 'files',
pipeline: 'attachment',
type: 'file',
id: key,
body: {
data: attachment
}
},
function (error, resp) {
if (error) {
console.trace('index error!', error);
} else {
console.log('index response', resp);
context.succeed();
}
}
);
}
});

Callback hell on my Nodejs webservice sending to elasticsearch

I have a NodeJs consumer sending data to Elasticsearch at a rate of 2k TPS more or less.
I need to store the Requests as I receive them, and if there is any response, later on, I need to update the info on the request with some data from the response. The thing is that due to the hight TPS, I'm having a lot of issues where the response arrives in Elasticsearch before the request etc.. and this creates a Version conflict on the _doc. this is the part of my node code that does the upsert. I need some help to optimizing this code. Thanks a lot in advance.
sendToElasticSearch(index, type, id, body, cb) {
out('starting sendToElasticSearch()');
var me = this;
me.client.exists({
index: index,
type: type,
id: id
}, function(err, exists) {
if (err) {
cb(err)
} else {
if (exists === true) {
out('exists. doing update.');
// update existing document
me.client.update({
index: index,
type: type,
id: id,
body: body
}, function(err, resp) {
if (err) {
cb(err);
} else {
cb(null, resp);
}
});
} else {
out('adding new document');
// add new document
me.client.create({
index: index,
type: type,
id: id,
body: body
}, function(err, resp) {
if (err) {
cb(err);
} else {
cb(null, resp);
}
});
}
}
});
}
sendToElasticSearch(index, type, id, body, cb) {
var self = this;
function onDone (err, exists) {
if (err)
return cb(err);
var do = exists ? 'update' : 'create';
self[do]({index, type, id, body}, cb);
}
self.client.exists({index, type, id}, onDone);
}

Dynamodb get about 10,000 t0 30,000 records nodejs

getDBResults(id,
startDate_Timestamp,
endDate_Timestamp).then(data,
err)=> {
if (err) console.log("EE", err);
console.log("DD",data);
if (data.length != 0) {
res.json({ "Status": 200, "Data": data });
}
})
function getDBResults(id, startDate_Timestamp,
endDate_Timestamp) {
var q = Q.defer();
const params = {
TableName: 'TableName',
KeyConditionExpression: '#Id=:Id AND #Timestamp BETWEEN
:start_date AND :end_date',
ExpressionAttributeNames: {
"#Timestamp": "Timestamp",
"#Id": "Id"
},
ExpressionAttributeValues: {
":Id": id,
":start_date": startDate_Timestamp,
":end_date": endDate_Timestamp
}
};
var results = [];
var callback = function (err, data) {
console.log("DD", data);
if (err) {
console.log('Dynamo fail ' + err);
q.reject(err);
} else if (data.LastEvaluatedKey) {
params.ExclusiveStartKey = data.LastEvaluatedKey;
docclient.query(params, callback);
} else {
console.log("RR",results);
q.resolve(results);
}
data.Items.forEach(function (item) {
results.push(item);
});
}
docclient.query(params, callback);
return q.promise;
}
with this code, i am not getting any results and result of that
id contains almost 20,000 to 30,000 records. I want to fetch all
that records but it does not log anything on aws console.Please
help me to figure it out
Not sure it is the solution, but I would try to insert in results before calling a new query:
} else if (data.LastEvaluatedKey) {
// save items
data.Items.forEach(function (item) { results.push(item); });
params.ExclusiveStartKey = data.LastEvaluatedKey;
docclient.query(params, callback);
}
Advice: think about using promises, they are easier to work with. Your code is a "callback hell" :)

Async parallel with object only last result

I'm having an issue with async parallel. This is my current code:
// Tasks object for async
var Tasks = {};
// Go through the items
for(index in items) {
var itemName = items[index];
Tasks[index] = function(callback) {
self._requestItem(currency, appID, itemName, function(err, item) {
if( err) {
callback(err, null);
return;
}
callback(null, { "name": itemName, "data": item });
});
}
}
// Go through tasks, using async parallel
this._async.parallel(Tasks, function(err, results) {
console.log(err, results);
});
Each items entry is unique. But when the parallel finishes it shows every result like the last one. For example if I have 3 items in items then async results outputs 0, 1 the same as 2.
null { '0':
{ name: 'Test 3',
data:
{ success: true,
price: 17.02 } },
'1':
{ name: 'Test 3',
data:
{ success: true,
price: 17.02 } },
'2':
{ name: 'Test 3',
data:
{ success: true,
price: 17.02 } } }
Why does it do this? If I use 2 items in items it again copies the result from 1 to 0.
Adding snippet of _requestItem as requested.
Manager.prototype._requestItem = function(currency, appID, itemName, callback) {
var self = this;
this._request({
uri: this._uri,
baseUrl: this._baseUrl,
json: true
}, function(err, res, body) {
if( ! err && res.statusCode !== 200) {
if(self._errorCodes[res.statusCode] !== undefined) {
callback(self._errorCodes[res.statusCode], null);
} else {
callback('Unsuccessful response (' + res.statusCode + '). Is the API having issues?', null);
}
} else if( ! err && res.statusCode === 200) {
callback(null, body);
} else {
callback(err, null);
}
});
}
No matter what is the content of the body of the _requestItem() function the value of the name attribute on the response array elements shouldn't be the same if the elements of the items array are unique.
The only error i can see is the fact that index is declared as a global variable but this shouldn't be the cause of the problem.
I suggest you to inspect the content of the items variable before the code enters the for loop (to see if it has been corrupted before this point). Debugging is a good idea in this case.
A better implementation would be:
var tasks = items.map(function(itemName){
return function(callback) {
self._requestItem(currency, appID, itemName, function(err, item) {
if( err) {
callback(err, null);
return;
}
callback(null, { "name": itemName, "data": item });
});
}
});
// Go through tasks, using async parallel
this._async.parallel(tasks, function(err, results) {
console.log(err, results);
});

Making butch upsert in mongodb: callback never fired

I have an array of documents with unique _id and I want to insert them to my database. Some of them already in db, and for those I want to update an array property (push in array an item). All of this I need to make asyncronuosly, so after all inserted/updated I want to write response back (with callback) to client than all ok or write an error. After googling on subject I've found this solution with async module I've tried to implement it for my case. Now my code looks like this:
function processUsers(arr, listName, callback) {
var users = global.db.collection('vkusers');
var q = async.queue(function(task, cb) {
console.log('upsert butch');
users.insert(task.doc, function(err, doc) {
if (err) {
users.update({
_id : task.doc._id
}, {
$addToSet : {
mLists : listName
}
}, function(error, result){ console.log(error); console.log(result); });
}
});
}, arr.length);
for ( var doc in arr) {
q.push({
doc : arr[doc]
}, function(err) {
if (err)
callback(err, null);
})
}
q.drain = function() {
// this is the queue's callback, called when the queue is empty,
// i.e. when all your documents have been processed.
console.log('drain');
callback(null, { result: "success", upserted: arr.length });
}
}
Callback has signature callback(error, result), arr - my array of documents. I've tested it and with database everything is OK, i am getting the right result. But callback, and q.drain never fired!
You need to call async.queue's callback (cb in your code) when your insert/update is complete. Something like this:
var q = async.queue(function(task, cb) {
console.log('upsert butch');
users.insert(task.doc, function(err, doc) {
if (err) {
users.update({
_id : task.doc._id
}, {
$addToSet : {
mLists : listName
}
}, function(error, result) {
console.log(error);
console.log(result);
cb(error); // Update finished; call cb and pass in "error" so that it can bubble up if it exists
});
} else {
cb(); // Insert succeeded; call cb
}
});
}, arr.length);

Resources