Node.js memcached throwing unhandled promise rejection from within try/catch

I'm using a promisified version of the node.js memcached library for caching.
var promisify = require('util').promisify; // Node's built-in util.promisify
// memcached is the connected Memcached client created in this module
var get = promisify(memcached.get).bind(memcached);
var set = promisify(memcached.set).bind(memcached);
var add = promisify(memcached.add).bind(memcached);
var del = promisify(memcached.del).bind(memcached);
var incr = promisify(memcached.incr).bind(memcached);
var getMulti = promisify(memcached.getMulti).bind(memcached);
These are exposed in the exports of my connections.js file. I figured that this would be sufficient, as I expected memcached to silently fail if it had any problems, and I could just fall back to the database.
Once or twice an hour (I would guess the application gets around 30,000 requests an hour) a request fails, and the output to the log is this:
(node:28788) UnhandledPromiseRejectionWarning: Error: Item is not stored
at Socket.notstored (c:\Workspaces\DOH\Vaccines\Service\node_modules\memcached\lib\memcached.js:445:20)
at Client.rawDataReceived (c:\Workspaces\DOH\Vaccines\Service\node_modules\memcached\lib\memcached.js:744:51)
at Client.BufferBuffer (c:\Workspaces\DOH\Vaccines\Service\node_modules\memcached\lib\memcached.js:678:12)
at Socket.bowlofcurry (c:\Workspaces\DOH\Vaccines\Service\node_modules\memcached\lib\utils.js:126:15)
This seems odd to me, but I figured I would just account for that possibility by wrapping my function in a try/catch.
incr: async function (key, amount) {
    key = Buffer.from(key).toString('base64')
    try {
        if (amount == undefined) {
            amount = 1
        }
        var value = await get(env + key);
        if (!value) {
            await set(env + key, amount, 0)
        } else {
            await incr(env + key, amount);
        }
        return get(env + key)
    } catch (e) {
        console.log("Memcached incr error: " + e);
        return 1
    }
}
While I was debugging and connected to the memcached server, I saw the same error happen on a call to cache.incr(). The exception was not caught; it just failed the request and sent a 500 back to the client.
What am I missing? Does promisify not handle deep exceptions? The log is never able to trace the exception all the way back to a function call in my code; it just stops at utils.js in the memcached library. Why is that disconnect there?
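One thing that can help while tracking this down is a process-level listener for unhandled rejections. This is only a diagnostic sketch using Node's standard process events; it does not fix the underlying rejection:

// Diagnostic only: log any rejection that no .catch()/try-catch ever handled,
// so the failing request can be correlated with the cache call that produced it.
process.on('unhandledRejection', function (reason, promise) {
    console.error('Unhandled rejection:', reason);
});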

Related

Why does my AWS Lambda function randomly fail when using private ElastiCache network calls as well as external API calls?

I am trying to write a caching function that returns cached ElastiCache data or makes an API call to retrieve that data. However, the Lambda function seems to be very unreliable and times out often.
It seems that having Redis calls as well as public API calls causes the issue. I can confirm that I have set up AWS correctly, with a subnet with an internet gateway and a private subnet with a NAT gateway. The function works, but only 10% of the time. The remaining times, execution stops right before making the API call.
I have also noticed that the API calls fail after creating the Redis client. If I make the external API call prior to the Redis check, the function is a lot more reliable and doesn't time out.
Not sure what to do. Is it best practice to separate these two tasks, or am I doing something wrong?
let data = null;
module.exports.handler = async (event) => {
    //context.callbackWaitsForEmptyEventLoop = false;
    let client;
    try {
        client = new Redis(
            6379,
            "redis://---.---.ng.0001.use1.cache.amazonaws.com"
        );
        client.get(event.token, async (err, result) => {
            if (err) {
                console.error(err);
            } else {
                data = result;
                await client.quit();
            }
        });
        if (data && new Date().getTime() / 1000 - eval(data).timestamp < 30) {
            res.send(`({
                "address": "${token}",
                "price": "${eval(data).price}",
                "timestamp": "${eval(data).timestamp}"
            })`);
        } else {
            getPrice(event); //fetch api data
        }
There are a lot of misunderstandings in your code. I'll try to guide you through fixing them and understanding how to do this correctly.
You are mixing asynchronous and synchronous code in your function.
You should use JSON.parse instead of eval to parse the data, because eval allows arbitrary code to be executed in your function.
You're using res.send to return the response to the client instead of the callback. Remember that res.send only exists in Express; you're in a Lambda, and to return the result to the client you need to use the callback function.
To help you with this, I've rewritten your code to address these misunderstandings.
const Redis = require('ioredis');

module.exports.handler = async (event, context, callback) => {
    // Prefer Lambda environment variables instead of hard-coding these values.
    const client = new Redis(
        "REDIS_PORT_ENV",
        "REDIS_HOST_ENV"
    );
    const data = await client.get(event.token);
    client.quit();
    const parsedData = JSON.parse(data);
    if (parsedData && new Date().getTime() / 1000 - parsedData.timestamp < 30) {
        callback(null, {
            address: event.token,
            price: parsedData.price,
            timestamp: parsedData.timestamp
        });
    } else {
        const dataFromApi = await getPrice(event);
        callback(null, dataFromApi);
    }
};
There is another style where the Lambda returns an object instead of passing one to the callback, but I think you get the idea and understand your mistakes.
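For completeness, here is a minimal sketch of that second style, assuming the same ioredis client and getPrice helper as above; the REDIS_HOST/REDIS_PORT environment variable names are placeholders. With an async handler you simply return the result and drop the callback entirely:

const Redis = require('ioredis');

module.exports.handler = async (event) => {
    // Hypothetical env vars; configure them in the Lambda console.
    const client = new Redis(process.env.REDIS_PORT, process.env.REDIS_HOST);
    try {
        const data = await client.get(event.token);
        const parsedData = data ? JSON.parse(data) : null;
        if (parsedData && new Date().getTime() / 1000 - parsedData.timestamp < 30) {
            // Returning a value from an async handler resolves the invocation.
            return {
                address: event.token,
                price: parsedData.price,
                timestamp: parsedData.timestamp
            };
        }
        return await getPrice(event); // fall back to the external API
    } finally {
        client.quit();
    }
};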
Follow the docs on the correct usage of Lambda:
https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/using-lambda-functions.html
To understand more about async and sync in JavaScript:
https://www.freecodecamp.org/news/synchronous-vs-asynchronous-in-javascript/
JSON.parse x eval: JSON.parse vs. eval()

Getting response before firebase transaction done

I'm trying to retrieve all the children, then display the one that matches.
I print the value in the console and my code works well there after a few seconds, but when I print it in the agent as a message, it shows as not available before the response arrives, because it does not wait.
Here is my code:
function retrieveContact(agent) {
    var query = admin.database().ref("/contacts").orderByKey();
    query.once("value")
        .then(function(snapshot) {
            snapshot.forEach(function(childSnapshot) {
                var key = childSnapshot.key;
                var childName = childSnapshot.child('name').val();
                if (agent.parameters.name == childName) {
                    console.log('find ' + childName);
                    agent.add('The email address for ' + childName + ' is ' + childSnapshot.child('email').val());
                }
                // console.log('testMode' + childName);
            });
        }); // .then
} // .once
So, how can I wait for my response and then let the agent show the result?
How can I include the promise concept in my code?
You don't show your entire Handler function, but if you're doing async operations (such as reading from the firebase db) you must return the Promise. This is how the Handler Dispatcher knows to wait for the Promise to complete before returning a response to the user.
In your case, it is probably as simple as
return query.once("value")
// etc
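
In your case, the whole handler could look roughly like this (a sketch based on the code in the question; the only meaningful change is the return):

function retrieveContact(agent) {
    var query = admin.database().ref("/contacts").orderByKey();
    // Returning the promise lets the handler dispatcher wait for the database read
    // before the agent's response is sent back to the user.
    return query.once("value").then(function(snapshot) {
        snapshot.forEach(function(childSnapshot) {
            var childName = childSnapshot.child('name').val();
            if (agent.parameters.name == childName) {
                agent.add('The email address for ' + childName + ' is ' + childSnapshot.child('email').val());
            }
        });
    });
}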

Firebase functions returns RangeError at Function.MapValues

I've run into an issue with a Firebase function, written in TypeScript for the Node.js environment. I have a function with an HTTPS endpoint where the client can send data that needs to be stored in the database. In order to know which objects have already been added to the database, it first reads a path ("lookup") that has a simplified registry of the objects (lookup/:objectId/true). Then it builds the values that should be updated at the actual object path and updates them in the database.
The function is as follows:
export const scrapeAssignments = functions.https.onCall((data, context) => {
    const htmlString = data.htmlString
    // const htmlString = fs.readFileSync(testPath.join(__dirname, "./assignmentListExample.html"), { encoding: 'utf8' })
    if (!(typeof htmlString === 'string') || htmlString.length === 0) {
        throw new functions.https.HttpsError('invalid-argument', 'The function must be called with one argument "htmlString"');
    }
    const userId = getUserIdFromCallableContext(context)
    console.log("userId", userId)

    let newAssignments: ScrapedAssignment[] = []
    try {
        newAssignments = parseAssignment(htmlString)
    } catch (e) {
        const error = <Error>e
        throw new functions.https.HttpsError('not-found', 'parsing error: ' + error.message)
    }

    return admin.database().ref("lookup").child(userId).child("assignments")
        .once("value", lookupSnapshot => {
            const oldAssignmentsLookup = lookupSnapshot.val() || {}
            const newAssignmentsLookup = makeLookup(newAssignments)

            // 1. Create update values for scraped assignment data
            let scrapedAssignmentUpdateValues = newAssignments.reduce((prev, current) => {
                const prefixed = prefixObject(current.id + "/", current)
                return { ...prev, ...prefixed }
            }, {})

            // 2. Use the diff from the two lookups to find old assignments to delete
            const removeAssignmentsValues = {}
            Object.keys(oldAssignmentsLookup).forEach(assignmentId => {
                if (isUndefined(newAssignmentsLookup[assignmentId]))
                    removeAssignmentsValues[assignmentId] = null
            })

            // 3. Add other user values to newly found assignments
            Object.keys(newAssignmentsLookup).forEach(assignmentId => {
                if (isUndefined(oldAssignmentsLookup[assignmentId])) {
                    const doneKey = assignmentId + "/done"
                    scrapedAssignmentUpdateValues[doneKey] = false
                }
            })

            const combinedValues = { ...scrapedAssignmentUpdateValues, ...removeAssignmentsValues }
            return admin.database().ref("userAssignments").child(userId).update(combinedValues)
        }).catch(reason => {
            throw new functions.https.HttpsError('internal', 'Database reason: ' + reason)
        })
})
I see that the data is written to the right place and everything seems to go as expected, except that when I call the function from an iOS app, it returns an "internal" error.
When I check the function logs in the cloud console, I see the following error:
assignment-scrapeAssignments uolk47opctna Unhandled error RangeError: Maximum call stack size exceeded
    at Function.mapValues (/user_code/node_modules/firebase-functions/node_modules/lodash/lodash.js:13395:23)
    at encode (/user_code/node_modules/firebase-functions/lib/providers/https.js:204:18)
    at /user_code/node_modules/firebase-functions/node_modules/lodash/lodash.js:13400:38
    at /user_code/node_modules/firebase-functions/node_modules/lodash/lodash.js:4925:15
    at baseForOwn (/user_code/node_modules/firebase-functions/node_modules/lodash/lodash.js:3010:24)
    at Function.mapValues (/user_code/node_modules/firebase-functions/node_modules/lodash/lodash.js:13399:7)
    at encode (/user_code/node_modules/firebase-functions/lib/providers/https.js:204:18)
    at /user_code/node_modules/firebase-functions/node_modules/lodash/lodash.js:13400:38
    at /user_code/node_modules/firebase-functions/node_modules/lodash/lodash.js:4925:15
    at baseForOwn (/user_code/node_modules/firebase-functions/node_modules/lodash/lodash.js:3010:24)
    at Function.mapValues (/user_code/node_modules/firebase-functions/node_modules/lodash/lodash.js:13399:7)
All I can read out of this is that it's a "RangeError: Maximum call stack size exceeded" error, and that something happens at Function.mapValues. From what I can read in this SO question, it seems to be an issue with reading and writing to the same location at the same time. But I'm quite sure I'm not doing that here. Plus, everything seems to behave like it should, except for the actual error.
When combinedValues is updated, it is an object with ~300 key/value pairs; is that a problem?
Looks like your function ran out of memory; every Cloud Function has a fixed amount of memory allocated to its execution.
You can try to increase the allocated memory from its default value (256 MB) up to 2 GB as follows:
go to Functions -> Dashboard, then open your problematic function and click "Detailed usage stats" in the menu on the right;
then, on the function's details page in the Google Cloud dashboard, click Edit;
then increase the value of "Memory allocated" to 2 GB (or a lower sufficient value).
Note: keep in mind that as your data grows you may exceed even the highest limit, so consider this when querying inside Cloud Functions.
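If you deploy with the Firebase CLI, the memory setting can also be declared in code rather than in the console; a minimal sketch assuming the runWith option from the firebase-functions v1 API:

// Request more memory (and a longer timeout) for this specific function.
export const scrapeAssignments = functions
    .runWith({ memory: '2GB', timeoutSeconds: 300 })
    .https.onCall((data, context) => {
        // ... same handler body as in the question ...
    })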

Node - How to handle uncaught exceptions with callbacks in node?

I am writing a Node.js application. I am using request and cheerio to load a set of URLs and get a bunch of information for each site; for now, let's assume all I am trying to get is the title:
var urls = ["url_1", "url_2", "url_3", ..., "url_n"];
for (var i = 0; i < urls.length; i++) {
    getDOMTitle(urls[i], function(error, title) {
        if (error)
            console.log("Error while getting title for " + urls[i]);
        else
            console.log("The title for " + urls[i] + " is " + title);
    });
}
This is how my getDOMTitle method looks:
function getDOMTitle(urlReq, callback) {
    var request = require('request');
    var cheerio = require('cheerio');
    request({ url: urlReq }, function(error, response, doc) {
        var $ = cheerio.load(doc);
        if (error) {
            callback(true, null);
        } else {
            $('title', 'head').each(function (i, elem) {
                var title = $(this).text();
                callback(false, title);
            });
        }
    });
}
In the case where the module throws an uncaught exception, how do I handle that situation?
I have tried adding the following:
process.on('uncaughtException', function (err) {
    console.error(err);
    console.log("Node NOT Exiting...");
    callback(true, null);
});
When I do that, I get an error saying I cannot set the headers once they have been sent. If I remove the callback from the process error handling, I do not see that error, but the client spins for a long time because, I assume, we are never calling the callback.
How can I solve this?
Also, I have read somewhere that you can catch uncaught exceptions at the application level so you don't have to replicate the code to catch them in every method. Is that possible? And if it is, and the method that threw the exception is expected to call back with some information, how can that be achieved?
Thank you,
To answer your stated question, using an uncaught exception handler as a general error-trapping mechanism is commonly regarded as poor design. It's a false economy to use it to handle anything other than non-recoverable situations where you just need to do some cleanup before exiting.
You've got some problems in your example code. In your for loop, all the callbacks are going to report that they were working with the very last URL in your array because they're all referring to the same copy of i, which will be at its highest value by the time any of them execute. You need to use a helper function or an immediate function invocation to give each callback a private copy of i.
In getDOMTitle the error callback should be callback(error) and the code in your loop should include the returned value in the error message. The success callback should use null as its first parameter, though this is just a matter of convention.
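Putting those two points together, a minimal sketch (it keeps the getDOMTitle signature from the question and uses a helper function so each callback gets its own copy of the URL):

var urls = ["url_1", "url_2", "url_3"];

// The helper gives each callback a private copy of the URL
// instead of sharing the loop variable i.
function fetchTitle(url) {
    getDOMTitle(url, function (error, title) {
        if (error)
            console.log("Error while getting title for " + url + ": " + error);
        else
            console.log("The title for " + url + " is " + title);
    });
}

for (var i = 0; i < urls.length; i++) {
    fetchTitle(urls[i]);
}

Inside getDOMTitle, the failure path would then call callback(error) and the success path callback(null, title), following the usual error-first convention.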

node.js redis async query

Hope someone can assist with a (simple) async question on node-redis. I'm trying to load a set from a hash in the Redis DB and then use that populated set further on. Here's the code snippet:
var redis_client = redis.createClient(REDIS_PORT, REDIS_URL);
redis_client.hgetall(target_hash, function(e, o) {
    Object.keys(o).forEach(function(target) {
        // get the "name" from the hash
        redis_client.hget(o[target], "name", function(e, o) {
            if (e) {
                console.log("Error occurred getting key: " + e);
            } else {
                redis_client.sadd("newset", o);
            }
        });
    });
});
// the following line prints nothing - why ??
redis_client.smembers("newset", redis.print);
When I examine the contents of "newset" in Redis it is populated as expected, but at runtime it displays as empty. I'm sure it's an async issue; any help much appreciated!
hgetall is an asynchronous call: when it receives a reply from the Redis server, it will eventually call your callback function(e, o) { ... }. But within your script, it actually returns immediately. Since hgetall returns very fast, Node will immediately run the next statement, smembers. But at this point the sadd statements haven't run yet (even if your system is very fast, because there hasn't been a context switch yet).
What you need to do is make sure smembers isn't called before all the possible sadd calls have executed. redis_client provides the multi function, which lets you queue up all the sadd calls and run a callback when they're all done. I haven't tested this code, but you could try this:
var redis_client = redis.createClient(REDIS_PORT, REDIS_URL);
redis_client.hgetall(target_hash, function(e, o) {
    var multi = redis_client.multi();
    var keys = Object.keys(o);
    var i = 0;
    keys.forEach(function (target) {
        // get the "name" from the hash
        redis_client.hget(o[target], "name", function(e, o) {
            i++;
            if (e) {
                console.log("Error occurred getting key: " + e);
            } else {
                multi.sadd("newset", o);
            }
            if (i == keys.length) {
                multi.exec(function (err, replies) {
                    console.log("MULTI got " + replies.length + " replies");
                    redis_client.smembers("newset", redis.print);
                });
            }
        });
    });
});
Some libraries have a built-in equivalent of forEach that allows you to specify a function to be called when the loop is all done. If not, you have to manually keep track of how many callbacks there have been and call smembers after the last one.
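For example, with Node's built-in util.promisify you can turn the hget calls into promises and let Promise.all signal when they have all finished. This is only a sketch, assuming the same node-redis client and hash layout as above:

const { promisify } = require('util');
const hgetAsync = promisify(redis_client.hget).bind(redis_client);
const saddAsync = promisify(redis_client.sadd).bind(redis_client);

redis_client.hgetall(target_hash, function (e, o) {
    // Kick off one hget per key and wait for all of them before reading the set.
    const lookups = Object.keys(o).map(function (target) {
        return hgetAsync(o[target], "name").then(function (name) {
            return saddAsync("newset", name);
        });
    });
    Promise.all(lookups)
        .then(function () {
            redis_client.smembers("newset", redis.print);
        })
        .catch(function (err) {
            console.log("Error occurred getting key: " + err);
        });
});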
You shouldn't use multi unless you actually need a transaction.
Just keep a counter of the callbacks and call smembers in the final one:
var redis_client = redis.createClient(REDIS_PORT, REDIS_URL);
redis_client.hgetall(target_hash, function(e, o) {
    var keys = Object.keys(o);
    var i = 0;
    keys.forEach(function(target) {
        // get the "name" from the hash
        redis_client.hget(o[target], "name", function(e, o) {
            i++;
            if (e) {
                console.log("Error occurred getting key: " + e);
            } else {
                redis_client.sadd("newset", o);
                if (i == keys.length) {
                    redis_client.smembers("newset", redis.print);
                }
            }
        });
    });
});

Resources