ioredis in nodejs does nothing when 'get' from AWS ElastiCache Redis

I am running the following code in a Node.js container hosted on ECS. It runs great locally against Redis. In AWS it appears to connect (if I use an invalid address it errors on connection, so I'm assuming it's connected), but when I run redis.get(...) nothing happens. I've enabled debugging for ioredis and I get one message when I attempt the get:
2020-04-17T22:56:10.701Z ioredis:redis status[replica.virtual-happy-hour-redis.fnt3zc.usw2.cache.amazonaws.com:6379]: [empty] -> connecting
2020-04-17T22:56:11.042Z ioredis:redis status[10.200.0.37:6379]: connecting -> connect
2020-04-17T22:56:11.045Z ioredis:redis write command[10.200.0.37:6379]: 0 -> info([])
2020-04-17T23:02:02.627Z ioredis:redis queue command[10.200.0.37:6379]: 0 -> get([ 'friday' ])
# suspense is killing me....
Here's the code
var Redis = require('ioredis');

console.log('cache connecting to', CONFIG.CACHE_URL);
var redis = new Redis(CONFIG.CACHE_URL);
console.log('cache connected');

const getRoom = (roomName, callback) => {
  let room;
  console.log('getRoom', roomName); // this logs as expected, nothing after this does
  try {
    redis.get(roomName, (err, result) => {
      if (err) {
        console.log('get cache error', err);
      } else {
        if (result) {
          console.log('cache result', result);
          room = JSON.parse(result);
        } else {
          console.log('no cache', roomName);
          room = defaultRoom(roomName);
          redis.set(roomName, JSON.stringify(room));
        }
      }
      if (callback) callback(room);
      console.log('getRoom done');
    });
  } catch (ex) {
    console.log('getRoom error', ex.toString());
  }
};
I've confirmed the security groups, and ElastiCache is in the same VPC as my ECS container. What can I do to troubleshoot this?
UPDATE
I swapped out ioredis for the redis package and it still happens, nada...

Fixed it! I wasn't aware that I (a newb to AWS) had configured ElastiCache for encryption in transit. Once I set the auth token and used it with ioredis, it works! I'm back in business!
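For anyone else who hits this: with encryption in transit enabled, the client has to connect over TLS and supply the AUTH token. Here is a minimal sketch with ioredis under that assumption (the endpoint is a placeholder, and reading the token from an environment variable is purely illustrative):

const Redis = require('ioredis');

const redis = new Redis({
  host: 'your-cluster.xxxxxx.usw2.cache.amazonaws.com', // placeholder ElastiCache endpoint
  port: 6379,
  password: process.env.REDIS_AUTH_TOKEN, // the ElastiCache AUTH token
  tls: {},                                // required when encryption in transit is enabled
});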

Related

Add data to Redis on a distant server

Beginner to Redis here.
I'm trying to CRUD data on a remote Redis server, but I can't seem to make it work.
First we connect to the server:
const client = redis.createClient("the PORT", "the host", {auth_pass: "the password"});

(async () => {
  try {
    await client.connect();
    console.log('Connected to redis');
  } catch (err) {
    console.error(err);
  }
})();
The connection seems to work, as I receive Connected to redis in my terminal.
But when I try to add data, it doesn't seem to work:
app.get('/redis', (req, res) => {
  // set name to antoine on redis
  client.set('name', 'antoine', redis.print);
  // get name from redis
  client.get('name', (err, name) => {
    res.send(name);
  });
});
The endpoint just loads indefinitely, as if it never executes the
client.get('name', (err, name) => {
  res.send(name);
});
part. And in my Redis GUI it shows No keys to display., so it didn't create the key either.
What am I missing here?
EDIT: I just realized that instead of connecting to the distant server, I was somehow connecting to my local one.
So it seems it doesn't connect to the distant one at all.
EDIT 2: I had forgotten the async connect function; that is why it didn't work!
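For reference, node-redis v4 no longer supports the positional (port, host, options) signature, and commands return promises instead of taking callbacks, which matches what happened here. A minimal sketch of the remote connection under those assumptions (URL, host, and password are placeholders):

const redis = require('redis');

// In v4 the target server is given via a connection URL (or a socket object).
const client = redis.createClient({
  url: 'redis://:thePassword@the-remote-host:6379', // placeholder credentials and host
});

client.on('error', (err) => console.error('Redis client error', err));

(async () => {
  await client.connect();               // must complete before issuing commands
  await client.set('name', 'antoine');  // v4 commands return promises
  const name = await client.get('name');
  console.log(name);                    // 'antoine'
})();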

Snowflake Node Lambda Connection Hangs

I'm using the Snowflake Node.js driver to connect to a DB. When running the connector from a local server I have no issues. However, when I try the same function in Lambda I can't seem to connect. There are no errors, exceptions, or timeouts... just nothing. Here is the code I'm using, per their documentation.
var snowflake = require("snowflake-sdk");

var connection = snowflake.createConnection({
  account: "****",
  username: "******",
  password: "******",
});

connect(connection);

const response = {
  statusCode: 200,
  body: JSON.stringify("Hello from Lambda!"),
};
return response;

function connect(connection) {
  console.log("in connection");
  let connection_ID;
  try {
    connection.connect(function (err, conn) {
      if (err) {
        console.error("Unable to connect: " + err);
      } else {
        console.log("Successfully connected to Snowflake");
        // Optional: store the connection ID.
        connection_ID = conn.getId();
      }
      console.log(connection_ID);
    });
  } catch (err) {
    console.log(err);
  }
}
For clarity, my Lambda has no issues connecting to other APIs and is not running behind a VPC.
Any help would be greatly appreciated.
If you have not selected any VPC for your Lambda function, it will use the default VPC of the region.
Can you select a VPC that has access to the Snowflake public endpoints and check?
If it is still an issue, please post the CloudWatch logs; they should give a clue.
You can also check the Snowflake History page to see whether any client-side connection request arrives from the Lambda.
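Independent of the VPC angle, it is also worth making sure the handler actually waits for the asynchronous connect callback before returning, otherwise Lambda may freeze the container before anything is logged. A sketch of that pattern, reusing the placeholder credentials from the question (this is an assumption about the cause, not something confirmed above):

const snowflake = require("snowflake-sdk");

exports.handler = async () => {
  const connection = snowflake.createConnection({
    account: "****",
    username: "******",
    password: "******",
  });

  // Wrap the callback-style connect in a promise and await it,
  // so the handler does not return before the callback fires.
  await new Promise((resolve, reject) => {
    connection.connect(function (err, conn) {
      if (err) return reject(err);
      console.log("Successfully connected to Snowflake, id:", conn.getId());
      resolve(conn);
    });
  });

  return {
    statusCode: 200,
    body: JSON.stringify("Hello from Lambda!"),
  };
};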

AWS Lambda: Redis ElastiCache connection timeout error

I have a lambda function using Node 12.
I need to add a new connection to a Redis database hosted in AWS ElastiCache.
Both are in one private VPC and the security groups/subnets are configured properly.
Solution:
globals.js:
const redis = require('redis');

const redisClient = redis.createClient(
  `redis://${process.env.REDIS_HOST}:${process.env.REDIS_PORT}/${process.env.REDIS_DB}`,
);

redisClient.on('error', (err) => {
  console.log('REDIS CLIENT ERROR:' + err);
});

module.exports.globals = {
  REDIS: require('../helpers/redis')(redisClient),
};
index.js (outside handler):
const { globals } = require('./config/globals');
global.app = globals;
const lambda_handler = (event, context, callback) => { ... }
exports.handler = lambda_handler;
helpers/redis/index.js:
const get = require('./get');

module.exports = (redisClient) => {
  return {
    get: get(redisClient)
  };
};
helpers/redis/get.js:
module.exports = (redisClient) => {
  return (key, cb) => {
    redisClient.get(key, (err, reply) => {
      if (err) {
        cb(err);
      } else {
        cb(null, reply);
      }
    });
  };
};
Function call:
app.REDIS.get(redisKey, (err, reply) => {
  console.log(`REDIS GET: ${err} ${reply}`);
});
Problem:
When increasing lambda timeout to a value greater than Redis timeout, I get this error:
REDIS CLIENT ERROR:Error: Redis connection to ... failed - connect ETIMEDOUT ...
Addition:
I tried quitting/closing the connection after each transaction:
module.exports = (redisClient) => {
  return (cb) => {
    redisClient.quit((err, reply) => {
      if (err) {
        cb(err);
      } else {
        cb(null, reply);
      }
    });
  };
};
app.REDIS.get(redisKey, (err, reply) => {
  console.log(`REDIS GET: ${err} ${reply}`);
  if (err) {
    cb(err);
  } else {
    if (reply) {
      app.REDIS.quit(() => {
        cb();
      });
    }
  }
});
Error:
REDIS GET: AbortError: GET can't be processed. The connection is already closed.
Extra Notes:
I have to use callbacks; this is why I pass them in the examples above.
I'm using "redis": "^3.0.2".
It's not a configuration issue: the cache was accessed hundreds of times in a short period of time before it started giving the timeout errors.
Everything works normally locally.
"It's not a configuration issue: the cache was accessed hundreds of times in a short period of time before it started giving the timeout errors."
I think this is the origin of the issue: the Redis database has probably hit its size limit and cannot process new data. Can you delete old data in it?
It is also possible that ElastiCache has a limit on new TCP client connections; if it is depleted, new connections are refused with an error message similar to the one you mentioned.
If the Redis client in an AWS Lambda function cannot establish a connection, the Lambda function fails and a new one is started. The new Lambda function makes one more connection to Redis, Redis cannot process it, and one more Lambda function is started...
So at some point we hit the limit on active Redis connections and the system is deadlocked.
I think you can temporarily stop all Lambda functions and scale up the ElastiCache Redis database.
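A pattern that can also help here, offered as an assumption rather than as part of the answer above: keep the client outside the handler so warm invocations reuse a single connection, and tell Lambda not to wait for the open Redis socket before freezing the container. A sketch building on the question's globals.js setup (the key name is a placeholder):

const { globals } = require('./config/globals');
global.app = globals;

const lambda_handler = (event, context, callback) => {
  // Don't keep the invocation alive just because the Redis socket is still open.
  context.callbackWaitsForEmptyEventLoop = false;

  app.REDIS.get('someKey', (err, reply) => { // 'someKey' is a placeholder
    console.log(`REDIS GET: ${err} ${reply}`);
    callback(err, reply);
  });
};

exports.handler = lambda_handler;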

How to avoid a broken connection with ORACLEDB? Nodejs

I have this database connection. Inside the function, at the spot where the comment is located, there is a data update loop for a REST API. The data is updated, but while data in the Oracle database is being updated the connection may fail, and after that all subsequently updated data comes back undefined. How can I connect to the database properly so that there are no failures?
oracledb.getConnection(
  {
    user: db.user,
    password: db.password,
    connectString: db.connectString
  },
  connExecute
);

function connExecute(err, connection) {
  if (err) {
    console.error(err.message);
    return;
  }
  sql = `SELECT * FROM db.test`;
  connection.execute(sql, {}, { outFormat: oracledb.OBJECT },
    function (err, db) {
      if (err) {
        console.error(err.message);
        connRelease(connection);
        return;
      }
      // data update loop
      connRelease(connection);
    });
}

function connRelease(connection) {
  connection.close(
    function (err) {
      if (err) {
        console.error(err.message);
      }
    });
}
You should be using a connection pool. Connection pools have built-in logic to detect connections with issues and create new connections transparently. See this series on creating a REST API for more details: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
Keep in mind that issues can still happen, so you have to handle errors as needed for your application.
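A minimal pool sketch with node-oracledb, reusing the db config object from the question (the pool sizes are arbitrary examples, not recommendations):

const oracledb = require('oracledb');

async function init() {
  // Create the default connection pool once at startup.
  await oracledb.createPool({
    user: db.user,
    password: db.password,
    connectString: db.connectString,
    poolMin: 1,
    poolMax: 4,
  });
}

async function getTestRows() {
  let connection;
  try {
    connection = await oracledb.getConnection(); // borrows a connection from the default pool
    const result = await connection.execute(
      `SELECT * FROM db.test`,
      {},
      { outFormat: oracledb.OUT_FORMAT_OBJECT } // newer name for oracledb.OBJECT
    );
    return result.rows;
  } finally {
    if (connection) {
      await connection.close(); // releases the connection back to the pool
    }
  }
}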
Mostly you add a listener on the connection object and, on disconnection or failure, create the connection again. With minor changes you can adopt this approach and use listeners to check whether the connection is available and, if not, connect again. There could be several reasons that result in the connection closing, so it is better to handle exceptions, check whether you are still connected, and reconnect in case of error.
Or you can try this npm package, which will do the reconnection for you:
https://www.npmjs.com/package/oracledb-autoreconnect
Ping me if you need clarification.
var dbConfig = {
  host: '----',
  user: '----',
  password: '----',
  database: '----',
  port: ----
};

var connection;

function handleDisconnect() {
  // Recreate the connection, since the old one cannot be reused.
  connection = <obj>.getConnection(dbConfig);

  connection.connect(function onConnect(err) {
    // The server is either down or restarting (takes a while sometimes).
    if (err) {
      console.log('error when connecting to db:', err);
      // We introduce a delay before attempting to reconnect, to avoid a hot loop
      // and to allow our node script to process asynchronous requests in the meantime.
      // If you're also serving http, display a 503 error.
      setTimeout(handleDisconnect, 10000);
    }
  });

  connection.on('error', function onError(err) {
    console.log('db error', err);
    if (err.code == 'PROTOCOL_CONNECTION_LOST') {
      // Connection lost due to either a server restart or a connection
      // idle timeout (the wait_timeout server variable configures this).
      handleDisconnect();
    } else {
      throw err;
    }
  });
}

handleDisconnect();

How do I ignore redis if it is not available?

I want my application (let's say a simple Node file for now) to work as-is even if Redis is not available. I haven't been able to find the correct way to do it. This is what I've tried.
var redis = require('redis');
var redisClient = null;

var getRedisClient = function(){
  if (redisClient) {
    return redisClient;
  }
  try {
    redisClient = redis.createClient({connect_timeout: 5000, max_attempts: 1});
    redisClient.on("error", function(err) {
      console.error("Error connecting to redis", err);
      redisClient = null;
    });
    return redisClient;
  } catch (ex) {
    console.log("error initialising redis client " + ex);
    return null;
  }
};

try {
  var client = getRedisClient();
  console.log("done!");
} catch (ex) {
  console.log("Exception");
}
However, with this code my application exits if Redis is not available (it shouldn't, because I've not issued a process.exit() call).
How can I solve this?
Checking for Successful Connection on Start
Using a promise, you could guarantee that at least initially, you were able to connect to redis without error within a specified time period:
const redis = require('redis');
const Promise = require('bluebird');

function getRedisClient(timeoutMs){
  return new Promise((resolve, reject) => {
    const redisClient = redis.createClient();
    const timer = setTimeout(() => reject('timeout'), timeoutMs);
    redisClient.on("ready", () => {
      clearTimeout(timer);
      resolve(redisClient);
    });
    redisClient.on("error", (err) => {
      clearTimeout(timer);
      reject(err);
    });
  });
};

const redisReadyTimeoutMs = 10000;
getRedisClient(redisReadyTimeoutMs)
  .then(redisClient => {
    // the client has connected to redis successfully
    return doSomethingUseful();
  }, error => {
    console.log("Unable to connect to redis", error);
  });
You Need Proper Error Handling
The redis client being non-null does NOT guarantee that using it won't throw an error.
You could experience infrastructure misfortune, e.g. a crashed redis process, running out of memory, or the network being down.
A bug in your code could cause an error, e.g. invalid or missing arguments to a redis command.
You should be handling redis client errors as a matter of course.
DON'T null the Redis Client on Error
It won't give you much, but it will force you to check for null every time you try to use it.
The redis client also has inbuilt reconnect and retry mechanisms that you'll miss out on if you null it after the first error. See the redis package docs and look for retry_strategy.
DO Wrap your redis client code with try .. catch ... or use .catch in your promise chain.
DO Make use of a retry_strategy.
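For the last point, a minimal retry_strategy sketch for node-redis 2.x/3.x, adapted from the package docs (the thresholds are arbitrary examples):

const redis = require('redis');

const redisClient = redis.createClient({
  retry_strategy: function (options) {
    if (options.error && options.error.code === 'ECONNREFUSED') {
      // Redis is down; log it and keep retrying instead of crashing the app.
      console.error('Redis refused the connection, retrying...');
    }
    if (options.total_retry_time > 1000 * 60 * 60) {
      // Give up after an hour of retrying in total.
      return new Error('Retry time exhausted');
    }
    if (options.attempt > 50) {
      // Stop reconnecting after 50 attempts.
      return undefined;
    }
    // Otherwise reconnect after an increasing delay, capped at 3 seconds.
    return Math.min(options.attempt * 100, 3000);
  }
});

redisClient.on('error', (err) => console.error('Redis error', err));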
