Redis-node library discard transactions - node.js

I'm using the excellent redis-node library.
But, I don't know how to make the following case to work:
I have a quantity field passed by param, then, inside a for loop, i need to use LPOP to extract the items from a first list, and RPUSH into another list, and the quantities must be the same, all inside a transaction block.
So, if the quantity param and the number of items extracted are different, I want to DISCARD the transaction — is this possible?
I put a piece of code that i've at the moment.
// QUESTION CODE — reproduces the "ERR EXEC without MULTI" error discussed below.
var redisNode = require('redis-node');
var client = redisNode.createClient();
// NOTE(review): req.params.qty arrives as a string — presumably compared loosely below.
var qty = req.params.qty;
// init transaction, lpop first and rpush finally
client.transaction(function () {
// NOTE(review): `<=` runs qty+1 iterations; likely intended `<`.
for (var i=0; i <= qty; i++) {
// lpop is asynchronous: these callbacks fire AFTER the length check below.
client.lpop(initStockListKey, function (err, item) {
if (err) throw err;
console.log('[assign] CREATE TRANSACTION list %s, pop %s', initStockListKey, item);
inProgressItems.push(item);
});
}
// pop qty validation, DISCARD
// This check runs before any lpop callback has executed, so
// inProgressItems.length is still 0 at this point — the core problem.
if (inProgressItems.length != qty) {
client.discard(function (err, status) {
console.log('[assign] DISCARD TRANSACTION err %s status %s', err, status);
})
}
});
I have the following error running this code:
ERR EXEC without MULTI

You're not using MULTI in the right way. Check the following sample from the node_redis documentation (note: node_redis is a different library from redis-node, but the MULTI/EXEC pattern is the same), found in the library's GitHub repository:
// node_redis MULTI/EXEC sample: commands queued on the multi object are
// buffered client-side and executed atomically when exec() is called.
var redis = require("redis");
var client = redis.createClient();
// start a separate multi command queue
var multi = client.multi();
multi.incr("incr thing", redis.print);
multi.incr("incr other thing", redis.print);
// runs immediately
client.mset("incr thing", 100, "incr other thing", 1, redis.print);
// drains multi queue and runs atomically
multi.exec(function (err, results) {
console.log(results); // 101, 2
});
// you can re-run the same transaction if you like
multi.exec(function (err, results) {
console.log(results); // 102, 3
client.quit();
});
In Redis, EXEC marks the end of an atomic operation started by MULTI.

Related

Any way to reduce the amount of concurrent requests to fetch data and cache in nodejs?

I have an Express app which requires very low response rate ~<200ms. Right now we can only get this number but that's a separate topic.
We're planning to fetch a piece of data from the database, if found in Redis return the data if not then fire the request and save that to redis so the next requests can get it from Redis.
I'm running some testing and was wondering if there's a way to reduce the amount of database fetching requests?
For example, currently our application has 300req/s per box. We have six boxes running on AWS. If for the first time that piece of data is not available in Redis, there might be around ~500 requests trying to fetch the data from DB and cache that in Redis. We're trying to reduce that number down. Not sure if there's a way in Node.js or Redis to handle that.
Here's the code that I'm testing.
// QUESTION CODE — cache-aside lookup; the else branch is where concurrent
// cache misses all fall through to the database at once.
client.getAsync('key').then(function (data) {
if(data) {
console.log(data); // Return this data if found
res.send(data);
} else {
// I'm trying to reduce the number of calls for concurrent requests in this block.
console.log('not found');
// NOTE(review): placeholder pseudo-code — the DB fetch is elided here.
var dataFromDb = // fetch data from DB
client.set('key', dataFromDb); // Fire and forget
res.send('not found'); // Return not found right away
}
});
And I test the call by using ab
ab -n 20 -c 10 http://localhost:8081/redis
This is the results I got
not found
not found
not found
not found
not found
not found
something
not found
something
something
something
something
something
something
something
something
something
something
In this example, there's 7 requests trying to fetch database with the same data and save to Redis.
My question is: is there any way I can reduce the number of requests? Fetching from the DB is quite slow at the moment (~900ms; we're trying to optimize that separately).
Yes, there is. I did the same thing. I will describe only the logic here. Your fetchCache method should return a promise. You also keep an array of { cacheKey, promise } entries. Each time you send a request, you add its key to this array. The next time you need to fetch the cache, you check the array first: if the key is there, grab its promise. Otherwise, call fetchCache.
Here is my code. It works, but probably hard to read. Should give you a basic understanding.
// Caches translations and de-duplicates concurrent lookups: every caller
// asking for the same key while a translation job is in flight shares the
// single underlying job instead of starting its own.
// NOTE(review): depends on external `models`, `_` (lodash), `co`, and
// `translator` — none shown here; generator methods are run via co.
class DictTranslatableRepo {
constructor(model) {
var self = this;
self.title = model + "s Repo";
self.model = models[model];
// key -> array of resolve callbacks waiting on an in-flight job
self.running = {};
// key -> translated value (warm in-memory cache)
self.curItems = {};
}
// Generator: pre-loads the whole table into the in-memory cache.
*start() {
var self = this;
var curItems = yield self.model.findAll();
_.forEach(curItems, function(row) {
self.curItems[row.key] = row.value;
});
};
// Generator: resolve `from` to its translation. Cache hits return
// synchronously; otherwise either start a job, or (if one is already
// running for this key) just queue our resolver on it.
*map(from) {
var self = this;
if (from == "") return "";
if (!_.isUndefined(self.curItems[from])) return self.curItems[from];
if (_.isUndefined(self.running[from])) {
// First request for this key: create the waiter list, start the job.
self.running[from] = [];
return new Promise(function(resolve, reject) {
self.running[from].push(resolve);
self.job(from, function(err, to) { // Main job
// Wake every caller that queued up while the job was running.
var callbackArr = self.running[from];
delete self.running[from];
_.forEach(callbackArr, function(callback) {
callback(to);
});
});
});
} else {
// Job already in flight: piggyback on it.
return new Promise(function(resolve, reject) {
self.running[from].push(resolve);
});
}
};
// Translates `from`, persists the pair, warms the cache, then calls back.
// NOTE(review): on error, callback(err) is invoked with no `to`, so the
// waiters in map() are still resolved — with undefined. Verify intended.
job(from, callback) {
var self = this;
var to = "as shown";
co(function*() {
try {
to = yield translator.translate(from);
yield self.model.add({key: from, value: to});
self.curItems[from] = to;
callback(null, to);
} catch (err) {
callback(err);
//logger.error("Cant translate entity: " + from);
}
}).catch(function(err) {
// Unhandled Error
callback(new Error(err));
});
};
}
My map method is your fetchCache method.

After inserting data into Cassandra with node.js, the program seems like it is still waiting

I am really new to Cassandra and node.js now. I am trying to insert 200 data and it seems they are inserted into DB.
// QUESTION CODE — fires all INSERTs from a synchronous loop. The driver's
// connection stays open afterwards, which is why the process never exits.
var client = new cassandra.Client({contactPoints: ['127.0.0.1'], keyspace: 'big'});
var insertRepo = 'INSERT INTO big.repo_info (id, name, full_name) '
+ 'VALUES(?, ?, ?);';
// Each execute() is asynchronous, so this loop queues up all requests at once.
for (var i = 0, len = items.length; i < len; i++) {
console.log(items[i].id, items[i].name, items[i].full_name);
client.execute(insertRepo,
[items[i].id, items[i].name, items[i].full_name],
{ prepare : true },
function (err) {
if (err) {
console.log(err);
} else {
console.log('done')
}
});
};
However, after these inserts, it seems that the program is waiting for something without ever finishing. So I just press Ctrl-C to exit the program. Is this normal, or do I need to do something else?
I think I missed something.
Node.js event loop will stay active until open connections are closed.
In this case, it means you need to call client.shutdown().
Also, consider that client.execute() is an async operation, so invoking it from a sync loop it is generally not a good idea. You can use a workflow library instead, like the async library.
// Run the inserts one at a time; `n` is the current iteration index and
// `next` must be called (with an optional error) to advance the series.
async.timesSeries(items.length, function (n, next) {
// BUG FIX: index with `n` — `i` belonged to the original for loop and is
// not defined inside this callback.
var params = [items[n].id, items[n].name, items[n].full_name];
client.execute(insertQuery, params, { prepare: true }, next);
}, function allFinished(err) {
// Surface the first error rather than silently discarding it.
if (err) {
console.log(err);
}
// Close open connections so the Node.js event loop can exit.
client.shutdown();
});
Check async.timesSeries() method for more information.

Nodejs step through array and finish each step before moving on

I'm having troubles processing a queue that I've got stored in Redis.
Basically the queue in Redis is a simple array of IDs that I want to step through one by one.
My current code:
// QUESTION CODE — async.forEach runs all iterations in parallel, and the
// per-item `done` callback is never invoked below, so the final callback
// at the bottom never fires. (The accepted fix is async.eachSeries.)
async.forEach(data, function(n, done) {
redisClient.hgetall("visitor:" + n, function(err, visitor) {
if (visitor != null) {
agentOnlineCheck(visitor['userID'], function(online) {
if (online == true) {
console.log("We are done with this item, move on to the next");
} else {
console.log("We are done with this item, move on to the next");
}
});
} else {
console.log("We are done with this item, move on to the next");
}
});
}, function() {
console.log("I want this to fire when all items in data are finished");
});
I use the async library and above the var data represents an array such as:
['232323', '232423', '23232']
I want to loop through the array but one ID at a time. And not move on to the next ID until the previous one has run through all the callbacks.
Is this somehow possible?
You can use async.eachSeries instead of async.forEach.
c.f.: https://github.com/caolan/async#eachSeries

Access and modify extern variable in MongoDB request

I have a problem in a nodeJS app with mongoDB, i'm trying to do a forum and for each topic i want a button to display every sub topics.
So i need to get everything in the request:
One array with main topics
Another map array with ['topic1'] containing sub topics
Without the mapping (not an actual problem) i have this:
// QUESTION CODE — res.render runs right after the for loop, before any of
// the inner Post.find callbacks have fired, so sub_posts is still empty.
Post.find({'path': path})
.exec(function (err, posts){
if(err)
console.log("Get post list:" + err);
else
{
var sub_posts = new Array; // Second array with sub topics
for (var i = 0; posts[i]; i++) //Here is the loop for each topic
{
var tmp_path = ... // Path and next request works
Post.find({'path': tmp_path}) // Here is the second request
.exec(function(err, bis_posts) {
if (err) console.log('Error loading subforum');
else sub_posts.push(bis_posts); // Affectation fail !!!
})
}
res.render(... 'post_list': posts, 'sub_posts': sub_posts); // Send result
}
})}
So i get it's a scope problem and i should use callback but with the loop i can't resolve this problem.
Sorry for my english and thanks for your answers !
I have no idea what you mean by "affectation fail", but it looks like you're calling res.render too early — callbacks are invoked asynchronously after your current context finishes executing, so when you call res.render(...) after your for loop has finished, your Post.find(...)... operations still haven't finished and their callbacks haven't been invoked, so sub_posts will be still empty.
My node.js and Mongo are rusty, so perhaps this isn't the canonical way to do it, but I'd add a counter to track the state of the pending requests and only call res.render when all subposts have been fetched:
var sub_posts = new Array;
var pending = 0;
for (var i = 0; posts[i]; i++)
{
var tmp_path = ...
Post.find({'path': tmp_path})
.exec(function(err, bis_posts) {
if (err) console.log('Error loading subforum');
else sub_posts.push(bis_posts);
pending -= 1;
if (!pending) {
// all pending subpost lookups finished, render the response:
res.render(... 'post_list': posts, 'sub_posts': sub_posts);
}
});
pending += 1;
}

node.js redis async query

Hope someone can assist with a (simple) async question on node-redis. I'm trying to load a set from a hash in the redis db and then use that populated set further on. Here's the code snippet :-
// QUESTION CODE — the smembers call at the bottom runs before any of the
// asynchronous hget/sadd callbacks have fired, so it prints nothing.
// NOTE(review): as transcribed, the hgetall callback is never closed.
var redis_client = redis.createClient(REDIS_PORT, REDIS_URL);
redis_client.hgetall(target_hash,function(e,o){
Object.keys(o).forEach(function(target){
// get the "name" from the hash
redis_client.hget(o[target],"name",function(e,o){
if (e){
console.log("Error occurred getting key: " + e);
}
else {
redis_client.sadd("newset",o);
}
});
});
// the following line prints nothing - why ??
redis_client.smembers("newset",redis.print);
When I examine the contents of "newset" in redis it is populated as expected, but at runtime it displayed as empty. I'm sure it's an async issue - any help much appreciated !
hgetall is an asynchronous call: when it receives a reply from the redis server, it will eventually call your callback function (target) { ... }. But within your script, it actually returns immediately. Since hgetall returns very fast, Node will immediately run the next statement, smembers. But at this point the sadd statements haven’t run yet (even if your system is very fast because there hasn’t been a context switch yet).
What you need to do is to make sure smembers isn’t called before all the possible sadd calls have executed. redis_client provides the multi function to allow you to queue up all the sadd calls and run a callback when they’re all done. I haven’t tested this code, but you could try this:
// Queue the SADDs on a MULTI and only read the set back once every hget
// reply has been counted.
var redis_client = redis.createClient(REDIS_PORT, REDIS_URL);
redis_client.hgetall(target_hash, function(e, o) {
var multi = redis_client.multi();
var keys = Object.keys(o);
var i = 0;
// EDGE CASE FIX: with no keys, no hget callback would ever fire and the
// smembers call below would never run — short-circuit instead.
if (keys.length === 0) {
redis_client.smembers("newset", redis.print);
return;
}
keys.forEach(function (target) {
// get the "name" from the hash
redis_client.hget(o[target], "name", function(e, o) {
i++;
if (e) {
console.log("Error occurred getting key: " + e);
} else {
multi.sadd("newset", o);
}
// After the last reply, run the queued SADDs atomically, then read
// the populated set.
if (i == keys.length) {
// BUG FIX: added the missing space before "replies" in the log.
multi.exec(function (err, replies) {
console.log("MULTI got " + replies.length + " replies");
redis_client.smembers("newset", redis.print);
});
}
});
});
});
Some libraries have a built-in equivalent of forEach that allows you to specify a function to be called when the loop is all done. If not, you have to manually keep track of how many callbacks there have been and call smembers after the last one.
You shouldn't use multi unless you need actually need a transaction.
just keep a counter of the transactions and call smembers in the final callback
var redis_client = redis.createClient(REDIS_PORT, REDIS_URL);
redis_client.hgetall(target_hash, function(e, o) {
// BUG FIX: `o` only exists inside this callback, so the key list and the
// reply counter must be declared here — the original read Object.keys(o)
// at the top level, before `o` was defined.
var keys = Object.keys(o);
var i = 0;
keys.forEach(function(target) {
// get the "name" from the hash
redis_client.hget(o[target], "name", function(e, o) {
i++;
if (e) {
console.log("Error occurred getting key: " + e);
} else {
redis_client.sadd("newset", o);
}
// BUG FIX: check the counter on every reply, success or error — the
// original only checked inside the else branch, so a single failed
// hget meant smembers never ran. (Also balances the brackets the
// original snippet left open.)
if (i == keys.length) {
redis_client.smembers("newset", redis.print);
}
});
});
});

Resources