Using Redis SCAN in Node.js

I have Redis with a lot of keys in some format and I want to get keys that match some pattern and do some operations on them. I don't use the KEYS command since it's not recommended in production. Using SCAN, I'm wondering what the best way to write it in code is. I have to do something like a while loop, but using promises. My current solution looks like this (code is simplified a little):
'use strict'
const Promise = require('bluebird');
const config = require('./config');
const client = require('./client');

let iterator = 0;
Promise.coroutine(function* () {
  do {
    iterator = yield client.scanAsync(iterator, 'MATCH', 'myQuery', 'COUNT', config.scanChunkSize)
      .then(data => {
        let nextIterator = data[0];
        let values = data[1];
        // do some magic with values
        return nextIterator;
      })
  } while (iterator !== '0');
})();
Is there a better way to do it that I'm missing?

I realize this is a really old question, but I found all of the other answers very unsatisfying. Here is yet another attempt to scan in a relatively clean way using async/await (WITHOUT the use of yet another external dependency). You can easily modify this to continuously delete each set of found keys (you would want to tackle them in batches like this in case there are LOTS); a sketch of that variant follows the function. Pushing them into an array just demonstrates one very basic thing you could do with them during this stage.
const redis = require('redis');
const { promisify } = require('util');

const client = redis.createClient({...opts});
const scan = promisify(client.scan).bind(client);

const scanAll = async (pattern) => {
  const found = [];
  let cursor = '0';
  do {
    const reply = await scan(cursor, 'MATCH', pattern);
    cursor = reply[0];
    found.push(...reply[1]);
  } while (cursor !== '0');
  return found;
};
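For the batch-delete variant mentioned above, a minimal sketch, assuming the same promisified client (node_redis accepts an array of keys for del):
const del = promisify(client.del).bind(client);

const scanAndDelete = async (pattern) => {
  let cursor = '0';
  do {
    const reply = await scan(cursor, 'MATCH', pattern, 'COUNT', '100');
    cursor = reply[0];
    const keys = reply[1];
    if (keys.length > 0) {
      await del(keys); // delete this batch before fetching the next one
    }
  } while (cursor !== '0');
};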

You can use recursion to keep calling scan until done.
function scanAsync(cursor, pattern, returnSet) {
  return redisClient.scanAsync(cursor, "MATCH", pattern, "COUNT", "100").then(
    function (reply) {
      cursor = reply[0];
      var keys = reply[1];
      keys.forEach(function (key) {
        returnSet.add(key);
      });
      if (cursor === '0') {
        return Array.from(returnSet);
      } else {
        return scanAsync(cursor, pattern, returnSet);
      }
    });
}
Pass in a Set() to make sure keys aren't duplicated:
const myResults = new Set();
scanAsync('0', "NOC-*[^listen]*", myResults).then(
  function (keys) { console.log(keys); }
);

You can try this snippet to scan 1000 keys per iteration and delete them.
var cursor = '0';

function scan(pattern, callback) {
  redisClient.scan(cursor, 'MATCH', pattern, 'COUNT', '1000', function (err, reply) {
    if (err) {
      throw err;
    }
    cursor = reply[0];
    var keys = reply[1];
    // Delete this batch before checking the cursor, so the keys
    // returned together with the final cursor are not skipped.
    keys.forEach(function (key) {
      redisClient.del(key, function (deleteErr, deleteSuccess) {
        console.log(key);
      });
    });
    if (cursor === '0') {
      return callback();
    }
    return scan(pattern, callback);
  });
}

scan(strkey, function () {
  console.log('Scan Complete');
});

A nice option with the node-redis module (v4 and later) is to use scan iterators. Example:
const redis = require("redis");
const client = redis.createClient();
// (in node-redis v4, remember to await client.connect() before scanning)

async function getKeys(pattern = "*", count = 10) {
  const results = [];
  const iteratorParams = {
    MATCH: pattern,
    COUNT: count
  };
  for await (const key of client.scanIterator(iteratorParams)) {
    results.push(key);
  }
  return results;
}
(Of course, you can also process your keys on the fly in the for await loop without storing them in an additional array, if that's enough for you; see the sketch below.)
If you do not want to override the scan parameters (MATCH/COUNT) you can just skip them and execute client.scanIterator() without parameters (the defaults MATCH="*" and COUNT=10 will be used).
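A minimal sketch of that on-the-fly processing, assuming a connected v4 client (deleting each key is just one example of per-key work):
async function deleteMatching(pattern) {
  // scanIterator yields matching keys one at a time; no intermediate array needed
  for await (const key of client.scanIterator({ MATCH: pattern })) {
    await client.del(key);
  }
}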

I think the node bindings for Redis are pushing too much responsibility to the caller here. So I created my own library for scanning as well, using generators in node:
const redis = require('redis')
const client = redis.createClient(…)
const generators = require('redis-async-gen')
const { keysMatching } = generators.using(client)
…
for await (const key of keysMatching('test*')) {
  console.info(key)
}
It's the last bit that obviously is the thing that you should care about. Instead of having to carefully control an iterator yourself, all you need to do is use a for await...of loop.
I wrote more about it here.

Go through this, it may help:
https://github.com/fritzy/node-redisscan
Do not use the library as is; go through the code available at
https://github.com/fritzy/node-redisscan/blob/master/index.js

Related

Dynamic Slash Command Options List via Database Query?

Background:
I am building a discord bot that operates as a Dungeons & Dragons DM of sorts. We want to store game data in a database and during the execution of certain commands, query data from said database for use in the game.
All of the connections between our Discord server, our VPS, and the VPS' backend are functional and we are now implementing slash commands since traditional ! commands are being removed from support in April.
We are running into problems making the slash commands though. We want to set them up to be as efficient as possible which means no hard-coded choices for options. We want to build those choice lists via data from the database.
The problem we are running into is that we can't figure out the proper way to implement the fetch to the database within the SlashCommandBuilder.
Here is what we currently have:
const {SlashCommandBuilder} = require('@discordjs/builders');
const fetch = require('node-fetch');
const {REST} = require('@discordjs/rest');
const test = require('../commonFunctions/test.js');

var options = async function getOptions() {
  let x = await test.getClasses();
  console.log(x);
  return ['test', 'test2'];
}
module.exports = {
  data: new SlashCommandBuilder()
    .setName('get-test-data')
    .setDescription('Return Class and Race data from database')
    .addStringOption(option => {
      option.setName('class')
        .setDescription('Select a class for your character')
        .setRequired(true)
      for (let op of options()) {
        //option.addChoice(op,op);
      }
      return option
    }),
  async execute(interaction) {
  },
};
This code produces the following error when we start the npm for our bot on our server:
options is not a function or its return value is not iterable
I thought that maybe the function wasn't properly defined, so I replaced its contents with just a simple array return, and the npm started without errors and the values I had passed showed up in the server.
This leads me to think that the function call in the module.exports block is immediately attempting to get the return value of the function, and as the function is async, it isn't yet ready and is returning undefined, a promise, or something else not iterable.
Is there a proper way to implement the code as shown? Or is this way too complex for discord.js to handle?
Is there a proper way to implement the idea at all? Like creating a json object that contains the option data which is built and saved to a file at some point prior to this command being registered and then having the code above just pull in that file for the option choices?
Alright, I found a way. Ian Malcolm would be proud (LMAO).
Here is what I had to do for those with a similar issue:
I had to basically re-write our entire application. It sucks, I know, but it works, so who cares?
When you run your index file for your npm, make sure that you do the following things.
Note: you can structure this however you want, this is just how I set up my js files.
Setup a function that will setup the data you need, it needs to be an async function as does everything downstream from this point on relating to the creation and registration of the slash commands.
Create a js file to act as your application setup "module". "Module" because we're faking a real module by just using the module.exports method. No package.jsons needed.
In the setup file, you will need two requires. The first is an as-of-yet non-existent data manager file; we'll create that next. The second is a require for node:fs.
Create an async function in your setup file called setup and add it to your module.exports like so:
module.exports = { setup }
In your async setup function or in a function that it calls, make a call to the function in your still as of yet non-existent data manager file. Use await so that the application doesn't proceed until something is returned. Here is what mine looks like, note that I am writing my data to a file to read in later because of my use case, you may or may not have to do the same for yours:
async function setup() {
  console.log('test');
  // build option choice lists
  let listsBuilt = await buildChoiceLists();
  if (listsBuilt) {
    return true;
  } else {
    return false;
  }
}

async function buildChoiceLists() {
  let classListBuilt = await buildClassList();
  return true;
}

async function buildClassList() {
  let classData = await classDataManager.getClassData();
  console.log(classData);
  classList = classData;
  await writeFiles();
  return true;
}

async function writeFiles() {
  fs.writeFileSync('./CommandData/classList.json', JSON.stringify(classList));
}
Before we finish off this file, if you want to store anything as a property in this file and then get it later on, you can do so. In order for the data to return properly though, you will need to define a getter function in your exports. Here is an example:
var classList;

module.exports = {
  getClassList: () => classList,
  setup
};
So, with everything above you should have something that looks like this:
const classDataManager = require('./DataManagers/ClassData.js')
const fs = require('node:fs');

var classList;

async function setup() {
  console.log('test');
  // build option choice lists
  let listsBuilt = await buildChoiceLists();
  if (listsBuilt) {
    return true;
  } else {
    return false;
  }
}

async function buildChoiceLists() {
  let classListBuilt = await buildClassList();
  return true;
}

async function buildClassList() {
  let classData = await classDataManager.getClassData();
  console.log(classData);
  classList = classData;
  await writeFiles();
  return true;
}

async function writeFiles() {
  fs.writeFileSync('./CommandData/classList.json', JSON.stringify(classList));
}

module.exports = {
  getClassList: () => classList,
  setup
};
Next that pesky non-existent DataManager file. For mine, each data type will have its own, but you might want to just combine them all into a single .js file for yours.
Same with the folder name, I called mine DataManagers, if you're combining them all into one, you could just call the file DataManager and leave it in the same folder as your appSetup.js file.
For the data manager file all we really need is a function to get our data and then return it in the format we want it to be in. I am using node-fetch. If you are using some other module for data requests, write your code as needed.
Instead of explaining everything, here is the contents of my file, not much has to be explained here:
const fetch = require('node-fetch');

async function getClassData() {
  return new Promise((resolve) => {
    let data = "action=GetTestData";
    fetch('http://xxx.xxx.xxx.xx/backend/characterHandler.php', {
      method: 'post',
      headers: {'Content-Type': 'application/x-www-form-urlencoded'},
      body: data
    }).then(response => {
      response.json().then(res => {
        let status = res.status;
        let clsData = res.classes;
        let rcData = res.races;
        if (status == "Success") {
          let classes = [];
          let races = [];
          if (Object.keys(clsData).length > 0) {
            for (let key of Object.keys(clsData)) {
              classes.push({
                "name": key,
                "code": key.toLowerCase()
              });
            }
          }
          if (Object.keys(rcData).length > 0) {
            for (let key of Object.keys(rcData)) {
              let rc = rcData[key];
              races.push({
                "name": key,
                "desc": rc.Desc
              });
            }
          }
          resolve(classes);
        }
      });
    });
  });
}

module.exports = {
  getClassData
};
This file contacts our backend php and requests data from it. It queries the data then returns it. Then we format it into a JSON structure for use later on with option choices for the slash command.
Once all of your appSetup and data manager files are complete, we still need to create the commands and register them with the server. So, in your index file add something similar to the following:
async function getCommands() {
  let cmds = await comCreator.appSetup();
  console.log(cmds);
  client.commands = cmds;
}

getCommands();
This should go at or near the top of your index.js file. Note that comCreator refers to a file we haven't created yet; you can name this require const whatever you wish. That's it for this file.
Now, the "comCreator" file. I named mine deploy-commands.js, but you can name it whatever. Once again, here is the full file contents. I will explain anything that needs to be explained after:
const {Collection} = require('discord.js');
const {REST} = require('@discordjs/rest');
const {Routes} = require('discord-api-types/v9');
const app = require('./appSetup.js');
const fs = require('node:fs');
const config = require('./config.json');

async function appSetup() {
  console.log('test2');
  let setupDone = await app.setup();
  console.log(setupDone);
  console.log(app.getClassList());
  return new Promise((resolve) => {
    const cmds = [];
    const cmdFiles = fs.readdirSync('./commands').filter(f => f.endsWith('.js'));
    for (let file of cmdFiles) {
      let cmd = require('./commands/' + file);
      console.log(file + ' added to commands!');
      cmds.push(cmd.data.toJSON());
    }
    const rest = new REST({version: '9'}).setToken(config.token);
    rest.put(Routes.applicationGuildCommands(config.clientId, config.guildId), {body: cmds})
      .then(() => console.log('Successfully registered application commands.'))
      .catch(console.error);
    let commands = new Collection();
    for (let file of cmdFiles) {
      let cmd = require('./commands/' + file);
      commands.set(cmd.data.name, cmd);
    }
    resolve(commands);
  });
}

module.exports = {
  appSetup
};
Most of this is boilerplate for slash command creation, though I did combine the creation and registering of the commands into the same process. As you can see, we are grabbing our command files, processing them into a collection, registering that collection, and then resolving the promise with that variable.
You might have noticed that this resolved collection is what was used to set the client commands in the index.js file.
Config just contains your connection details for your discord server app.
Finally, how I accessed the data we wrote for the SlashCommandBuilder:
data: new SlashCommandBuilder()
  .setName('get-test-data')
  .setDescription('Return Class and Race data from database')
  .addStringOption(option => {
    option.setName('class')
      .setDescription('Select a class for your character')
      .setRequired(true)
    let ops = [];
    let data = fs.readFileSync('./CommandData/classList.json', 'utf-8');
    ops = JSON.parse(data);
    console.log('test data class options: ' + ops);
    for (let op of ops) {
      option.addChoice(op.name, op.code);
    }
    return option
  }),
Hopefully this helps someone in the future!

How can I store the value of a promise and use it once resolved?

I am currently developing an app which interacts with Uniswap, and I have developed a Wrapper class to contain the info and variables I'll need about some pair (e.g. DAI/WETH).
As some of these values are asynchronous, I have coded a build() async function to get them before calling the constructor, so I can store them. I want to store the result of this build function, which is an instance of the class I have defined, inside a variable to use later, but I need to know whether the Promise that the build function returns is resolved before using it. How can I do that?
Here is the code of the class:
'use strict'
const { ChainId, Fetcher, WETH, Route, Trade, TradeType, TokenAmount } = require('@uniswap/sdk')
const { toChecksumAddress } = require('ethereum-checksum-address')
const Web3 = require('web3')
const web3 = new Web3()
const chainId = ChainId.MAINNET;

class UniswapTokenPriceFetcher
{
  constructor(async_params)
  {
    async_params.forEach((element) => {
      if (element === undefined)
      {
        throw new Error('All parameters must be defined')
      }
    });
    this.trade = async_params[0];
    this.route = async_params[1];
    this.pair = async_params[2];
    this.tok1 = async_params[3];
    this.tok2 = async_params[4];
  }

  static async build(token1, token2)
  {
    var tok1 = await Fetcher.fetchTokenData(chainId, toChecksumAddress(token1))
    var tok2 = await Fetcher.fetchTokenData(chainId, toChecksumAddress(token2))
    var pair = await Fetcher.fetchPairData(tok1, tok2)
    var route = new Route([pair], tok2)
    var trade = new Trade(route, new TokenAmount(tok2, web3.utils.toWei('1', 'Ether')), TradeType.EXACT_INPUT)
    return new UniswapTokenPriceFetcher([trade, route, pair, tok1, tok2])
  }

  getExecutionPrice6d = () =>
  {
    return this.trade.executionPrice.toSignificant(6);
  }

  getNextMidPrice6d = () =>
  {
    return this.trade.nextMidPrice.toSignificant(6);
  }
}

module.exports = UniswapTokenPriceFetcher
Thank you everybody!
EDIT: I know Uniswap only pairs with WETH so one of my token variables is unnecessary, but the problem remains the same! Also keep in mind that I want to store an instance of this class for later use inside another file.
You should either call the build function with await
const priceFetcher = await UniswapTokenPriceFetcher.build(token1, token2)
or followed by then
UniswapTokenPriceFetcher.build(token1, token2).then(priceFetcher => {...})
I don't see any other way.
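For the "store it for later use in another file" part, one hedged sketch is to cache the build promise itself and await it wherever it's needed (the module and function names here are made up for illustration):
// priceFetcherStore.js: a hypothetical module caching the built instance
const UniswapTokenPriceFetcher = require('./UniswapTokenPriceFetcher');

let fetcherPromise = null;

function getFetcher(token1, token2) {
  // build() runs only once; every caller awaits the same promise,
  // so the instance is never used before it has resolved
  if (!fetcherPromise) {
    fetcherPromise = UniswapTokenPriceFetcher.build(token1, token2);
  }
  return fetcherPromise;
}

module.exports = { getFetcher };

// elsewhere:
// const fetcher = await require('./priceFetcherStore').getFetcher(addr1, addr2);
// console.log(fetcher.getExecutionPrice6d());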

Correct way to organise this process in Node

I need some advice on how to structure this function as at the moment it is not happening in the correct order due to node being asynchronous.
This is the flow I want to achieve; I don't need help with the code itself but with the order to achieve the end result, and any suggestions on how to make it efficient:
1. Node routes a GET request to my controller.
2. The controller reads a .csv file on the local system and opens a read stream using the fs module.
3. It then uses the csv-parse module to convert the stream to an array, line by line (many 100,000's of lines).
4. Start a try/catch block.
5. With the current row from the csv, take a value and try to find it in MongoDB.
6. If found, take the ID and store the line from the CSV and this ID as a foreign ID in a separate database.
7. If not found, create an entry in the DB, take the new ID, and then do 6.
8. Print to the terminal the row number being worked on (ideally, at some point I would like to be able to send this value to the page and have it update like a progress bar as the rows are completed).
Here is a small part of the code structure that I am currently using:
const fs = require('fs');
const parse = require('csv-parse');

function addDataOne(req, id) {
  const modelOneInstance = new InstanceOne({ ...code });
  const resultOne = modelOneInstance.save();
  return resultOne;
}

function addDataTwo(req, id) {
  const modelTwoInstance = new InstanceTwo({ ...code });
  const resultTwo = modelTwoInstance.save();
  return resultTwo;
}

exports.add_data = (req, res) => {
  const fileSys = 'public/data/';
  const parsedData = [];
  let i = 0;
  fs.createReadStream(`${fileSys}${req.query.file}`)
    .pipe(parse({}))
    .on('data', (dataRow) => {
      let RowObj = {
        one: dataRow[0],
        two: dataRow[1],
        three: dataRow[2],
        etc,
        etc
      };
      try {
        ModelOne.find(
          { propertyone: RowObj.one, propertytwo: RowObj.two },
          '_id, foreign_id'
        ).exec((err, searchProp) => {
          if (err) {
            console.log(err);
          } else {
            if (searchProp.length > 1) {
              console.log('too many returned from find function');
            }
            if (searchProp.length === 1) {
              addDataOne(RowObj, searchProp[0]).then((result) => {
                searchProp[0].foreign_id.push(result._id);
                searchProp[0].save();
              });
            }
            if (searchProp.length === 0) {
              let resultAddProp = null;
              addDataTwo(RowObj).then((result) => {
                resultAddProp = result;
                addDataOne(req, resultAddProp._id).then((result) => {
                  resultAddProp.foreign_id.push(result._id);
                  resultAddProp.save();
                });
              });
            }
          }
        });
      } catch (error) {
        console.log(error);
      }
      i++;
      let iString = i.toString();
      process.stdout.clearLine();
      process.stdout.cursorTo(0);
      process.stdout.write(iString);
    })
    .on('end', () => {
      res.send('added');
    });
};
I have tried to make the functions use async/await, but it seems to conflict with the fs.createReadStream or csv-parse functionality, probably due to my inexperience and incorrect use of the code...
I appreciate that this is a long question about the fundamentals of the code, but just some tips/advice/pointers on how to get this going would be appreciated. I had it working when the data was sent one at a time via a POST request from Postman, but I can't implement the next stage, which is to read from the csv file, which contains many records.
First of all, you can collapse the following two checks into one query:
if (searchProp.length === 1) {
if (searchProp.length === 0) {
Use the upsert option in MongoDB's findOneAndUpdate query to update an existing document or insert a new one in a single step, as sketched below.
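A hedged sketch of what that could look like with Mongoose, reusing ModelOne and RowObj from the question (the exact update payload is an assumption):
// Match on the two properties; create the document if it doesn't exist yet.
const doc = await ModelOne.findOneAndUpdate(
  { propertyone: RowObj.one, propertytwo: RowObj.two },                   // filter
  { $setOnInsert: { propertyone: RowObj.one, propertytwo: RowObj.two } }, // only applied on insert
  { upsert: true, new: true }                                             // return the (possibly new) doc
);
// doc is the existing or freshly created record either way,
// so both length checks collapse into a single code path.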
Secondly, don't do this in the main thread. Use a queue mechanism; it will be much more efficient.
The queue which I personally use is Bull Queue.
https://github.com/OptimalBits/bull#basic-usage
This also provides the progress-reporting functionality you need.
Also, regarding using async/await with a ReadStream, a lot of examples can be found on the net, such as https://humanwhocodes.com/snippets/2019/05/nodejs-read-stream-promise/. A minimal sketch follows.
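For example, a sketch of consuming the csv-parse stream with for await...of so each row is awaited sequentially (handleRow is a hypothetical helper wrapping the find/upsert logic from the question):
const fs = require('fs');
const parse = require('csv-parse');

exports.add_data = async (req, res) => {
  const stream = fs
    .createReadStream(`public/data/${req.query.file}`)
    .pipe(parse({}));
  let i = 0;
  for await (const dataRow of stream) {
    // hypothetical: one find/upsert per row, strictly in order
    await handleRow(dataRow);
    i++;
    process.stdout.cursorTo(0);
    process.stdout.write(i.toString());
  }
  res.send('added');
};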

How do I chain a set of functions together using promises and q in node.js?

I have some dynamic data that needs to have work performed on it. The work must happen sequentially. Using the Q Library, I'd like to create an array of functions and execute the code sequentially using sequences. I can't seem to quite figure out the syntax to achieve this.
const fruits = ["apple", "cherry", "blueberry"]

function makeFruitPie(fruit) {
  return Q.Promise((resolve, reject) => {
    // Do some stuff here
    resolve(fruit + " pie")
    // Error handling here
    reject(new Error(""))
  })
}

const fruitFuncs = new Array(fruits.length)
for (var i = 0; i < fruits.length; i++) {
  fruitFuncs[i] = makeFruitPie(fruits[i])
}

// Stole this example from the github docs but can't quite get it right.
i = 0
var result = Q(fruits[i++])
fruitFuncs.forEach((f) => {
  result = result(fruits[i++]).then(f)
})
With these lines
for (var i = 0; i < fruits.length; i++) {
  fruitFuncs[i] = makeFruitPie(fruits[i])
}
you already run the functions and, hence, their processing will begin.
Assuming you want the execution of the functions in sequence, the following would be more appropriate:
// construct the pipeline
const start = Q.defer();
let result = start.promise; // we need something to set the pipeline off
fruits.forEach((fruit) => {
  result = result.then(() => makeFruitPie(fruit));
});
// start the pipeline
start.resolve();
Sidenote: There is a native Promise implementation supported by almost all environments. Maybe consider switching from the library backed version.
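Following up on that sidenote, a minimal sketch of the same sequential pipeline with native promises (no deferred object needed):
// Start from an already-resolved promise and chain each pie onto it.
let result = Promise.resolve();
fruits.forEach((fruit) => {
  result = result.then(() => makeFruitPie(fruit));
});
result.then(() => console.log('all pies made'));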
You can use Promise.all:
Promise.all(fruits.map(fruit => makeFruitPie(fruit)))
  .then(final_res => console.log(final_res))
final_res will give you an array of results. (Note that this starts all the promises up front, so the pies are made in parallel rather than sequentially.)
You could use for...of and await to do things sequentially, something like this (note that await must run inside an async function):
const Q = require("q");

const fruits = ["apple", "cherry", "blueberry"];

function makeFruitPie(fruit) {
  return Q.Promise((resolve, reject) => {
    // Do some stuff here
    resolve(`${fruit} pie`);
    // Error handling here
    reject(new Error(""));
  });
}

(async () => {
  for (const fruit of fruits) {
    const result = await makeFruitPie(fruit);
    console.log(result);
  }
})();
By the way, it's also worth considering native Promise instead of using q.

How to translate Kafka pub-sub semantics into a peekNext promise semantics for unittesting in NodeJS

While unittesting my NodeJS application I'm trying to create a simple helper class that will translate the Kafka pub-sub semantics into a simpler API suited for unittesting.
My idea is to be able to write mocha unittest like this:
const testSubscriber = kafkaTestHelper.getTestSubscriber({topic: 'test'});
return someKafkaProducer.sendAsync({topic: 'test', message: randomWord})
  .then(() =>
    testSubscriber.next()
  ).then(msg => {
    msg.should.equal(randomWord);
  });
Of course I would also add helper methods such as
testSubscriber.nextUntil(someFilter)
This is inspired by the AKKA.NET TestKit which has a similar approach.
I have two questions:
Is this a reasonable approach or is there some cleaner way to unittest application logic based on Kafka stream processing in NodeJS?
Can anybody post coding examples showing how to make testSubscriber work as I intend?
This might not be the most elegant solution but it seems to work, at least for my initial testing. The trick is to create an ever growing list of Promises for which the resolver function is kept by reference in an array called 'resolvers'. Then when a message comes in, the resolver is invoked with the message. In this way I can return promises to any unittest invoking next() and it will work transparently if either the message was already delivered or it will be delivered in the future.
I still feel I'm reinventing the wheel here, so any comments would still be greatly appreciated.
function TestSubscriber(consumer, initialMessageFilter) {
  this.consumer = consumer;
  let promiseBuffer = [];
  let resolvers = [];
  let resolveCounter = 0;
  let isStarted = false;

  const ensurePromiseBuffer = function() {
    if (promiseBuffer.length === 0 || resolveCounter >= resolvers.length) {
      const newPromise = new Promise(function(resolve, reject) {
        resolvers.push(resolve);
      });
      promiseBuffer.push(newPromise);
    }
  }

  const that = this;
  this.consumer.on('message', function(message) {
    if (!isStarted) {
      // Determine if we should start now.
      isStarted = initialMessageFilter === undefined || initialMessageFilter(message);
    }
    if (isStarted) {
      ensurePromiseBuffer();
      const resolver = resolvers[resolveCounter];
      resolver(message);
      resolveCounter++;
      that.consumer.commit(function(err, data) {
        if (err) {
          // Just log any errors here as we are running inside a unittest
          log.warn(err)
        }
      })
    }
  });

  this.next = function() {
    ensurePromiseBuffer();
    return promiseBuffer.shift();
  };
}

const cache = {};

module.exports = {
  getTestSubscriber: function({topic}, initialMessageFilter) {
    if (!cache[topic]) {
      const consumer = kafka.getConsumer({topic, groupId: GROUP_ID});
      cache[topic] = new TestSubscriber(consumer, initialMessageFilter);
    }
    return cache[topic];
  }
}
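A hedged sketch of the nextUntil helper mentioned in the question, built purely on top of next() (the predicate signature is an assumption):
// Keep pulling messages until one satisfies the filter;
// skipped messages are simply consumed and discarded.
async function nextUntil(subscriber, filter) {
  let msg;
  do {
    msg = await subscriber.next();
  } while (!filter(msg));
  return msg;
}

// usage: const msg = await nextUntil(testSubscriber, m => m.key === 'expected');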
