How to use other repos in a transaction - node.js

I have several repos with methods, and some of these methods use a transaction (.tx).
For example, in my DevicesRepository below, the 'add' method has to insert a new Device, which means:
1. Insert a System and return the ID (SystemsRepository)
2. Insert the device with the returned systemId and get the new id
3. Insert other pieces (other repos) that use the deviceId
My problem is that inside that transaction I don't know how to access the other repo methods.
I could use the other repos from my Database object (Database.systems.add, Database.OtherRepo.add, [...]), but if I do that, each call would run on its own connection instead of inside my transaction, according to the docs:
From the tx documentation:
When invoked on the root Database object, the method allocates the connection from the pool, executes the callback, and once finished - releases the connection back to the pool. However, when invoked inside another task or transaction, the method reuses the parent connection.
From the task documentation:
When executing more than one request at a time, one should allocate and release the connection only once, while executing all the required queries within the same connection session. More importantly, a transaction can only work within a single connection.
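In other words (a minimal sketch with made-up table and column names): queries issued through the transaction context reuse its connection, while queries issued through the root Database object would each allocate their own.

db.tx('add-device', t => {
    // both inserts run on the same connection, inside one transaction
    return t.one('INSERT INTO systems(name) VALUES($1) RETURNING systemid', ['test'])
        .then(sys => t.one('INSERT INTO devices(systemid) VALUES($1) RETURNING deviceid', [sys.systemid]));
});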
Thanks! :)
P.S.: I can add how I initialize the DB and repos.
./db/repos/devices.js
'use strict';
var Database = null, pgp = null, Collections = null;

async function add(params) {
    // I can use Database.systems.add
    return Database.tx('Insert-New-Device', async function(transaction) {
        let device = params.data.device;
        const system = await transaction.systems.add(params);
        device.systemid = system.systemId;
        let query = pgp.helpers.insert(device, Collections.insert); // let, not const: appended to below
        query += " RETURNING deviceId";
        device.deviceId = await transaction.one(query);
        const OtherRepoInsert = await transaction.otherRepos.add(params);
        device.otherRepos.id = OtherRepoInsert.otherReposId;
        return device;
    })
    .then(data => { return data; })
    .catch(ex => { throw new Error(ex); });
}

function createColumnsets() { /* hidden for brevity (almost the same as in the pg-promise-demo) */ }

const DevicesRepository = {
    add: add
};

module.exports = (db) => {
    Database = db;
    pgp = db.$config.pgp;
    Collections = createColumnsets();
    return DevicesRepository;
}
./db/repos/systems.js
'use strict';
var Database = null, pgp = null, Collections = null;

async function add(params) {
    var system = params.data.system;
    system.archid = 2;
    system.distributionid = 3;
    var query = pgp.helpers.insert(system, Collections.insert);
    if (params.return) query += " RETURNING *";
    return Database.any(query)
        .then(data => { return data; })
        .catch(ex => { throw new Error(ex); });
}

function createColumnsets() { /* hidden for brevity (almost the same as in the pg-promise-demo) */ }

const SystemsRepository = {
    add: add
};

module.exports = (db) => {
    Database = db;
    pgp = db.$config.pgp;
    Collections = createColumnsets();
    return SystemsRepository;
}

I found the real problem.
If you go back to my first post, you can see that each of my repos exports an initialization function:
1. which is called by the pg-promise 'extend' event
2. which takes one param: the context
3. which uses this param to initialize the 'pgp' variable in the repo with db.$config.pgp
As explained in the demo, this event occurs when the db is loaded for the first time in the app, and for every task and transaction.
In my case:
The first time the event occurs (full app initialization), the event's param 'obj' is the database context (containing $config, $pool, ...), so it works.
When the event occurs for a task or transaction, the event's param 'obj' is a Task context, where $config does not exist, so the event cannot extend the context with my repos. An exception 'cannot read property helpers of undefined' is thrown, but it does not appear and does not crash my app; I don't know why, maybe it is caught inside the event. That is why I could not use my repos in the transaction.
I modified my code as follows and it works:
./db/index.js
'use strict';
/* hidden for brevity */
// pg-promise initialization options:
const initOptions = {
    promiseLib: promise,
    extend(obj, dc) {
        obj.roles = repos.Roles(obj, pgp);
        obj.shells = repos.Shells(obj, pgp);
        obj.systems = repos.Systems(obj, pgp);
        obj.devices = repos.Devices(obj, pgp);
    }
};
const pgp = require('pg-promise')(initOptions);
const db = pgp(config);
/* hidden for brevity */
./db/repos/{repoFiles}.js
/* hidden for brevity */
module.exports = (db, pgpLib) => {
    Database = db;
    pgp = pgpLib;
    Collections = createColumnsets();
    return DevicesRepository;
}

Property $config is there for integration purposes. That's why it exists only on the root Database level, and not inside tasks or transactions.
In order to make use of the helpers namespace, you should pass pgp into repositories when you initialize them, as shown within pg-promise-demo:
extend(obj, dc) {
    obj.users = new repos.Users(obj, pgp);
    obj.products = new repos.Products(obj, pgp);
}

You can establish the transaction outside the calls, then pass it in to those functions to have them use it.
That said, I'd recommend looking into a slightly higher-level query builder library such as Knex.js to save you from some of these (and future) headaches.
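For example, here is a minimal sketch of that pattern in Knex.js (table and column names are made up to mirror the question, and a recent Knex version is assumed, where .returning() yields row objects):

const knex = require('knex')({ client: 'pg', connection: process.env.DATABASE_URL });

// Each repo method accepts the transaction object trx and runs its queries through it:
const addSystem = (trx, system) =>
    trx('systems').insert(system).returning('systemid');

const addDevice = (trx, device) =>
    trx('devices').insert(device).returning('deviceid');

async function addFullDevice(data) {
    // Establish the transaction once, pass it into every repo call:
    return knex.transaction(async trx => {
        const [{ systemid }] = await addSystem(trx, data.system);
        const [{ deviceid }] = await addDevice(trx, { ...data.device, systemid });
        return deviceid;
    }); // commits on success, rolls back if anything throws
}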

Related

Web3.js pair contract events.Swap not correctly subscribing to swap event

I am trying to run a section of Node.js code (using Web3.js) whenever a swap occurs on a given pair contract. qPair is the pair contract derived from Quickswap's router contract (Polygon blockchain), and sPair is the same pair contract derived from Sushiswap's router contract (also Polygon). The code doesn't work as intended when implemented as a class: I have it working in one file, but when I try to create a class for crypto pairs, the code won't work.
Here is the working code:
const main = async () => {
    qPair = await getPairContract(quickswapFactoryContract, token0.address, token1.address)
    sPair = await getPairContract(sushiswapFactoryContract, token0.address, token1.address)
    /* The below code is listening for a swap event on the Quickswap exchange. When a swap event is
       detected, the code checks the price of the token pair on Quickswap, compares it to the price
       of the token pair on Sushiswap, and executes a trade on whichever exchange the price favors. */
    qPair.events.Swap({}, async () => {
        console.log("qPair activated")
        /*
         * Do stuff here
         */
    })
    /* The below code is listening for an event on the Sushiswap contract. When the event is detected,
       the code will check the price of the token pair and determine if there is an arbitrage
       opportunity. If there is an arbitrage opportunity, the code will execute the trade. */
    sPair.events.Swap({}, async () => {
        console.log("sPair activated")
        /*
         * Do stuff here
         */
    })
    console.log("Waiting for swap event...")
}
And here is the code that doesn't work:
const main = async () => {
    qPair1 = new cryptoPair(<same token details as before go here>)
    sPair1 = new cryptoPair(<same token details as before go here>)
    qPair1.pairContract.events.Swap({}, async () => {
        // The code here activates once (after main() reaches the bottom) and never again
    })
    sPair1.pairContract.events.Swap({}, async () => {
        // The code here activates once (after main() reaches the bottom) and never again
    })
    console.log("waiting for swap event")
} // Once the debugger reaches here, the two "async" console logs activate
The class has the same code as the "working" code but instead would just do this._pairContract = await getPairContract() and then return that variable using a getter function.
Here is the (nonworking) class code:
module.exports = class cryptoPair {
    constructor(token0Address, token0Decimals, token1Address, token1Decimals, factory) {
        this._token0Address = token0Address;
        this._token0Decimals = token0Decimals;
        this._token1Address = token1Address;
        this._token1Decimals = token1Decimals;
        this._factory = factory;
    }
    // Setter functions
    set token0(web3Token) {
        this._token0 = web3Token;
    }
    set token1(web3Token) {
        this._token1 = web3Token;
    }
    set token0Contract(web3Contract) {
        this._token0Contract = web3Contract;
    }
    set token1Contract(web3Contract) {
        this._token1Contract = web3Contract;
    }
    // Getter functions
    get token0Address() {
        return this._token0Address;
    }
    get token1Address() {
        return this._token1Address;
    }
    get factory() {
        return this._factory;
    }
    get token0Contract() {
        return this._token0Contract;
    }
    get token1Contract() {
        return this._token1Contract;
    }
    get token0() {
        return this._token0;
    }
    get pairContract() {
        return this._pairContract;
    }
    // The following two functions are nearly identically defined in the "working code"
    // but there they don't use the "this.variableName" syntax
    async defineTokens(t0Address, t0Decimals, t1Address, t1Decimals) {
        try {
            this._token0Contract = new web3.eth.Contract(IERC20.abi, t0Address)
            this._token1Contract = new web3.eth.Contract(IERC20.abi, t1Address)
            const t0Symbol = await this._token0Contract.methods.symbol().call()
            const t0Name = await this._token0Contract.methods.name().call()
            this._token0 = new Token(
                ChainId.POLYGON,
                t0Address,
                t0Decimals,
                t0Symbol,
                t0Name
            )
            const t1Symbol = await this._token1Contract.methods.symbol().call()
            const t1Name = await this._token1Contract.methods.name().call()
            this._token1 = new Token(
                ChainId.POLYGON,
                t1Address,
                t1Decimals,
                t1Symbol,
                t1Name
            )
        } catch (err) {
            // For some reason, I keep getting the error "hex data is odd-length" in the
            // class but not when this same code is outside of a class
            console.log("Token creation failed, retrying...")
            return this.defineTokens(this._token0Address, this._token0Decimals, this._token1Address, this._token1Decimals)
        }
    }
    async definePairContract() {
        this._pairAddress = await this._factory.methods.getPair(this._token0Address, this._token1Address).call();
        this._pairContract = new web3.eth.Contract(IUniswapV2Pair.abi, this._pairAddress);
    }
}
To reiterate, the "working code" runs the inner code of the async events.Swap() callbacks whenever a swap is triggered, but the same code implemented as a class does not. Is this because of the use of classes? Or did I make a mistake somewhere? Thanks in advance!
I fixed the issue. Of course, the issue was outside of the code provided, in where I defined web3. The working version defined it as:
web3 = new Web3(`wss://polygon-mainnet.g.alchemy.com/v2/${process.env.ALCHEMY_API_KEY}`)
whereas the broken class version was defining it as:
provider = new HDWalletProvider({
    privateKeys: [process.env.DEPLOYMENT_ACCOUNT_KEY],
    providerOrUrl: `wss://polygon-mainnet.g.alchemy.com/v2/${process.env.ALCHEMY_API_KEY}`
})
web3 = new Web3(provider)
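For long-lived event subscriptions, give Web3 a direct WebSocket provider and keep the wallet provider for signing/sending only; a minimal sketch, assuming the same Alchemy endpoint:

const Web3 = require('web3')

// Subscriptions need a persistent WebSocket connection:
const wssUrl = `wss://polygon-mainnet.g.alchemy.com/v2/${process.env.ALCHEMY_API_KEY}`
const web3 = new Web3(new Web3.providers.WebsocketProvider(wssUrl))

// pairContract.events.Swap(...) subscriptions made through this web3
// instance keep firing for as long as the socket stays open.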

Firebase Functions and Express: listen to firestore data live

I have a website that runs its frontend on Firebase Hosting, and its server, written using Node.js and Express, on Firebase Functions.
What I want is to have redirect links on my website, so I can map for example mywebsite.com/youtube to my YouTube channel. I create these links from my admin panel and add them to my Firestore database.
My data is roughly something like this (each document holds createdAt, creatorEmail, name and url fields):
The first way I approached this is by querying my Firestore database on every request, but that is heavily expensive and slow.
Another way I tried is setting up some kind of background listener on the Firestore database which would always provide up-to-date data, but unfortunately that did not work, because Firebase Functions suspends the main function when the current request execution ends.
Lastly, and most conveniently, I configured an API route which is called from my admin panel whenever any change happens to the data, and I save the new data to a JSON file. This worked locally, but not in production, because apparently Firebase Functions runs on a read-only file system, so files can't be edited after they are deployed. After some research I found out that Firebase Functions allows writing to the tmp directory, so I went forward with this and deployed it. But again, Firebase Functions resets the tmp folder when a request execution ends.
Here is my API request code, which updates the utm_data.json file in the tmp directory:
// my firestore provider
const db = require('../db');
const fs = require('fs');
const os = require('os');
const mkdirp = require('mkdirp');

const updateUrlsAPI = (req, res) => {
    // we want to get the utm list from firestore, and update the file
    // tmp/utm_data.json
    // query data from firestore
    db.collection('utmLinks').get().then(async function(querySnapshot) {
        try {
            // get the path to the `tmp` folder depending on
            // the os running this program
            let tmpFolderName = os.tmpdir()
            // create the `tmp` directory if it does not exist
            await mkdirp(tmpFolderName)
            let docsData = querySnapshot.docs.map(doc => doc.data())
            let tmpFilePath = tmpFolderName + '/utm_data.json'
            let strData = JSON.stringify(docsData)
            fs.writeFileSync(tmpFilePath, strData)
            res.send('200')
        } catch (error) {
            console.log("error while updating utm_data.json: ", error)
            res.send(error)
        }
    });
}
and this is my code for reading the utm_data.json file on an incoming request:
const os = require('os');
const fs = require('fs');

const readUrlsFromJson = (req, res) => {
    var url = req.path.split('/');
    // the url will be in the format of: 'mywebsite.com/routeName'
    var routeName = url[1];
    try {
        // read the file tmp/utm_data.json, where each doc looks like:
        // {
        //   'createdAt': Date
        //   'creatorEmail': string
        //   'name': string
        //   'url': string
        // }
        // our [routeName] should match [name] of the doc
        let tmpFolderName = os.tmpdir()
        let tmpFilePath = tmpFolderName + '/utm_data.json'
        // read the links list file and assign it to the `utms` variable
        // (fs.readFileSync instead of require(), which would cache the file after the first read)
        let utms = JSON.parse(fs.readFileSync(tmpFilePath))
        if (!utms || !utms.length) {
            return undefined;
        }
        // find the link matching the routeName
        let utm = utms.find(utm => utm.name == routeName)
        if (!utm) {
            return undefined;
        }
        // if we found the doc,
        // then we'll redirect to the url
        res.redirect(utm.url)
    } catch (error) {
        console.error(error)
        return undefined;
    }
}
Is there something I am doing wrong, and if not, what is an optimal solution for this case?
You can initialize the Firestore listener in global scope. From the documentation,
The global scope in the function file, which is expected to contain the function definition, is executed on every cold start, but not if the instance has already been initialized.
This should keep the listener active even after the function's execution has completed, for as long as that specific instance keeps running (which should be around ~30 minutes). Try refactoring the code as shown below:
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";

admin.initializeApp();

let listener = false;
// Store all utmLinks in global scope
let utmLinks: any[] = [];

const initListeners = () => {
    functions.logger.info("Initializing listeners");
    admin
        .firestore()
        .collection("utmLinks")
        .onSnapshot((snapshot) => {
            snapshot.docChanges().forEach(async (change) => {
                functions.logger.info(change.type, "document received");
                switch (change.type) {
                    case "added":
                        utmLinks.push({ id: change.doc.id, ...change.doc.data() });
                        break;
                    case "modified": {
                        const index = utmLinks.findIndex(
                            (link) => link.id === change.doc.id
                        );
                        utmLinks[index] = { id: change.doc.id, ...change.doc.data() };
                        break;
                    }
                    case "removed":
                        utmLinks = utmLinks.filter((link) => link.id !== change.doc.id);
                        break;
                    default:
                        break;
                }
            });
        });
    return;
};

// The HTTPs function
export const helloWorld = functions.https.onRequest(
    async (request, response) => {
        if (!listener) {
            // Cold start, no listener active
            initListeners();
            listener = true;
        } else {
            functions.logger.info("Listeners already initialized");
        }
        response.send(JSON.stringify(utmLinks, null, 2));
    }
);
This example stores all UTM links in an array in global scope, which won't be persisted across new instances, but you won't have to query each link for every request. The onSnapshot() listener will keep utmLinks updated.
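To serve the redirects from this cached array, the Express handler from the question could do a simple in-memory lookup; a minimal sketch (the 404 fallback is an assumption):

const readUrlsFromCache = (req, res) => {
    const routeName = req.path.split('/')[1];
    // look the route up in the in-memory array kept fresh by onSnapshot()
    const utm = utmLinks.find((link) => link.name === routeName);
    if (!utm) {
        return res.status(404).send('Not found');
    }
    return res.redirect(utm.url);
};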
If you want to persist this data permanently and avoid querying on every cold start, you can try a service like Google Compute Engine that keeps running, unlike Cloud Functions, which eventually time out.

How can I store the value of a promise and use it once resolved?

I am currently developing an app which interacts with Uniswap, and I have developed a wrapper class to contain the info and variables I'll need about some pair (e.g. DAI/WETH).
As some of these values are asynchronous, I have coded an async build() function to get them before calling the constructor, so I can store them. I want to store the result of this build function, which is an instance of the class I have defined, inside a variable to use later, but I need to know that the Promise the build function returns is resolved before using it. How can I do that?
Here is the code of the class:
'use strict'

const { ChainId, Fetcher, WETH, Route, Trade, TradeType, TokenAmount } = require('@uniswap/sdk')
const { toChecksumAddress } = require('ethereum-checksum-address')
const Web3 = require('web3')

const web3 = new Web3()
const chainId = ChainId.MAINNET;

class UniswapTokenPriceFetcher
{
    constructor(async_params)
    {
        async_params.forEach((element) => {
            if (element === undefined)
            {
                throw new Error('All parameters must be defined')
            }
        });
        this.trade = async_params[0];
        this.route = async_params[1];
        this.pair = async_params[2];
        this.tok1 = async_params[3];
        this.tok2 = async_params[4];
    }

    static async build(token1, token2)
    {
        var tok1 = await Fetcher.fetchTokenData(chainId, toChecksumAddress(token1))
        var tok2 = await Fetcher.fetchTokenData(chainId, toChecksumAddress(token2))
        var pair = await Fetcher.fetchPairData(tok1, tok2)
        var route = new Route([pair], tok2)
        var trade = new Trade(route, new TokenAmount(tok2, web3.utils.toWei('1', 'Ether')), TradeType.EXACT_INPUT)
        return new UniswapTokenPriceFetcher([trade, route, pair, tok1, tok2])
    }

    getExecutionPrice6d = () =>
    {
        return this.trade.executionPrice.toSignificant(6);
    }

    getNextMidPrice6d = () =>
    {
        return this.trade.nextMidPrice.toSignificant(6);
    }
}

module.exports = UniswapTokenPriceFetcher
Thank you everybody!
EDIT: I know Uniswap only pairs with WETH, so one of my token variables is unnecessary, but the problem remains the same! Also keep in mind that I want to store an instance of this class for later use inside another file.
You should either call the build function with await:
const priceFetcher = await UniswapTokenPriceFetcher.build(token1, token2)
or follow it with then:
UniswapTokenPriceFetcher.build(token1, token2).then(priceFetcher => {...})
I don't see any other way.
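To address the edit about storing the instance for later use in another file, one common pattern is to cache the build() promise in a module and await it wherever the instance is needed. A minimal sketch (the file names and the DAI_ADDRESS/WETH_ADDRESS constants are made up):

// priceFetcher.js
const UniswapTokenPriceFetcher = require('./UniswapTokenPriceFetcher')

// Created once on first require; every importer shares the same promise.
module.exports = UniswapTokenPriceFetcher.build(DAI_ADDRESS, WETH_ADDRESS)

// elsewhere.js
const fetcherPromise = require('./priceFetcher')

async function printPrice() {
    const fetcher = await fetcherPromise // resolved instance, built only once
    console.log(fetcher.getExecutionPrice6d())
}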

How to execute a batch of transactions independently using pg-promise?

We're having an issue in our main data synchronization back-end function. Our client's mobile device pushes changes daily; however, last week they warned us that some changes weren't updated in the main web app.
After some investigation in the logs, we found that there is indeed a single transaction that fails and rolls back. However, it appears that all the transactions before this one also roll back.
The code works this way: the data to synchronize is an array of "changesets", and each changeset can update multiple tables at once. It's important that a changeset be updated completely or not at all, so each is wrapped in a transaction. Then each transaction is executed one after the other. If a transaction fails, the others shouldn't be affected.
I suspect that all the transactions are actually combined somehow, possibly through the main db.task. Instead of just looping to execute the transactions, we're using a db.task to execute them in batch, to avoid update conflicts on the same tables.
Any advice on how we could execute these transactions in batch and avoid this rollback issue?
Thanks, here's a snippet of the synchronization code:
// Begin task that will execute transactions one after the other
db.task(task => {
    const transactions = [];
    // Create a transaction for each changeset (propriete/fosse/inspection)
    Object.values(data).forEach((change, index) => {
        const logchange = { tx: index };
        const c = {...change}; // Use a clone of the original change object
        transactions.push(
            task.tx(t => {
                const queries = [];
                // Propriete
                if (Object.keys(c.propriete.params).length) {
                    const params = proprietes.parse(c.propriete.params);
                    const propriete = Object.assign({ idpropriete: c.propriete.id }, params);
                    logchange.propriete = { idpropriete: propriete.idpropriete };
                    queries.push(t.one(`SELECT ${Object.keys(params).join()} FROM propriete WHERE idpropriete = $1`, propriete.idpropriete).then(previous => {
                        logchange.propriete.previous = previous;
                        return t.result('UPDATE propriete SET' + qutil.setequal(params) + 'WHERE idpropriete = ${idpropriete}', propriete).then(result => {
                            logchange.propriete.new = params;
                        })
                    }));
                }
                else delete c.propriete;
                // Fosse
                if (Object.keys(c.fosse.params).length) {
                    const params = fosses.parse(c.fosse.params);
                    const fosse = Object.assign({ idfosse: c.fosse.id }, params);
                    logchange.fosse = { idfosse: fosse.idfosse };
                    queries.push(t.one(`SELECT ${Object.keys(params).join()} FROM fosse WHERE idfosse = $1`, fosse.idfosse).then(previous => {
                        logchange.fosse.previous = previous;
                        return t.result('UPDATE fosse SET' + qutil.setequal(params) + 'WHERE idfosse = ${idfosse}', fosse).then(result => {
                            logchange.fosse.new = params;
                        })
                    }));
                }
                else delete c.fosse;
                // Inspection (rendezvous)
                if (Object.keys(c.inspection.params).length) {
                    const params = rendezvous.parse(c.inspection.params);
                    const inspection = Object.assign({ idvisite: c.inspection.id }, params);
                    logchange.rendezvous = { idvisite: inspection.idvisite };
                    queries.push(t.one(`SELECT ${Object.keys(params).join()} FROM rendezvous WHERE idvisite = $1`, inspection.idvisite).then(previous => {
                        logchange.rendezvous.previous = previous;
                        return t.result('UPDATE rendezvous SET' + qutil.setequal(params) + 'WHERE idvisite = ${idvisite}', inspection).then(result => {
                            logchange.rendezvous.new = params;
                        })
                    }));
                }
                else delete change.inspection;
                // Cheminees
                c.cheminees = Object.values(c.cheminees).filter(cheminee => Object.keys(cheminee.params).length);
                if (c.cheminees.length) {
                    logchange.cheminees = [];
                    c.cheminees.forEach(cheminee => {
                        const params = cheminees.parse(cheminee.params);
                        const ch = Object.assign({ idcheminee: cheminee.id }, params);
                        const logcheminee = { idcheminee: ch.idcheminee };
                        queries.push(t.one(`SELECT ${Object.keys(params).join()} FROM cheminee WHERE idcheminee = $1`, ch.idcheminee).then(previous => {
                            logcheminee.previous = previous;
                            return t.result('UPDATE cheminee SET' + qutil.setequal(params) + 'WHERE idcheminee = ${idcheminee}', ch).then(result => {
                                logcheminee.new = params;
                                logchange.cheminees.push(logcheminee);
                            })
                        }));
                    });
                }
                else delete c.cheminees;
                // Lock from further changes on the mobile device
                // Note: this change will be sent back to the mobile in part 2 of the synchronization
                queries.push(t.result('UPDATE rendezvous SET timesync = now() WHERE idvisite = $1', [c.idvisite]));
                console.log(`transaction#${++transactionCount}`);
                return t.batch(queries).then(result => { // Transaction complete
                    logdata.transactions.push(logchange);
                });
            })
            .catch(function (err) { // Transaction failed for this changeset, rollback
                logdata.errors.push({ error: err, change: change }); // Provide error message and original change object to mobile device
                console.error(JSON.stringify(logdata.errors));
            })
        );
    });
    console.log(`Total transactions: ${transactions.length}`);
    return task.batch(transactions).then(result => { // All transactions complete
        // Log everything that was uploaded from the mobile device
        log.log(res, JSON.stringify(logdata));
    });
});
I apologize; it is almost impossible to give a good final answer when the question is wrong on too many levels...
It's important that a change set be updated completely or not at all, so each is wrapped in a transaction.
If the change set requires data integrity, the whole thing must be one transaction, and not a set of transactions.
Then each transaction is executed one after the other. If a transaction fails, the others shouldn't be affected.
Again, data integrity is what a single transaction guarantees, you need to make it into one transaction, not multiple.
I suspect that all the transactions are actually combined somehow, possibly through the main db.task.
They are combined, and not through task, but through method tx.
Any advice how we could execute these transactions in batch and avoid this rollback issue?
By joining them into a single transaction.
You would use a single tx call at the top, and that's it, no tasks needed there. And in case the code underneath makes use of its own transactions, you can update it to allow conditional transactions.
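For example, a minimal sketch of that single-transaction structure, where applyChange is a hypothetical helper that builds the queries for one changeset against the given context:

db.tx('sync-changesets', t => {
    // One transaction for the whole synchronization; if any changeset
    // fails, everything rolls back together.
    const queries = Object.values(data).map(change => applyChange(t, change));
    return t.batch(queries);
});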
Also, when building complex transactions, an app benefits a lot from using the repository patterns shown in pg-promise-demo. You can have methods inside repositories that support conditional transactions.
And you should redo your code to avoid the horrible things it does, like manual query formatting. For example, never use things like SELECT ${Object.keys(params).join()}; that's a recipe for disaster. Use the proper query formatting that pg-promise gives you, like SQL Names in this case.
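For instance, the SELECT and UPDATE above could be written with pg-promise's own formatting; a sketch using the propriete changeset:

// $1:name with an array of column names injects them as properly escaped
// SQL identifiers; $2 is formatted as a plain value:
t.one('SELECT $1:name FROM propriete WHERE idpropriete = $2',
    [Object.keys(params), propriete.idpropriete]);

// pgp.helpers.update generates the SET clause from the data object:
const update = pgp.helpers.update(params, null, 'propriete') +
    pgp.as.format(' WHERE idpropriete = $1', [propriete.idpropriete]);
t.result(update);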

How to cache a mongoose query in memory?

I have the following queries, which start with the getById method firing; once that fires and extracts data from another document, it saves into the race document.
I want to be able to cache the data for ten minutes after I save it. I have taken a look at the cacheman library and am not sure if it is the right tool for the job. What would be the best way to approach this?
getById: function(opts, callback) {
    var id = opts.action;
    var raceData = { };
    var self = this;
    this.getService().findById(id, function(err, resp) {
        if (err)
            callback(null);
        else {
            raceData = resp;
            self.getService().getPositions(id, function(err, positions) {
                self.savePositions(positions, raceData, callback);
            });
        }
    });
},
savePositions: function(positions, raceData, callback) {
    var race = [];
    _.each(positions, function(item) {
        _.each(item.position, function(el) {
            race.push(el);
        });
    });
    raceData.positions = race;
    this.getService().modelClass.update({ '_id': raceData._id }, { 'positions': raceData.positions }, callback(raceData));
}
I have recently coded and published a module called Monc. You can find the source code over here. It provides several useful methods to store, delete and retrieve data stored in memory.
You may use it to cache Mongoose queries with simple chaining, as:
test.find({}).lean().cache().exec(function(err, docs) {
    //docs are fetched into the cache.
});
Otherwise you may need to take a look at the core of Mongoose and override the prototype in order to provide a way to use cacheman as you originally suggested.
Create a node module and force it to extend Mongoose as:
monc.hellocache(mongoose, {});
Inside your module you should extend Mongoose.Query.prototype:
exports.hellocache = module.exports.hellocache = function(mongoose, options, Aggregate) {
    //require cacheman
    var CachemanMemory = require('cacheman-memory');
    var cache = new CachemanMemory();
    var m = mongoose;
    m.execAlter = function(caller, args) {
        //do your stuff here
    }
    m.Query.prototype.exec = function(arg1, arg2) {
        return m.execAlter.call(this, 'exec', arguments);
    };
};
Take a look at Monc's source code, as it may be a good reference on how to extend and chain Mongoose methods.
I will explain using the npm redis package, which stores key/value pairs on the cache server. Keys are queries, and redis stores only strings.
We have to make sure that keys are unique and consistent, so the key should contain both the query and the name of the model the query applies to.
When you query, inside the mongoose library there is a constructor function responsible for queries:
function Query(conditions, options, model, collection) {} //constructor function
Inside this constructor,
Query.prototype.exec = function exec(op, callback) {}
is the function responsible for executing the queries, so we have to manipulate this function and have it perform these tasks:
1. first check if we have any cached data related to the query
2. if yes, respond to the request right away with the cached data and return
3. if no, execute the query as usual, save the result to the cache, and then respond
const mongoose = require("mongoose");
const redis = require("redis");
const util = require("util");

const redisUrl = "redis://127.0.0.1:6379";
const client = redis.createClient(redisUrl);
//client.get does not return a promise
client.get = util.promisify(client.get);

const exec = mongoose.Query.prototype.exec;
//mongoose code is written using classical prototype inheritance for setting up objects and classes inside the library.
mongoose.Query.prototype.exec = async function() {
    //create a unique and consistent key
    const key = JSON.stringify(
        Object.assign({}, this.getQuery(), {
            collection: this.mongooseCollection.name
        })
    );
    //see if we have a value for the key in redis
    const cachedValue = await client.get(key);
    //if we do, return that as a mongoose model.
    //the exec function expects us to return mongoose documents
    if (cachedValue) {
        const doc = JSON.parse(cachedValue);
        return Array.isArray(doc)
            ? doc.map(d => new this.model(d))
            : new this.model(doc);
    }
    const result = await exec.apply(this, arguments); //now the exec function's original task.
    client.set(key, JSON.stringify(result), "EX", 6000); //saved to the cache server; make sure capital letters EX and time in seconds
    return result;
};
If we store values as an array of objects, we need to make sure that each object is individually converted to a mongoose document.
this.model is a method inside the Query constructor and converts a plain object into a mongoose document.
Note that if you are storing nested values, then instead of client.get and client.set, use client.hget and client.hset (see the sketch below).
Now that we have monkey-patched Query.prototype.exec, you do not need to export anything: wherever you have a query operation in your code, mongoose will execute the code above.
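If you go the hash route, here is a minimal sketch of what the patched exec could do instead; the per-collection hash key is a made-up choice for illustration, and client.hget must be promisified like client.get above:

client.hget = util.promisify(client.hget);

// inside the patched exec function:
const hashKey = this.mongooseCollection.name;  // top-level hash key
const field = JSON.stringify(this.getQuery()); // nested key
const cached = await client.hget(hashKey, field);
if (cached) {
    const doc = JSON.parse(cached);
    return Array.isArray(doc)
        ? doc.map(d => new this.model(d))
        : new this.model(doc);
}
const result = await exec.apply(this, arguments);
client.hset(hashKey, field, JSON.stringify(result));
client.expire(hashKey, 600); // drop the whole hash after 10 minutes
return result;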
