I have the following scanner
const scan = promisify(redisClient1.scan).bind(redisClient1);
const scanAll = async (pattern) => {
const found = [];
let cursor = '0';
do {
const reply = await scan(cursor, 'MATCH', pattern);
cursor = reply[0];
found.push(...reply[1]);
} while (cursor !== '0');
return found;
};
But to use it, I have to initialize redisClient1 every time:
const redisClient1 = require('redis').createClient(config.redisPort, config.redisUrl, {
no_ready_check: true,
db: 1
});
redisClient1.on('error', function (err) {
console.log('Error ' + err);
});
redisClient1.on('connect', function () {
console.log('Connected to Redis pre/dev.');
});
The problem is, I need the scanAll function to take the redisPort and redisUrl as parameters (db is always 1),
so the client initialization should only happen once the function has received those parameters.
Meaning it would look something like this:
const scanAll = async (url, port, pattern) => {
const found = [];
let cursor = '0';
do {
const reply = await customScan(url + port, cursor, 'MATCH', pattern);
cursor = reply[0];
found.push(...reply[1]);
} while (cursor !== '0');
return found;
};
How can I do something similar?
I ended up with this
const createRedisClient = async (port, url) => {
const redisClient = require('redis').createClient(port, url, {
no_ready_check: true,
db: 1
})
return redisClient;
}
const scanAll = async (pattern) => {
const redisClient = await createRedisClient('1111', 'server.com')
const scan = promisify(redisClient.scan).bind(redisClient);
const found = [];
let cursor = '0';
do {
const reply = await scan(cursor, 'MATCH', pattern);
cursor = reply[0];
found.push(...reply[1]);
} while (cursor !== '0');
return found;
};
const serverInfo = async () => {
const redisClient = await createRedisClient('1111', 'server.com')
const info = promisify(redisClient.info).bind(redisClient);
const reply = await info();
return reply;
}
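One thing worth noting about this solution: every call to scanAll or serverInfo creates a brand-new client and never closes it. A minimal sketch of one way to reuse connections, caching one client per port/url combination (the clientCache name and the 'some:pattern*' example below are just illustrative, not part of the original code):
const { promisify } = require('util');
const redis = require('redis');

// Hypothetical cache of clients keyed by "url:port" so each target is connected only once.
const clientCache = {};

const getRedisClient = (port, url) => {
  const cacheKey = `${url}:${port}`;
  if (!clientCache[cacheKey]) {
    clientCache[cacheKey] = redis.createClient(port, url, { no_ready_check: true, db: 1 });
  }
  return clientCache[cacheKey];
};

const scanAll = async (port, url, pattern) => {
  const client = getRedisClient(port, url);
  const scan = promisify(client.scan).bind(client);
  const found = [];
  let cursor = '0';
  do {
    const reply = await scan(cursor, 'MATCH', pattern);
    cursor = reply[0];
    found.push(...reply[1]);
  } while (cursor !== '0');
  return found;
};

// e.g. const keys = await scanAll('1111', 'server.com', 'some:pattern*');
// Subsequent calls against the same port/url reuse the existing connection.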
I need to cache a query with Redis and Node.js; the database is AWS S3. The problem (I'm a noob here) is that I will receive the query as a string, and I need to encode the keys from the request body so that when I later try to fetch the data I can use one of those keys (they need to be separated with '/'). Can anyone help me with that? Thanks a bunch.
Here is what I tried to do:
const { default: axios } = require('axios');
const express = require('express');
const redisClient = require('./helper/Client')
const app = express();
const port = process.env.PORT || 3000
app.use(express.json());
async function fetchApi(species){
const apiResponse = await axios.get(`https://www.fishwatch.gov/api/species/${species}`)
console.log('request sent successfully');
return apiResponse.data
}
async function getSpeciesData(req, res) {
const species = req.body;
const keys = Object.keys(species);
const encodedParams = {};
for (const key of keys) {
const encodedKey = Buffer.from(key).toString('base64');
encodedParams[encodedKey] = species[key];
}
const key = JSON.stringify(encodedParams);
let resultat;
let isCached = false;
const cachedResponse = await redisClient.get(key);
if (cachedResponse) {
isCached = true;
const decodedResponse = Buffer.from(cachedResponse, 'base64').toString('utf-8');
resultat = JSON.parse(decodedResponse);
res.send({
fromCache: isCached,
data: resultat
});
console.log(cachedResponse);
} else {
const responseData = await fetchApi(keys.join('/'));
const encodedResponseData = Buffer.from(JSON.stringify(responseData)).toString('base64');
redisClient.SETEX(key, 3600, encodedResponseData);
res.send(responseData);
}
}
app.post("/fish", getSpeciesData);
app.listen(port, () => {
console.log(`Listening on ${port}`);
});
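For reference, a hypothetical request against this endpoint (the red-snapper slug is only an example; whatever keys the JSON body contains are joined with '/' and passed to the FishWatch API):
const axios = require('axios');

// First call fetches from the FishWatch API and stores the base64-encoded
// response under a key derived from the request body; repeat calls within
// the 3600-second TTL come back with fromCache: true.
axios.post('http://localhost:3000/fish', { 'red-snapper': true })
  .then(({ data }) => console.log(data));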
I have used Redis, but it does not work as expected. The first time I execute the find query, the results come from Mongoose and amount to roughly 19 MB of data, but once the result is cached, Redis returns only one document. I don't know where I am going wrong; the configuration file is shared below.
const mongoose = require('mongoose');
const { createClient } = require('redis');
const util = require('util');
let client
(async() => {
client = createClient();
await client.connect({
legacyMode: true
});
await client.ping();
})();
const exec = mongoose.Query.prototype.exec;
mongoose.Query.prototype.cache = function (options = { time: 36000 }) {
this.useCache = true;
this.time = options.time;
this.hashKey = JSON.stringify(options.key || this.mongooseCollection.name);
return this;
};
mongoose.Query.prototype.exec = async function () {
if (!this.useCache) {
return await exec.apply(this, arguments);
}
const key = JSON.stringify({
...this.getQuery(),
});
const cacheValue = await client.get(this.hashKey, key);
console.log(cacheValue, "Cache Value");
if (cacheValue) {
const doc = JSON.parse(cacheValue);
console.log('Response from Redis',doc, this.model);
return Array.isArray(doc) ? doc.map((d) => new this.model(d)) : new this.model(doc);
}
const result = await exec.apply(this, arguments);
client.set(this.hashKey, key, JSON.stringify(result));
client.expire(this.hashKey, this.time);
return result;
};
module.exports = {
clearKey(hashKey) {
client.del(JSON.stringify(hashKey));
},
};
Here is the query:
Products.find({is_deleted:false})
.select(select)
.populate('type', 'type')
.populate('shape', 'type')
.populate('category', 'type is_pain_scale name').sort({updated_at:1}).lean().cache({
time: 36000
});
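One thing that stands out in the exec override above is that client.get and client.set are called with two and three arguments, as if the data were meant to live in a Redis hash (one hash per collection, one field per query). A minimal sketch of that hash-based variant, assuming node-redis v4's hGet/hSet/expire and reusing client and exec from the configuration file above (this is a guess at the intent, not the original code):
mongoose.Query.prototype.exec = async function () {
  if (!this.useCache) {
    return exec.apply(this, arguments);
  }
  const key = JSON.stringify({ ...this.getQuery() });
  // hypothetical hash layout: this.hashKey -> { <query JSON>: <result JSON> }
  const cacheValue = await client.hGet(this.hashKey, key);
  if (cacheValue) {
    const doc = JSON.parse(cacheValue);
    return Array.isArray(doc) ? doc.map((d) => new this.model(d)) : new this.model(doc);
  }
  const result = await exec.apply(this, arguments);
  await client.hSet(this.hashKey, key, JSON.stringify(result));
  await client.expire(this.hashKey, this.time);
  return result;
};
If the extra arguments to plain get/set are simply ignored, what ends up stored under this.hashKey is the serialized query rather than the result set, which would explain getting a single document back from the cache. Note also that legacyMode is a createClient option, not a connect option, so the client above is running in the modern promise-based mode.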
I am trying to delete keys in Redis using the code below, but for some reason it is not deleting the keys, even though the console logging works fine. Can someone please help me figure out what I am missing here?
import * as redis from 'redis';
import { RedisClient } from 'redis';
let redisClient: RedisClient = redis.createClient(redisPort, redisHostName, {
auth_pass: authPass,
no_ready_check: true,
prefix: KEY_PREFIX,
retry_strategy: redisRetryStrategy,
tls: { servername: hostName },
});
let cursor = '0';
const scan = (pattern: string, callback: () => void) => {
redisClient.scan(
cursor,
'MATCH',
pattern,
'COUNT',
'1000',
async (err, reply) => {
console.log(err);
if (err) {
throw err;
}
cursor = reply[0];
const keys = reply[1];
console.log(keys);
console.log(keys.length);
console.log(keys[1]);
if (keys) {
await redisClient.del(keys[1], (deleteErr, deleteSuccess) => {
console.log(`err ==> ${deleteErr}`);
console.log(deleteSuccess);
});
console.log(` key 0 is : ${keys[0]}`);
redisClient.del(keys[0]);
// keys.forEach((key) => {
// redisClient.del(key, (deleteErr, deleteSuccess) => {
// console.log(`err ==> ${deleteErr}`);
// console.log(deleteSuccess);
// });
// });
}
if (cursor !== '0') {
console.log(cursor);
return scan(pattern, callback);
}
return callback();
}
);
};
export const deleteResetPin = (pattern: string) => {
scan(pattern, () => {
console.log('Scan Complete');
});
};
Requirement: I want to delete all keys matching the pattern using Node.js.
With the commented part (starting at keys.forEach), running the scan function will delete all the keys that match the pattern, but there are a couple of things to fix/improve here:
- the callback (and therefore also the log) will be called before the keys are deleted.
- if scan replies with an error, that error will be uncaught and the process will exit.
- you're mixing callbacks and promises.
- you can delete a bunch of keys at once.
Here is a "promised" version of the function:
const { promisify } = require('util'),
client = require('redis').createClient(),
scanAsync = promisify(client.scan).bind(client),
delAsync = promisify(client.del).bind(client);
async function scanAndDelete(pattern: string): Promise<void> {
let cursor = '0';
do {
const reply = await scanAsync(cursor, 'MATCH', pattern, 'COUNT', '1000');
cursor = reply[0];
if (reply[1].length) await delAsync(reply[1]);
} while (cursor !== '0')
}
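This drops straight into the original deleteResetPin use case, e.g. (the pattern is only an example):
scanAndDelete('reset-pin:*').then(() => console.log('Scan Complete'));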
For Node Redis >= 4
const redisClient = require('redis').createClient();

async function scanAndDelete(pattern) {
  let cursor = 0;
  do {
    // delete any paths with query string matches
    const reply = await redisClient.scan(cursor, { MATCH: pattern, COUNT: 1000 });
    cursor = reply.cursor;
    for (const key of reply.keys) {
      await redisClient.del(key);
    }
  } while (cursor !== 0);
}
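Note that with Node Redis >= 4 the client no longer connects automatically, so the function has to run after an explicit connect, along these lines (the pattern again is just an example):
await redisClient.connect();
await scanAndDelete('reset-pin:*');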
I have tried to mock redisClient.js with redis-mock using Jest, but I couldn't find a solution for it. Please give me a code sample. I need to mock it in the controller.
redisClient.js
const redis = require('redis');
const asyncRedis = require("async-redis");
//Redis
const connection = redis.createClient(process.env.REDIS_PORT,
{
retry_strategy: function(options) {
if (options.error && options.error.code === "ECONNREFUSED") {
// End reconnecting on a specific error and flush all commands with
// a individual error
return new Error("The server refused the connection");
}
if (options.total_retry_time > 1000 * 60 * 60) {
// End reconnecting after a specific timeout and flush all commands
// with a individual error
return new Error("Retry time exhausted");
}
if (options.attempt > 10) {
// End reconnecting with built in error
return undefined;
}
// reconnect after
return Math.min(options.attempt * 100, 3000);
},
}
);
module.exports = asyncRedis.decorate(connection);
Controller
const logger = require('../../helper/logger');
const response = require("../../config/response");
const constant = require('../../config/constant');
const QuizService = require('../../services/quiz/quizService');
class QuizController {
constructor() {
this.quizService = new QuizService();
}
async getQuiz(req, res) {
const { userId, query: { campaignId } } = req;
try {
const question = await this.quizService.getQuestion(userId, campaignId);
res.send(response.res(true, constant.MSG.Quiz_FETCHED, question));
} catch (error) {
res.status(constant.RESPONSE.INTERNAL_ERROR.CODE)
.send(response.res(false, error.message, null, error.code))
}
}
}
Service
const _ = require('lodash');
const moment = require('moment');
const { Op } = require('sequelize');
const { v4: uuidv4 } = require("uuid");
const shuffle = require('shuffle-array');
const serialize = require("serialize-javascript");
const utill = require("../../helper/util");
const redis = require("../../cache/redisClient");
const constant = require('../../config/constant');
const scoreHelper = require('./../../helper/scoreHelper');
const db = require("../../models");
const Quiz = db.quiz;
const Campaign = db.campaign;
const campaign = require('../campaign/campaignService')
const SubscriberAnswer = require('../subscriberAnswer/subscriberAnswerService')
const SubscriberProgram = require('../subscriberProgram/SubsciberProgramService')
class quizService {
constructor() {
this.subscriberAnswer = new SubscriberAnswer()
this.subscriberProgram = new SubscriberProgram()
this.campaign = new campaign()
}
async getQuestion(userId, campaignId) {
const subscribedProgramData = await this._checkAvailableQuestionLimit(userId, campaignId)
if(!subscribedProgramData){
throw { message: constant.MSG.TRY_AGAIN }
}
if (subscribedProgramData.no_of_questions > 0) {
const question = await Quiz.findQuestion(userId, campaignId);
if (question.length) {
const data = {
subscriber_id: userId,
campaign_id: campaignId,
questions_id: question[0].id
}
const updateData = {
id: subscribedProgramData.id,
no_of_questions: (subscribedProgramData.no_of_questions - 1)
}
await this.subscriberAnswer.create(data);
await this.subscriberProgram.updateQuota(updateData);
const id = uuidv4();
const {answer, ...questionData } = question[0];
const responseData = await this.handleQuestionData(id, userId, campaignId, questionData, answer);
return responseData;
} else {
throw { code:constant.RESPONSE_COEDES.ALL_ANSWERED, message: constant.MSG.ANSWER_ALL }
}
} else {
throw { message: constant.MSG.QUOTA_OVER }
}
}
}
My Unit Testing Code
const QuizService = require("../../src/services/quiz/quizService");
const QuizController = require("../../src/controllers/quiz/quizController");
const quizService = new QuizService();
const quizController = new QuizController();
const httpMocks = require("node-mocks-http");
jest.mock("../../src/helper/logger");
jest.mock("../../src/cache/redisClient.js");
beforeEach(() => {
req = httpMocks.createRequest();
res = httpMocks.createResponse();
next = jest.fn();
jest.resetAllMocks();
quizService.getQuestion = jest.fn();
});
quizService.getQuestion = jest.fn();
const response = {
id: 1,
name: 'Sandun',
msisdn: '94704377575',
otp: '1234',
deleted: 0,
attempts: 0,
img_url: 'https://'
}
// This test shows how the constructor can be mocked, and how to spy on passed parameters.
describe("Test QuizController", () => {
afterEach(() => {
jest.resetAllMocks();
});
//Because getQuestion is a prototype method
it("Test - GetQuiz - Success", async () => {
req.query.programId = 1;
req.userId = 1;
jest.spyOn(QuizService.prototype, "getQuestion").mockReturnValue(response);
await quizController.getQuiz(req, res);
expect(res.statusCode).toBe(200);
});
});
ERROR
FAIL test/controllers/quiz.controller.test.js
● Test suite failed to run
TypeError: Cannot read property 'startsWith' of undefined
//Redis
const connection = redis.createClient(process.env.REDIS_PORT,
^
{
retry_strategy: function(options) {
if (options.error && options.error.code === "ECONNREFUSED") {
at normalizeUrl (node_modules/redis-mock/lib/utils/parseRedisUrl.js:4:11)
at Object.<anonymous>.module.exports (node_modules/redis-mock/lib/utils/parseRedisUrl.js:61:34)
at generateUrlOptions (node_modules/redis-mock/lib/client/createClient.js:25:30)
at unifyOptions (node_modules/redis-mock/lib/client/createClient.js:61:10)
at Object.createClient (node_modules/redis-mock/lib/client/createClient.js:64:47)
at Object.<anonymous> (src/cache/redisClient.js:5:26)
at Object.<anonymous> (src/services/quiz/quizService.js:8:15)
at Object.<anonymous> (test/controllers/quiz.controller.test.js:1:21)
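The stack trace shows redis-mock failing inside parseRedisUrl because process.env.REDIS_PORT is undefined when the tests run; even with jest.mock("../../src/cache/redisClient.js"), the automock still evaluates the real file to discover its exports, which is where createClient blows up. A minimal sketch of one way around this is to replace the automock with a module factory so redisClient.js is never loaded (the get/setex/del method names are assumptions about what the service actually calls on the client):
jest.mock("../../src/cache/redisClient.js", () => ({
  get: jest.fn().mockResolvedValue(null),
  setex: jest.fn().mockResolvedValue("OK"),
  del: jest.fn().mockResolvedValue(1),
}));
Alternatively, defining process.env.REDIS_PORT before the module is required (for example in a Jest setup file) avoids the undefined value that redis-mock chokes on.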
I am performing the following task. During registration, for the first few months we did not save users' images to Firebase Cloud Storage; we only stored the link received from Facebook. Now I am faced with the problem that some of those image links have expired. Because of this, I decided to write a cloud function and run it once as a script: it goes through the users who have only one link to an image (meaning it is the first link received from Facebook), takes the Facebook user id, and requests the current profile image. I exported a JSON file with these users from Firebase, and then I fetch links for each user separately; if a user has been deleted, I handle that error in a separate catch so it does not stop the other promises. But after running this cloud function I ran into the error below, and because of it the operation failed for almost all users. I even increased the cloud function's memory to 2 gigabytes. Please tell me how this can be fixed.
{ Error: read ECONNRESET
at exports._errnoException (util.js:1018:11)
at TLSWrap.onread (net.js:568:26) code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'read' }
My function
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const account_file = require('../account_file.json');
var FB = require('fb');
const path = require('path');
const imageDownloader = require('image-downloader');
const os = require('os');
const shortid = require('shortid');
const imageManager = require('../../lib/Images/image-manager.js');
module.exports = functions.https.onRequest((req, res) => {
const token = req.header('X-Auth-Token');
var errorsCount = 0;
return admin.auth().verifyIdToken(token)
.then(function(decodedToken) {
const adminID = decodedToken.uid;
console.log('adminID is', adminID);
const users = account_file['users'];
var fixPhotoPromises = [];
users.forEach(function(user) {
const userID = user['localId'];
const fixPhotoPromise = fixPhoto(userID).catch(error => {
console.log(error);
errorsCount += 1;
});
fixPhotoPromises.push(fixPhotoPromise);
});
return Promise.all(fixPhotoPromises);
}).then(results => {
console.log('results.length', results.length, 'errorsCount', errorsCount);
console.log('success all operations');
const successJSON = {};
successJSON["message"] = "Success operation";
return res.status(200).send(successJSON);
}).catch(error => {
console.log(error);
const errorJSON = {};
errorJSON["error"] = error;
return res.status(error.code).send(errorJSON);
});
});
function fixPhoto(userID) {
var authUser = {};
var filename = '';
return new Promise((resolve, reject) => {
return admin.auth().getUser(userID)
.then(userModel => {
const user = userModel.toJSON();
const facebookID = user['providerData'][0]['uid'];
const userID = user['uid'];
authUser = {'userID' : userID, 'facebookID' : facebookID};
const userImagesPromise = admin.database().ref()
.child('userImages')
.child(userID)
.once('value');
return Promise.all([userImagesPromise])
}).then(results => {
const userImagesSnap = results[0];
if (userImagesSnap.val() !== null && userImagesSnap.val() !== undefined) {
const userProfileImagesDict = userImagesSnap.val()['userProfileImages'];
const keys = Object.keys(userProfileImagesDict);
var userProfileImages = [];
keys.forEach(function(key){
const userProfileImage = userProfileImagesDict[key];
userProfileImages.push(userProfileImage);
});
if (userProfileImages.length > 1) {
const status = 'user has more than one image';
return resolve(status);
}
}
const facebookAppID = functions.config().facebook.appid;
const facebookAppSecret = functions.config().facebook.appsecret;
const facebookAccessPromise = FB.api('oauth/access_token', {
client_id: facebookAppID,
client_secret: facebookAppSecret,
grant_type: 'client_credentials'
});
return Promise.all([facebookAccessPromise]);
}).then(results => {
const facebookResult = results[0];
const facebookAccessToken = facebookResult['access_token'];
const profileImageURL = 'https://graph.facebook.com/' + authUser.facebookID + '/picture?width=9999&access_token=' + facebookAccessToken;
const shortID = shortid.generate() + shortid.generate() + shortid.generate();
filename = shortID + ".jpg";
const tempLocalFile = path.join(os.tmpdir(), filename);
const options = {
url: profileImageURL,
dest: tempLocalFile // Save to /path/to/dest/image.jpg
};
const imageDownloaderPromise = imageDownloader.image(options);
return Promise.all([imageDownloaderPromise])
}).then(results => {
const imageDownloaderResult = results[0];
const userID = authUser.userID;
const localImagePath = imageDownloaderResult['filename'];
const imageManagerPromise = imageManager.saveUserImageToCloudStorage(localImagePath, filename, userID);
return Promise.all([imageManagerPromise]);
}).then(results => {
const result = results[0];
return resolve(result);
}).catch(function(error) {
reject(error)
})
});
}
exports.saveUserImageToCloudStorage = function saveUserImageToCloudStorage(localImagePath, filename, userID) {
const bucketName = functions.config().googlecloud.defaultbacketname;
const bucket = gcs.bucket(bucketName);
const profileImagePath = path.normalize(path.join('userImages', userID, 'profileImages', filename));
const profileImageFile = bucket.file(profileImagePath);
return new Promise((resolve, reject) => {
bucket.upload(localImagePath, {destination: profileImagePath})
.then(() => {
const config = {
action: 'read',
expires: '03-01-2500'
};
const userRefPromise = admin.database().ref()
.child('users')
.child(userID)
.once('value');
return Promise.all([profileImageFile.getSignedUrl(config), userRefPromise])
}).then(function(results) {
const url = results[0][0];
const userSnap = results[1];
if (userSnap.val() === null || userSnap.val() === undefined) {
return resolve('user was deleted from database');
}
const userModel = userSnap.val();
const userCheckID = userModel['id'];
if (userCheckID !== userID) {
return reject("WARNING userCheckID !== userID");
}
// save to database
const userImagesRef = admin.database().ref().child('userImages')
.child(userID)
.child('userProfileImages')
.push();
const timeStamp = timestamp.now();
const imageModelID = userImagesRef.key;
const userImagesRefPromise = userImagesRef.update({
'path': url,
'id': imageModelID,
'fileName': filename,
'timeStamp': timeStamp
});
const userRef = admin.database().ref()
.child('users')
.child(userID)
.child('currentProfileImage');
const userRefPromise = userRef.update({
'path': url,
'id': imageModelID,
'fileName': filename,
'timeStamp': timeStamp
});
return Promise.all([userImagesRefPromise, userRefPromise]);
}).then(() => {
const successJSON = {};
successJSON["message"] = "Success operation";
return resolve(successJSON);
}).catch(function(error) {
return reject(error);
});
});
};
I added this code when initializing Google Cloud Storage and I did not get this error anymore.
var gcs = require('@google-cloud/storage')({keyFilename: "service-account-credentials.json"});
gcs.interceptors.push({
request: function(reqOpts) {
reqOpts.forever = false
return reqOpts
}
});
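Presumably this works because forever: false turns off the keep-alive agent in the underlying request library, so each upload gets a fresh socket instead of reusing one that the remote end may already have closed, which is what typically surfaces as ECONNRESET.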