I'm trying to delete all nodes with a date greater than '2017-04-05' in a single bulk operation from a Firebase function. Can you spot what I'm doing wrong here?
The two nodes that should get deleted are the ones highlighted in red:
Here's the code that is failing - can you see what's wrong? I'm also concerned about performance (I'll only run this once in a while). If there are millions of games in the list, should that concern me if I only run this once a day?
exports.remove = functions.https.onRequest((req, res) => {
  const deleteBeforeDate = req.query.deleteBeforeDate;
  var ref = admin.database().ref('games');
  var keysToDelete = {};
  for (var game in this.games) {
    var date = items[i]['.date'];
    if (date.value > '2017-04-05') {
      keysToDelete[game.key] = null;
    }
  }
  this.ref.update(keysToDelete);
});
Thank you very much,
Mike
To determine the keys to delete, you'll need to attach a listener. Since that is needed, you might as well create a query that selects the correct children and deletes only those:
var ref = admin.database().ref('games');
var deleteAfterDate = ref.orderByChild('date').startAt('2017-04-05');
deleteAfterDate.once('value').then(function(snapshot) {
  var updates = {};
  snapshot.forEach(function(child) {
    updates[child.key] = null;
  });
  ref.update(updates);
});
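If you want to wire this into your HTTPS function and send a response when the cleanup finishes, a minimal sketch could look like the one below. It reuses the deleteBeforeDate query parameter from your original code; the response handling is just one reasonable way to finish the request. Because the query only returns the children at or after the given date, the other games in the list are never downloaded, so even a large list should be fine for an occasional run.
exports.remove = functions.https.onRequest((req, res) => {
  // date string in the same 'YYYY-MM-DD' format stored under each game's 'date' child
  const deleteBeforeDate = req.query.deleteBeforeDate;
  const ref = admin.database().ref('games');

  // select only the matching children, then null them out in a single multi-path update
  return ref.orderByChild('date').startAt(deleteBeforeDate).once('value')
    .then((snapshot) => {
      const updates = {};
      snapshot.forEach((child) => {
        updates[child.key] = null;
      });
      return ref.update(updates);
    })
    .then(() => res.status(200).send('Deleted'))
    .catch((error) => res.status(500).send(String(error)));
});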
My goal is to obtain users' nicknames by using their IDs.
The IDs are stored in variables that are collected from a reaction collector.
I have tried a few methods and failed; most of them either return nothing or throw errors.
The code below returns nothing; the getnames() function comes back empty. This method was recommended to me by two people from a Node.js Discord server that aims to help solve issues, similar to here.
// returns player ID's
function getPlayers() {
  let players = [];
  players.push(queue.tank[0]); // First (1) in TANK queue
  players.push(queue.heal[0]); // First (1) in HEAL queue
  players.push(queue.dps[0]);  // First (2) in DPS queue
  players.push(queue.dps[1]);
  return players;
}

// get nick names from ID's
function getnames() {
  let players = getPlayers();
  let playerNicks = [];
  let newPlayer = "";
  players.forEach(async player => {
    newPlayer = await message.guild.members.fetch(player).then(function (user) { return user.displayName });
    playerNicks.push(newPlayer)
    return playerNicks;
  })
}

// formats nicknames into string
function formatnicknames() {
  let formatted_string2 = '';
  let playerNicks = getnames();
  if (playerNicks)
    formatted_string2 = `${playerNicks[0]} \n${playerNicks[1]} \n${playerNicks[2]} \n${playerNicks[3]}`;
  return formatted_string2;
}
I have also tried a few variations of the code below, but I'm still unable to obtain the nickname.
message.guild.members.cache.get(user.id)
Edit #1
I have now tried the following code with no success (boost1ID contains the ID of one user):
var mem1 = message.guild.members.fetch(boost1ID).nickname
Edit #2
I tried a new method of obtaining the display name from an ID.
var guild = client.guilds.cache.get('guildid');
var mem1 = guild.member(boost1ID);
var mem2 = guild.member(boost2ID);
var mem3 = guild.member(boost3ID);
var mem4 = guild.member(boost4ID);
var nickname1 = mem1 ? mem1.displayName : null;
var nickname2 = mem2 ? mem2.displayName : null;
var nickname3 = mem3 ? mem3.displayName : null;
var nickname4 = mem4 ? mem4.displayName : null;
var Allnicknames = `${nickname1} ${nickname2} ${nickname3} ${nickname4}`
message.channel.send(`testing nicknames: ${Allnicknames}`)
I managed to return only my own name, since I don't have a nickname on this server, but the other three users, who do have nicknames, returned null.
This is the simplest solution:
// your users ids
const IDs = ['84847448498748974', '48477847847844'];

const promises = IDs.map(async (userID) => {
  // use the cached member if available, otherwise fetch it from the API
  const member = message.guild.member(userID) || await message.guild.members.fetch(userID);
  return member.displayName || member.user.username;
});

const nicknames = await Promise.all(promises);
// you now have access to ALL the nicknames, even if the members were not cached!
The members you are trying to get the nicknames of are not necessarily cached, and this fixes that.
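Applied to your own getnames()/formatnicknames() flow, a minimal sketch (assuming getPlayers() and message are in scope, as in your snippet) could look like this; note that both functions have to become async, because members.fetch() is asynchronous:
// get nicknames from IDs; awaited because members.fetch is asynchronous
async function getnames() {
  const players = getPlayers();
  return Promise.all(players.map(async (id) => {
    const member = await message.guild.members.fetch(id);
    return member.displayName;
  }));
}

// formats nicknames into a string
async function formatnicknames() {
  const playerNicks = await getnames();
  return playerNicks.join(' \n');
}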
I made an example that could help you.
let testUsers = [];

module.exports = class extends Command {
  constructor(...args) {
    super(...args, {
      description: 'Testing.',
      category: "Information",
    });
  }

  async run(message) {
    function getNicknames(userArr, guild) {
      let playerNicks = [];
      for (var i = 0; i < userArr.length; i++) {
        playerNicks.push(guild.member(userArr[i]).displayName);
      }
      return playerNicks;
    }

    let testUser = message.guild.members.cache.get(message.author.id);
    testUsers.push(testUser);
    let guild = message.guild;
    console.log(getNicknames(testUsers, guild));
  }
}
I created a function getNicknames that takes two parameters: the first is an array of users (like the one you get from your getPlayers() function), and the second is the guild you are playing in. You need to provide the guild because every user has to be resolved to a GuildMember in order to use .displayName. I declared the user array outside of my command code, because otherwise there would only ever be one user in the array each time you use the command. Inside getNicknames() I create a new array, playerNicks, and fill it with the nicknames of the users from the provided array.
Now you have to implement that into your code.
For your code, the call to getNicknames() should look like this:
getNicknames(getPlayers(), message.guild);
My Node.js code usually runs in less than 0.5 seconds locally, but when I deploy it as a Firebase function it sometimes takes 3 to 5 seconds to return the response I want. Is there a way to fix this performance issue? The problem is that I do all the computation in a single request function, because I need to parse text from that request and use it in my code, and I couldn't figure out any other way to do it. Any suggestions would be appreciated; thanks in advance.
exports.Response = functions.https.onRequest(async (req, res) => {
  const original = req.query.text;
  var returned = original.split(',');
  var startLat = returned[0];
  var startLong = returned[1];
  var endLat = returned[2];
  var endLong = returned[3];
  var maxDistance = returned[4];
  var output = binding.Main(startLat, startLong, endLat, endLong, Number(maxDistance));
  var Response = output.split(' ');
  var Array = [];
  for (i = 0; i < Response.length; i++) {
    Array.push(Response[i]);
  }
  var data = {};
  data.table = [];
  var obj = {
    Start: Array[0]
  }
  data.table.push(obj);
  for (i = 1; i < Array.length - 1; i += 7) {
    var obj = {
      transportType: Array[i],
      Price: Array[i + 1],
      Type: Array[i + 2],
      startLatitude: Array[i + 3],
      startLongitude: Array[i + 4],
      endLatitude: Array[i + 5],
      endLongitude: Array[i + 6]
    }
    data.table.push(obj);
  }
  var obj = {
    TotalPrice: Array[Array.length - 1]
  }
  data.table.push(obj);
  res.status(200).json({
    data
  })
});
I know the code is unprofessional, but I am still new to Node.js and addons, so I tried my best to get the required output. Any suggestions/notes would be appreciated. Also, if any further code/explanation is required, please let me know.
Posting Renaud Tarnec's reply as an answer:
The delay you encounter after deploying your Cloud Function is most probably not caused by your code but by cold starts ("Functions are stateless, and the execution environment is often initialized from scratch, which is called a cold start"). See more details and possible improvement tips at https://cloud.google.com/functions/docs/bestpractices/tips, https://youtube.com/watch?v=v3eG9xpzNXM, https://youtube.com/watch?v=IOXrwFqR6kY, and https://medium.com/@duhroach/improving-cloud-function-cold-start-time-2eb6f5700f6
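One concrete mitigation from those tips is to keep expensive initialization in module scope so it is reused across warm invocations, and, if cold starts remain a problem, keep a minimum number of instances warm. A rough sketch, assuming a firebase-functions version that supports runWith({ minInstances }); the addon path is just a placeholder:
const functions = require('firebase-functions');

// loaded once per container instance, so warm invocations skip this cost
const binding = require('./build/Release/binding'); // placeholder path for your native addon

exports.Response = functions
  .runWith({ minInstances: 1 }) // keeps one instance warm (note: billed while idle)
  .https.onRequest(async (req, res) => {
    // ...same parsing and binding.Main(...) call as in your current function...
    res.status(200).json({ ok: true });
  });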
I have Redis with a lot of keys in a certain format, and I want to get the keys that match a pattern and perform some operations on them. I don't use the KEYS command since it's not recommended in production. Using SCAN, I'm wondering what the best way to write it in code is. I have to do something like a while loop, but with promises; my current solution looks like this (code is simplified a little):
'use strict'

const Promise = require('bluebird');
const config = require('./config');
const client = require('./client');

let iterator = 0;

Promise.coroutine(function* () {
  do {
    iterator = yield client.scanAsync(iterator, 'MATCH', 'myQuery', 'COUNT', config.scanChunkSize)
      .then(data => {
        let nextIterator = data[0];
        let values = data[1];
        // do some magic with values
        return nextIterator;
      })
  } while (iterator !== '0');
})();
Is there a better way to do it that I'm missing?
I realize this is a really old question, but I found all of the other answers very unsatisfying. Here is yet another attempt to scan in a relatively clean way using async await (WITHOUT the use of yet another external dependency). You can easily modify this to continuously delete each set of found keys (you would want to tackle them in batches like this in case there are LOTS). Pushing them into an array just demonstrates one very basic thing you could do with them during this stage.
const redis = require('redis');
const { promisify } = require('util');

const client = redis.createClient({ ...opts });
const scan = promisify(client.scan).bind(client);

const scanAll = async (pattern) => {
  const found = [];
  let cursor = '0';

  do {
    const reply = await scan(cursor, 'MATCH', pattern);
    cursor = reply[0];
    found.push(...reply[1]);
  } while (cursor !== '0');

  return found;
}
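As a sketch of the delete-in-batches variant mentioned above, assuming the same promisified client (DEL accepts multiple keys, and node_redis lets you pass them as an array):
const del = promisify(client.del).bind(client);

// deletes every key matching the pattern one SCAN batch at a time,
// so you never hold millions of keys in memory at once
const deleteAll = async (pattern) => {
  let cursor = '0';
  do {
    const [nextCursor, keys] = await scan(cursor, 'MATCH', pattern, 'COUNT', '1000');
    cursor = nextCursor;
    if (keys.length > 0) {
      await del(keys);
    }
  } while (cursor !== '0');
};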
You can use recursion to keep calling scan until done.
function scanAsync(cursor, pattern, returnSet) {
  return redisClient.scanAsync(cursor, "MATCH", pattern, "COUNT", "100").then(
    function (reply) {
      cursor = reply[0];
      var keys = reply[1];
      keys.forEach(function (key, i) {
        returnSet.add(key);
      });
      if (cursor === '0') {
        return Array.from(returnSet);
      } else {
        return scanAsync(cursor, pattern, returnSet)
      }
    });
}
Pass in a Set() to make sure keys aren't duplicated
myResults = new Set();
scanAsync('0', "NOC-*[^listen]*", myResults).map(
  function (myResults) { console.log(myResults); }
);
You can try this snippet to scan 1000 keys per iteration and delete them.
var cursor = '0';

function scan(pattern, callback) {
  redisClient.scan(cursor, 'MATCH', pattern, 'COUNT', '1000', function (err, reply) {
    if (err) {
      throw err;
    }
    cursor = reply[0];
    var keys = reply[1];
    // delete this batch of keys before deciding whether to continue
    keys.forEach(function (key, i) {
      redisClient.del(key, function (deleteErr, deleteSuccess) {
        console.log(key);
      });
    });
    if (cursor === '0') {
      return callback();
    }
    return scan(pattern, callback);
  });
}

scan(strkey, function () {
  console.log('Scan Complete');
});
A nice option for the node-redis module (v4 and newer) is to use scan iterators. Example:
const redis = require("redis");
const client = redis.createClient();
// note: with node-redis v4, call client.connect() (and await it) before issuing commands

async function getKeys(pattern = "*", count = 10) {
  const results = [];
  const iteratorParams = {
    MATCH: pattern,
    COUNT: count
  };
  for await (const key of client.scanIterator(iteratorParams)) {
    results.push(key);
  }
  return results;
}
(Of course, you can also process your keys on the fly in the for await loop, without storing them in an additional array, if that's enough for you.)
If you do not want to override the scan parameters (MATCH/COUNT), you can just skip them and call client.scanIterator() without arguments (the defaults MATCH="*" and COUNT=10 will then be used).
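For example, a minimal on-the-fly variant (assuming the same connected node-redis v4 client) that deletes matching keys as they are yielded:
async function deleteKeys(pattern) {
  for await (const key of client.scanIterator({ MATCH: pattern, COUNT: 100 })) {
    // handle each key as soon as SCAN yields it; no intermediate array needed
    await client.del(key);
  }
}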
I think the node bindings for Redis are pushing too much responsibility to the caller here. So I created my own library for scanning as well, using generators in node:
const redis = require('redis')
const client = redis.createClient(…)

const generators = require('redis-async-gen')
const { keysMatching } = generators.using(client)

…

for await (const key of keysMatching('test*')) {
  console.info(key)
}
The last bit is obviously the part you should care about. Instead of having to carefully control an iterator yourself, all you need to do is use a for await...of loop.
I wrote more about it here.
Go through this, it may help:
https://github.com/fritzy/node-redisscan
Don't use the library as is; go through the code available at
https://github.com/fritzy/node-redisscan/blob/master/index.js
I have Node.js files restservice.js and mysql.js.
In mysql.js I have two functions, elementlevelpricing and pricingdetail.
In restservice.js I have an API with the following code:
var workload = req.body;
var workloadinfo = {
  workloadId: workload.workloadId,
  ownerId: workload.ownerId,
  uniqueName: workload.uniqueName,
  name: workload.name
}

if (workload.elements && workload.elements.length > 0) {
  var elementlevelpricingSummary = {};
  var elementArray = [];
  var elementinfo = {};
  var metadataModified = {};
  var pricingDetail = {};

  async.forEachSeries(workload.elements, createResponse, function (err) {
    res.send(workloadinfo);
  });

  function createResponse(elements, callback) {
    var resourceIdentifierArray = [];
    elementinfo = elements;
    resourceIdentifierArray.push(elements.uri);
    var resourceIdentifiers = resourceIdentifierArray.join(',');

    // Get element level pricing summary
    mysql.elementlevelpricing(resourceIdentifiers, function (result) {
      // do some stuff here
      return callback();
    });
  };
};
I need to call the pricingdetail function in mysql.js and append its result to the global variable workloadinfo (which should already contain the result set of elementlevelpricing, and that is what is sent within forEachSeries). Can anyone suggest a professional way to accomplish this?
Use asynchronous functions. The whole point of Node.js is to avoid blocking. Blocking in Node.js is worse than blocking in threaded environments, because there aren't any other threads (though there may be other clustered processes). You're blocking the only event loop available. That means that your whole server has to wait, doing absolutely no work until your I/O is done.
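As a minimal sketch of that idea applied to your snippet (assuming mysql.pricingdetail takes the same (resourceIdentifiers, callback) shape as mysql.elementlevelpricing; adjust this to its real signature):
function createResponse(element, callback) {
  var resourceIdentifiers = element.uri;

  // fetch the summary and the detail asynchronously, then attach both to workloadinfo
  mysql.elementlevelpricing(resourceIdentifiers, function (summary) {
    mysql.pricingdetail(resourceIdentifiers, function (detail) {
      workloadinfo.elements = workloadinfo.elements || [];
      workloadinfo.elements.push({
        element: element,
        pricingSummary: summary,
        pricingDetail: detail
      });
      return callback(); // signal forEachSeries only after both results are attached
    });
  });
}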
I met a weird problem: when I use MongoDB to store data, some data is missing, which I think is because of its asynchronous nature.
So for this timetable list, I would use referenceModel.
/* Here is the application, in which, by using a train_uid and today, the schedule is looked up: */
var today = new Date();
var day = today.getDay();

scheduleModel.findByTrainAndTime(train_uid, today, function (err, doc) {
  var a = new Object();
  if (err) {
  } else {
    if (doc != null) {
      // mongodb database can give me some data about the train_id, uid
      a.train_uid = doc.train_uid;
      a.train_id = train_id;
And, most importantly, it gives me a train schedule timetable. The timetable is a list (doc.time_schedule) of JSON objects with fields like arrival, departure and tiploc. However, I need to change the tiploc into a stanox number, and referenceModel can find the stanox for a given tiploc number.
// doc.time_schedule
// here is where the array is added
So I use async: for each item in the list, I use referenceModel to query the stanox and construct an array, a.timeline, to store each b. Finally, when every asynchronous operation has finished, trainModel stores an object containing the array of stanox objects. However, when it reaches the MongoDB database, the array of stanox objects is empty. I guess this is because of the asynchronous operations, but since I used async, why doesn't it work?
      a.train_uid = doc.train_uid; // works
      a.train_id = train_id;       // works
      a.timeline = [];             // doesn't work
      a.timeline = new Array();
      var b;
      async.forEachSeries(doc.time_schedule,
        function (item, callback) {
          referenceModel.findStanoxByTicloc(item.tiploc_code, function (err, sanox) {
            try {
              b = new Object();
              b.sanox = sanox;
              a.time.push(b);
            } catch (err2) {
            }
          });
          callback();
        },
        function (err) {
          trainModel.createNewTrain(a, function () {});
        }
      );
    }
  }
});
You're calling callback after you fire off the asynchronous find, but before it actually comes back. You need to wait until after you've gotten the data to do that. The following should work better:
async.forEachSeries(doc.time_schedule,
  function (item, callback) {
    referenceModel.findStanoxByTicloc(item.tiploc_code, function (err, sanox) {
      try {
        b = new Object();
        b.sanox = sanox;
        a.time.push(b);
      } catch (err2) {
      }
      // signal completion only after the lookup has returned
      callback();
    });
  },
  function (err) {
    trainModel.createNewTrain(a, function () {});
  }
);
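If you also want to avoid the shared b variable and the empty catch block, a variation using async.mapSeries (a sketch built on the same referenceModel and trainModel calls from the question) collects the results for you:
async.mapSeries(doc.time_schedule, function (item, callback) {
  referenceModel.findStanoxByTicloc(item.tiploc_code, function (err, sanox) {
    if (err) return callback(err);
    // each mapped value becomes one entry in the results array
    callback(null, { sanox: sanox });
  });
}, function (err, results) {
  if (err) return console.error(err);
  a.time = results;
  trainModel.createNewTrain(a, function () {});
});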