While loop to check uniqueness of custom id - node.js

I have a MongoDB database set up with some objects that have a unique code (not the primary key).
I should also note that I'm using NodeJS and this code is in my server.js to connect to the MongoDB database.
To generate a new code, I generate one randomly and check whether it already exists. If it doesn't, we use it; if it does, I want to generate another code and check that one too. This is the code I use to check whether the id already exists:
function createPartyId(callback) {
    var min = 10000, max = 99999;
    var partyId = -1, count = -1;

    async.whilst(
        function () { return count != 0; },
        function (callback) {
            partyId = min + Math.floor(Math.random() * (max - min + 1));
            partyId = 88888;
            getPartyIdCount(partyId, function(num) {
                count = num;
            });
        },
        function (err) {
        }
    );
}
function getPartyIdCount(partyId, callback) {
    count = -1;
    db.db_name.find({id: partyId}, function(err, records) {
        if(err) {
            console.log("There was an error executing the database query.");
            callback(count);
        }
        count = records.length;
        callback(count);
    });
}

First of all, is there any particular reason you're not using a simple incrementing sequence? This kind of code is prone to inefficiency: the more numbers you generate, the higher the chance of collisions, which means you end up spending more time generating an ID for your data than on the rest of your processing. Not a good idea.
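If you do switch to a sequence, a minimal sketch using an atomic counter document is shown below. This is not from the question's code: the counters collection, the nextPartyId helper name, and the _id value are assumptions for illustration, and the exact findAndModify signature depends on which MongoDB driver your db object comes from (this is written in the mongo-shell/mongojs style the question appears to use).

// Hypothetical helper: atomically bumps a counter document and hands back the new value.
function nextPartyId(callback) {
    // 'counters' is an assumed helper collection holding one document per sequence
    db.counters.findAndModify({
        query: { _id: 'partyId' },
        update: { $inc: { seq: 1 } },   // atomic increment
        new: true,                      // return the updated document
        upsert: true                    // create the counter on first use
    }, function (err, doc) {
        if (err) { return callback(err); }
        callback(null, doc.seq);        // a freshly issued, never-reused id
    });
}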
But I can still tell you what's going wrong.
OK, so getPartyIdCount() will only, ever, always, without fail, return undefined (or, basically, nothing).
Your mongo call processes the return value in a callback, and that callback doesn't assign its value to anything, so return records.length just gets lost into nothingness.
You've mixed up createPartyId(), which it appears you want to run synchronously, with your mongo call, which must run asynchronously.
return always goes with the nearest containing function, so in this case it goes with function(err, records), not function getPartyIdCount(partyId).
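To illustrate that point with a schematic example (based on the question's own find call, not new code you need to add):

function getPartyIdCount(partyId) {
    db.db_name.find({id: partyId}, function (err, records) {
        return records.length;   // returns from THIS anonymous callback only
    });
    // nothing up here ever sees records.length,
    // so getPartyIdCount() itself returns undefined
}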

(Expanding my comment from above)
The issue is that createPartyId is an asynchronous function, but you're trying to return the value synchronously. That won't work. Once you touch an async operation, the rest of the call stack has to be async as well.
You don't include the code that's calling this, but I assume you want it to be something like:
var partyId = createPartyId();
// do stuff...
That's not going to work. Try this:
function createPartyId(callback) {
    var min = 10000, max = 99999;
    var partyId = -1, count = -1;

    async.whilst(
        function () { return count !== 0; },   // keep looping until the generated id is unused
        function (callback) {                  // async.whilst's per-iteration callback (shadows the outer one)
            partyId = min + Math.floor(Math.random() * (max - min + 1));
            getPartyIdCount(partyId, function(err, num) {
                if (!err) {
                    count = num;
                }
                callback(err);
            });
        },
        function (err) {
            // this is called when the loop ends, error or not
            // invoke the outer callback to return the result
            callback(err, partyId);
        }
    );
}
function getPartyIdCount(partyId, callback) {
    var count = -1;
    db.db_name.find({id: partyId}, function(err, records) {
        if (err) {
            console.log("There was an error executing the database query.");
            return callback(err);
        }
        count = records.length;
        callback(null, count);
    });
}
(I've also adopted the standard Node.js convention of always passing errors as the first argument to callback functions.)
So, to use this you would do:
createPartyId(function (err, partyId) {
    if (err) { return aughItFellOver(err); }
    // do stuff
});

Related

Issue with asynchronous mongodb query

I am trying to loop through an array and find the number of tickets assigned to each person.
Unfortunately, I noticed that my taskcount array ends up with the same values but in a different order, because of the asynchronous nature of the queries.
Some queries take longer than others, so whichever finishes first gets inserted first, and my array ends up with the same values in a different order. I want to avoid that: only once a query has completed should the next value from the array be picked up and queried against the db. How can I modify my existing code?
exports.find_task_count = function(callback) {
    var names = ['Evan', 'Surajit', 'Isis', 'Millie', 'Sharon', 'Phoebe', 'Angel', 'Serah']
    var taskcount = []
    var resultsCount = 0;
    for (var i = 0; i < names.length; i++) {
        _tasks.find({'assignee': names[i]}, function (err, tickets) {
            resultsCount++
            if (err) {
                console.log(err)
                return callback(err)
            } else {
                taskcount.push(tickets.length)
                if (resultsCount === names.length) {
                    return callback(taskcount);
                    taskcount=[]
                }
            }
        })
    }
}
You can use the async module, which is designed to handle exactly such scenarios.
I have updated the code as follows:
var async = require('async');

exports.find_task_count = function (callback) {
    var names = ['Evan', 'Surajit', 'Isis', 'Millie', 'Sharon', 'Phoebe', 'Angel', 'Serah'];
    async.map(names, function (name, iterateeCallback) {
        _tasks.find({ 'assignee': name }, function (err, tickets) {
            if (err) {
                return iterateeCallback(err);
            }
            return iterateeCallback(null, tickets.length);
        });
    }, function (error, results) {
        if (error) {
            return callback(error);
        }
        return callback(null, results);
    });
};
As per the async documentation:
Note, that since this function applies the iteratee to each item in parallel, there is no guarantee that the iteratee functions will complete in order. However, the results array will be in the same order as the original coll.
If you still want to process the array in series, use mapSeries instead of map in the above code, as sketched below.
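A sketch of that change: this drops in place of the async.map call inside find_task_count above, with the surrounding names, _tasks, and callback unchanged.

// Same as before, but each name is queried only after the previous query finishes.
async.mapSeries(names, function (name, iterateeCallback) {
    _tasks.find({ 'assignee': name }, function (err, tickets) {
        if (err) {
            return iterateeCallback(err);
        }
        return iterateeCallback(null, tickets.length);
    });
}, function (error, results) {
    if (error) {
        return callback(error);
    }
    return callback(null, results);   // results still comes back in the order of names
});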

Nodejs async.eachSeries

I asked a few questions about this a few months ago and recently got back to the script. I figured out some things, and a friend helped with the script, but now I have another problem.
This is my script now:
var j = schedule.scheduleJob('*/5 * * * * *', function(){
    var steamids = [];
    con.query("SELECT * FROM counterStrikeGlobalOffensive", function (err, rows) {
        for (var i = 0; i < rows.length; i++) {
            steamids.push(rows[i].steam64ID);
        }
        //const steamIDs = ["2342342341234123", "23423412341234", "2342314123423"]; // Steam IDs to check
        eachSeries(steamids, (steamID, callback) => {
            CSGOCli.playerProfileRequest(CSGOCli.ToAccountID(steamID));
            CSGOCli.on("playerProfile", function(profile) {
                console.log(JSON.stringify(profile, null, 2));
                callback();
            });
        }, (err) => {
            // error thrown = set, else we're done
        });
    });
});
When I use the constant steamIDs, it works perfectly, but when I use steamids, it gives me an error (I will explain below).
When I do console.log(steamids); it returns this:
[ '76561198152643711', '76561198213530057' ]
and steamIDs is
const steamIDs = ["2342342341234123", "23423412341234", "2342314123423"];
so it's almost the same as the constant steamIDs, except the constant has quotes around the numbers, which shouldn't be why it isn't working, but maybe I'm wrong?
Also, I have the callback(), but how can I make it stop giving me the error
Error: Callback was already called.
Ask for any other info please :)
You get Error: Callback was already called. because the CSGOCli.on() handler is executed multiple times. It calls the callback once, and when the event fires again the callback gets called again, but it should only be called once.
For a simple reproduction see this example:
async.eachSeries([1, 2, 3], (data, callback) => {
    console.log("Data:", data);
    for(let i = 0; i < 2; i++) {
        callback();
    }
},
(err) => {
    console.log("Callback: ", err);
});
But if you add return before the callback, like this: return callback();, the problem disappears, because the function returns immediately and the callback cannot be called a second time (see the adjusted reproduction below).
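Applied to the reproduction above, that looks like this:

async.eachSeries([1, 2, 3], (data, callback) => {
    console.log("Data:", data);
    for(let i = 0; i < 2; i++) {
        return callback();   // returns on the first pass, so the callback runs exactly once per item
    }
},
(err) => {
    console.log("Callback: ", err);   // no "Callback was already called" error this time
});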
So change your code to this and see if it works:
CSGOCli.on("playerProfile", function(profile) {
    console.log(JSON.stringify(profile, null, 2));
    return callback();
});

node.js - sqlite3 read all records in table and return

I'm trying to read all records in a sqlite3 table and return them via a callback. But it seems that, despite using serialize, these calls are still asynchronous. Here is my code:
var readRecordsFromMediaTable = function(callback){
    var db = new sqlite3.Database(file, sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE);
    var allRecords = [];
    db.serialize(function() {
        db.each("SELECT * FROM MediaTable", function(err, row) {
            myLib.generateLog(levelDebug, util.inspect(row));
            allRecords.push(row);
        }
        callback(allRecords);
        db.close();
    });
}
When the callback fires, the array prints '[]'.
Is there another call I can make (instead of db.each) that will give me all rows in one shot? I have no need to iterate through each row here.
If there isn't, how do I read all records and only then call the callback with results?
I was able to find the answer to this question. Here it is for anyone who is looking:
var sqlite3 = require("sqlite3").verbose();

var readRecordsFromMediaTable = function(callback){
    var db = new sqlite3.Database(file, sqlite3.OPEN_READONLY);
    db.serialize(function() {
        db.all("SELECT * FROM MediaTable", function(err, allRows) {
            if (err != null) {
                console.log(err);
                return callback(err);
            }
            console.log(util.inspect(allRows));
            callback(allRows);
            db.close();
        });
    });
}
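A caller would then look something like this. Note that this answer passes either the error or the row array as the single callback argument, so the caller has to distinguish the two; the log messages here are just for illustration.

readRecordsFromMediaTable(function (result) {
    if (result instanceof Error) {
        return console.error("query failed:", result);
    }
    console.log("read " + result.length + " rows");
});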
A promise-based method:
var readRecordsFromMediaTable = function(){
    return new Promise(function (resolve, reject) {
        var responseObj;
        db.all("SELECT * FROM MediaTable", null, function cb(err, rows) {
            if (err) {
                responseObj = {
                    'error': err
                };
                reject(responseObj);
            } else {
                responseObj = {
                    statement: this,
                    rows: rows
                };
                resolve(responseObj);
            }
            db.close();
        });
    });
}
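Usage of the promise-based version would then look something like this (a sketch, assuming db is already open as in the snippet above; the handlers just mirror the shapes of the resolved and rejected objects):

readRecordsFromMediaTable()
    .then(function (response) {
        console.log("got " + response.rows.length + " rows");
    })
    .catch(function (response) {
        console.error("query failed:", response.error);
    });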
The accepted answer using db.all with a callback is correct since db.each wasn't actually needed. However, if db.each was needed, the solution is provided in the node-sqlite3 API documentation, https://github.com/mapbox/node-sqlite3/wiki/API#databaseeachsql-param--callback-complete:
Database#each(sql, [param, ...], [callback], [complete])
...
After all row callbacks were called, the completion callback will be called if present. The first argument is an error object, and the second argument is the number of retrieved rows
So, where you end the first callback, instead of just } put }, function() {...}. Something like this:
var readRecordsFromMediaTable = function(callback){
    var db = new sqlite3.Database(file, sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE);
    var allRecords = [];
    db.serialize(function() {
        db.each("SELECT * FROM MediaTable", function(err, row) {
            myLib.generateLog(levelDebug, util.inspect(row));
            allRecords.push(row);
        }, function(err, count) {
            callback(allRecords);
            db.close();
        });
    });
}
I know I'm kinda late, but since you're here, please consider this:
Note that it first retrieves all result rows and stores them in memory. For queries that have potentially large result sets, use the Database#each function to retrieve all rows or Database#prepare followed by multiple Statement#get calls to retrieve a previously unknown amount of rows.
As described in the node-sqlite3 docs, you should use .each() if you're after a very large or unknown number of rows, since .all() stores the whole result set in memory before handing it over.
That being said, take a look at Colin Keenan's answer.
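For completeness, a rough sketch of the prepare / get pattern mentioned in that quote. processRow and the open db handle are placeholders, and this assumes, as the quoted docs describe, that repeated get() calls step through the result set (yielding an undefined row once it is exhausted).

var stmt = db.prepare("SELECT * FROM MediaTable");

(function nextRow() {
    stmt.get(function (err, row) {
        if (err) { return console.error(err); }
        if (row === undefined) {      // no rows left
            return stmt.finalize();
        }
        processRow(row);              // handle one row at a time (placeholder)
        nextRow();                    // only then ask for the next one
    });
})();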
I tackled this differently: since these calls are asynchronous, you need to wait until they complete before returning their data. I did it with a setInterval(), kind of like throwing pizza dough up into the air and waiting for it to come back down.
var reply = '';

db.all(query, [], function(err, rows){
    if (err != null) {
        reply = err;
    } else {
        reply = rows;
    }
});

var callbacker = setInterval(function(){
    // check that our reply has been modified yet
    if (reply !== '') {
        // clear the interval
        clearInterval(callbacker);
        // do work
    }
}, 10); // every ten milliseconds
Old question, but I came across the same issue and solved it with a different approach. The Promise option works, though it is a little too verbose for my taste in the case of a db.all(...) call.
Instead, I use Node's event mechanism:
var EventEmitter = require('events').EventEmitter
var eventHandler = new EventEmitter()
In your Sqlite function:
function queryWhatever(eventHandler) {
    db.serialize(() => {
        db.all('SELECT * FROM myTable', (err, row) => {
            // At this point, the query is completed
            // You can emit a signal
            eventHandler.emit('done', 'The query is completed')
        })
    })
}
Then, attach your callback function to the eventHandler so it "reacts" to the 'done' event:
eventHandler.on('done', () => {
    // Do something
})
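Putting the pieces together, a minimal sketch (myTable and the open db handle are assumed to exist, as above):

var EventEmitter = require('events').EventEmitter
var eventHandler = new EventEmitter()

// React to the signal emitted when the query completes
eventHandler.on('done', (msg) => {
    console.log(msg)   // 'The query is completed'
})

queryWhatever(eventHandler)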

async.whilst with internal callback

I am trying to loop while the length of some array is less than 50, or until the loop has gone through more than 14 iterations. This seems like a perfect use case for async.whilst.
However, my complication is that my work function has an asynchronous query inside it (a database query).
Here is a simplified version of my code:
var items = [];
var key = 20150713;
var iterations = 0;

async.whilst(
    function(){
        return items.length < 50 || iterations < 14;
    },
    function(callback){
        iterations += 1;
        dbQuery("my query", function(err, res){
            key -= 1;
            //add res to items.
            callback();
        });
    },
    function(err){
    });
Of course this code doesn't work because dbQuery() returns immediately, so async.whilst just blows through 14 iterations and returns an empty array before the first dbQuery even returns.
How do I handle this so that async.whilst waits for the return of the inner function before running again?
Or is async.whilst not suited to my task?
You're using it correctly. It's just that, from the code you posted, it doesn't look like you're doing anything with the result:
async.whilst(
    function(){
        return items.length < 50 || iterations < 14;
    },
    function(callback){
        iterations += 1;
        dbQuery("my query", function(err, res){
            key -= 1;
            //add res to items.
            callback();
        });
    },
    function(err){
        // this function will be called when whilst completes
        // or when there's an error
        if (!err) {
            // use items:
            console.log(items);
        }
        else {
            console.log('OOps.. something went wrong somewhere');
        }
    }
);

NodeJS + redis gives weird results

Maybe the results ain't weird, but I started using Node 1-2 months ago so for me they are...
I have a loop that picks out every other value of the array returned by hgetall (a Redis command), and in that loop I call a function to get all values from another table using the keys stored in the sorted array. This was more difficult to explain than I thought. Here's my code:
Pastebin: http://pastebin.com/tAVhSUV1 (or see below)
function getInfo (cn, callback) {
    var anArray = [];
    redis_client.hgetall('chat_info:' + cn, function (err, vals) {
        if(err) { throw err; }
        for(i in vals) {
            anArray.push(vals[i]);
        }
        return callback(anArray);
    });
}
redis_client.hgetall('chat_rooms:' + POST.chat_name, function (err, val) {
    if(err) { throw err; }
    var vars = [],
        rArr = [];
    for (i in val) {
        vars.push(i);
    }
    for(var i = 0; i < vars.length; i += 1) {
        if(i % 2 === 0) {
            getInfo(vars[i], function (hej) {
                rArr.push(hej);
            });
        }
    }
});
The callback from the call to getInfo() is executed after the entire loop. Am I missing something here? Because it can't do that, right? (When I use rArr right after the loop it's empty, but if I log it in the callback it gets logged after everything else written after the loop.)
Yes, that's probably normal.
Understand that callbacks are executed after the hgetall call returns its data, which means that when the redis functions receive something they will call the callbacks. In other words, all the callbacks may be executed later.
As JavaScript only runs in one thread, the calls to hgetall would have to be blocking to execute in the order they appear in the for loop. But you're almost certainly using async IO, so the for loop ends first, and only then does it start calling each callback that was queued inside the JavaScript event loop (a small illustration follows).
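A minimal illustration of that ordering, using setTimeout to stand in for any asynchronous call:

setTimeout(function () {
    console.log('async callback');   // runs later, once the current code has finished
}, 0);

console.log('after the call');       // prints first, even though it comes second in the source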
Edit
Unfortunately, to achieve what you're trying to do, you need to wrap your code in further callbacks. You can use this project to make that easier: https://github.com/caolan/async
You should be able to install it using npm install async.
You'd have to do something like that:
function getInfo (cn) {
return function(callback) {
var anArray = [];
redis_client.hgetall('chat_info:' + cn, function (err, vals) {
if(err) { throw err; }
for(i in vals) {
anArray.push(vals[i]);
}
return callback(anArray);
});
};
}
redis_client.hgetall('chat_rooms:' + POST.chat_name, function (err, val) {
    if(err) { throw err; }
    var vars = [],
        rArr = [],
        callbacks = [];
    for (i in val) {
        vars.push(i);
    }
    for(var i = 0; i < vars.length; i += 1) {
        if(i % 2 === 0) {
            callbacks.push(getInfo(vars[i]));
        }
    }
    async.series(callbacks, function (err, results) {
        // Final code here
    });
});
