NodeJS class always returns the same value - node.js

In NodeJS, I tried to create 2 objects of the same class. However, these 2 objects are always the same despite having different values. Here is the class:
function reading(){
    var readingArr = [];
};
reading.prototype.dbValue = function(counter, limit, type, mIndex) {
    db.data.find({ 'type': type }).limit(limit).sort({timestamp:-1}).skip(counter, function(err, docs){
        readingArr = [];
        if( docs != 'undefined' ){
            for(var i=0; i<limit; i++){
                readingArr.push(docs[i].measurement[mIndex].value.toFixed(2)); // 2 decimal places
            }
        }
    });
    if(typeof readingArr == 'undefined'){
        readingArr = [];
    }
    return readingArr;
};
Here is the object creation.
var spo2 = new reading();
var spo2Arr = spo2.dbValue(0, 5, 'Oximeter', 1);
var temp1 = new reading();
var temp1Arr = temp1.dbValue(0, 5, 'Temperature', 0);
Both spo2Arr and temp1Arr return the same value despite having different values in the database. Example:
spo2Arr: 98.00
temp1Arr: 98.00
spo2Arr: 37.91
temp1Arr: 37.91
May I know how to create two unique objects in NodeJS?

You're performing an asynchronous function call which is not going to complete until some time after dbValue() has finished executing.
Try this:
reading.prototype.dbValue = function(counter, limit, type, mIndex, cb) {
    db.data.find({ 'type': type }).limit(limit).sort({timestamp:-1}).skip(counter, function(err, docs){
        if (err)
            return cb(err);
        var readingArr = [];
        if (docs !== undefined) {
            for (var i = 0; i < limit; i++)
                readingArr.push(docs[i].measurement[mIndex].value.toFixed(2));
        }
        cb(null, readingArr);
    });
};
Then you might use it like:
var spo2 = new reading();
spo2.dbValue(0, 5, 'Oximeter', 1, function(err, spo2Arr) {
    // check for `err`, if it's falsey, use `spo2Arr`
});
var temp1 = new reading();
temp1.dbValue(0, 5, 'Temperature', 0, function(err, temp1Arr) {
    // check for `err`, if it's falsey, use `temp1Arr`
});
If the temperature readings depend on the oximeter readings, you'll have to move the temperature reading code inside the oximeter reading callback, or you can use a module like async to help structure your control flow.
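For instance, a minimal sketch of the nesting approach, assuming the callback-style dbValue() shown above:
var spo2 = new reading();
spo2.dbValue(0, 5, 'Oximeter', 1, function(err, spo2Arr) {
    if (err) return console.error(err);
    // Only start the temperature query once the oximeter readings are available.
    var temp1 = new reading();
    temp1.dbValue(0, 5, 'Temperature', 0, function(err, temp1Arr) {
        if (err) return console.error(err);
        // Both spo2Arr and temp1Arr are usable here.
    });
});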

Related

Node.js and Redis: How do I add items to a function wide object from within the callback?

So, if I have something like the following:
let searchObj = {};
for (var i = 0; i < list.length; i++){
    var info = client.runFunction(variable, 0, -1, async function(err, result){
        var amount = parseInt(result[0]);
        searchObj[result1[i]] = amount;
    });
}
How can I access searchObj from outside of the callback? If I debug and print from within the callback, it works, otherwise it doesn't.
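One common pattern, sketched here with the question's placeholder names (client.runFunction, variable and list are taken from the snippet above, and the key/value choice is illustrative), is to wrap each call in a Promise and only read searchObj after Promise.all resolves:
let searchObj = {};
const tasks = list.map((item, i) => new Promise((resolve, reject) => {
    client.runFunction(variable, 0, -1, (err, result) => {
        if (err) return reject(err);
        searchObj[item] = parseInt(result[0]);   // illustrative key/value choice
        resolve();
    });
}));
Promise.all(tasks).then(() => {
    console.log(searchObj);   // populated only after every callback has fired
}).catch(console.error);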

async.waterfall randomly sorts results

I was writing nested code and tried using async.waterfall or async.series, but I am getting random results every time I refresh. It seems to be because the queries in the first 2 functions finish in random order.
The first query sorts by committed_date DESC, but when I add the 2 sub-queries, the sort gets distorted.
Step 1: Loop landing pages
Step 1.1: fetch details1 (repositories)
Step 1.2: fetch details2 (versions)
Step 2: build array
db.collection('landingpages').find({is_deleted:{$ne:1}}).sort({committed_date:-1}).limit(10).toArray(function(err, db_results) {
    var data_array = [];
    var x = 1;
    if(db_results.length == 0) {
        return_data.lps = data_array;
        parallel_done();
    }else{
        async.each(db_results, function (db_results1, cb) {
            async.waterfall(
                [
                    function(callback) {
                        // if this is removed or passed as callback(null, ""); the sort is fixed from committed - 1
                        var data_repo = {};
                        db.collection('repositories').find({repository_id: repository_id}).toArray(function(err, db_results1) {
                            if(db_results1.length == 0){
                                var data_repo = {};
                                callback(null, data_repo);
                            }else{
                                var data_repo = db_results1[0];
                                callback(null, data_repo);
                            }
                        });
                    },
                    function(callback) {
                        // if this is removed or passed as callback(null, ""); the sort is fixed from committed - 1
                        var data_version = {};
                        db.collection('versions').find({landingpage_id: landingpage_id}).sort({_id:-1}).limit(1).toArray(function(err, db_results1) {
                            if(db_results1.length == 0){
                                var data_version = {};
                                callback(null, data_version);
                            }else{
                                var data_version = db_results1[0];
                                callback(null, data_version);
                            }
                        });
                    }
                ],
                function (err, data_repo, data_version) {
                    var document = {
                        "x": x++,
                        "landingpage_id": db_results1.landingpage_id,
                        "repository_id": db_results1.repository_id,
                        "version_id": data_version.version_id,
                        "val": db_results1,
                        "data_repo": data_repo,
                        "data_version": data_version,
                    };
                    data_array.push(document);
                    if(data_array.length == db_results.length) {
                        return_data.lps = data_array;
                    }
                }
            );
        });
    }
});
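One way to keep the committed_date order despite async.each finishing in arbitrary order is async.map, which runs its iterator in parallel but returns its results in input order. A rough sketch, where buildDocument is a hypothetical stand-in for the waterfall above:
async.map(db_results, function (db_results1, cb) {
    // buildDocument is hypothetical: it would wrap the waterfall above and
    // call back with the finished document for this landing page.
    buildDocument(db_results1, function (err, document) {
        cb(err, document);
    });
}, function (err, data_array) {
    // data_array is in the same order as db_results, i.e. committed_date DESC
    if (!err) return_data.lps = data_array;
});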

Node.js passing variable to parent function

So I ran into a problem. I don't know how to pass a single string from a child function to the parent function and then send that string as a response to the client side.
This whole thing gets the five most recent matches from the API and then checks for a win or a loss depending on the player name.
Question 1: As I said before, I don't know how to pass a string from a child function to the parent function and then send it as a response to the client side.
Question 2: The output should be WWWLW, in that order, but every time it comes out in a different order, like LWWWW, WLWWW and so on. It has the right letters but in a different order, and I am missing something here.
code:
var request = require('request');
app.get('/history', getmatches, getwins);
function getmatches(req, res, next){
    var match = {};
    request({
        url: "https://eun1.api.riotgames.com/lol/match/v3/matchlists/by-account/"+ID+"/recent?api_key=" + key,
        json: true
    }, function (error, res) {
        if (!error && res.statusCode === 200) {
            for(var i=0; i < 5; i++){ // getting ID's of five last matches
                match[i] = res.body.matches[i].gameId;
            }
            req.somevariable = match;
            next();
        }
    });
};
function getwins(req, res, callback){
    var match = req.somevariable;
    var streak = '';
    var pending = 0;
    for( i = 0; i < 5; i++){ // passing ID's to another api link to get single match data
        request({
            url: "https://eun1.api.riotgames.com/lol/match/v3/matches/"+match[i]+"?api_key=" + key,
            json: true
        }, function(req, res, body){
            for(var j = 0; j < 10; j++){ // looping through 10 players in a match to find specific one
                if(body.participantIdentities[j].player.summonerName == nickname){
                    if( body.participants[j].stats.win == true){
                        streak += 'W';
                    }else{
                        streak += 'L';
                    }
                }
            }
            if(pending == 4){
                console.log(streak); // need this to pass to parent function
                return callback(null, streak); // is this something i need ?
            }
            pending++;
        });
    }
    // res streak string to client.js
};
Here is a solution that processes all the results once they are done. The results object holds all of them; use any appropriate key instead of the url.
function getwins(req, res, callback){
    var match = req.somevariable;
    var streak = '';
    var results = {};
    var total = 5;
    for (let i = 0; i < total; i++) { // passing ID's to another api link to get single match data
        // `let` gives each iteration its own `url`, so the callbacks below don't all see the last one
        let url = "https://eun1.api.riotgames.com/lol/match/v3/matches/" + match[i] + "?api_key=" + key;
        request({
            url: url,
            json: true
        }, function (error, response, body) {
            for (var j = 0; j < 10; j++) { // looping through 10 players in a match to find specific one
                if (body.participantIdentities[j].player.summonerName == nickname) {
                    if (body.participants[j].stats.win == true) {
                        streak += 'W';
                    } else {
                        streak += 'L';
                    }
                }
            }
            console.log(streak); // need this to pass to parent function
            results[url] = streak;
            if (total == Object.keys(results).length) {
                // here all requests are done - do with all results what you need
                console.log(results);
                return callback(null, streak); // pass the finished streak back once
            }
        });
    }
    // res streak string to client.js
};
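As for the second question (why the letters come out as LWWWW, WLWWW, and so on): the five requests finish in arbitrary order, and appending to streak as each response arrives records completion order, not match order. A rough sketch of one way to keep match order, writing each result into a slot keyed by the loop index instead of appending:
function getwins(req, res, callback) {
    var match = req.somevariable;
    var outcomes = new Array(5);        // one slot per match, in request order
    var pending = 5;
    for (let i = 0; i < 5; i++) {
        request({
            url: "https://eun1.api.riotgames.com/lol/match/v3/matches/" + match[i] + "?api_key=" + key,
            json: true
        }, function (error, response, body) {
            for (var j = 0; j < 10; j++) {
                if (body.participantIdentities[j].player.summonerName == nickname) {
                    outcomes[i] = body.participants[j].stats.win ? 'W' : 'L';   // fixed slot, not +=
                }
            }
            if (--pending === 0) {
                callback(null, outcomes.join(''));   // e.g. "WWWLW", always in match order
            }
        });
    }
}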

node JS: Code for my situation not working as expected

I want to query SQL to get the name and store it in an array.
Next time, if it is already present in the array, it should not query SQL.
But here it is querying SQL each time.
How can I avoid this?
var ids = [62519, 1, 2, 3, 2656, 82969, 36201, 82969, 78788, 82969, 97647, 82969, 53745, 54510, 60850, 82969, 29872, 51407, 29347, 105113];
var cnames = {};
for (var i = 0; i < ids.length; i++) {
    var nlid = ids[i];
    console.log(nlid);
    // It should be != not !== according to my knowledge
    if (cnames[nlid] != undefined) {
        console.log('already present');
    }
    else {
        console.log('not present');
        getCname(nlid);
        function getCname(nlid) {
            sqlcon.query('select name from ss_newsletters where newsletterid=' + nlid + ' limit 1', function (err, rows, fields) {
                if (err) {
                    console.error(err);
                }
                else {
                    var x = rows[0].name;
                    cnames[nlid] = x;
                }
            });
        }
    }
}
Node.js is async; you need to use callbacks, but you are running asynchronous code in a synchronous loop.
Look at the async module: https://github.com/caolan/async . It has async loops, like forEach or eachSeries.
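A minimal sketch of that idea, reusing the question's sqlcon, ids and cnames so that duplicate ids hit the cache instead of SQL:
var async = require('async');
async.eachSeries(ids, function (nlid, done) {
    if (cnames[nlid] !== undefined) {
        return done();                  // already cached: skip the query
    }
    sqlcon.query('select name from ss_newsletters where newsletterid=' + nlid + ' limit 1', function (err, rows) {
        if (err) return done(err);
        cnames[nlid] = rows[0].name;    // cache for the next occurrence of this id
        done();
    });
}, function (err) {
    if (err) console.error(err);
    else console.log(cnames);           // every distinct id was queried exactly once
});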
The Node.js query function is asynchronous, so first remove the duplicates from the array and then start querying.
The following code will remove duplicates from the array ids (note that filter returns a new array, so assign the result):
ids = ids.filter(function(elem, index) {
    return ids.indexOf(elem) == index;
});
Edit:
Otherwise, run the queries sequentially, one after the other. But this is not good practice in Node.js.
var ids = [62519, 1, 2, 3, 2656, 82969, 36201, 82969, 78788, 82969, 97647, 82969, 53745, 54510, 60850, 82969, 29872, 51407, 29347, 105113];
var cnames = {};
getAllNames(ids.length - 1);
function getAllNames(lastIndex) {
    if (lastIndex < 0) // fully iterated
        return;
    var nlid = ids[lastIndex];
    console.log(nlid);
    if (cnames[nlid] !== undefined) {
        console.log('already present');
        getAllNames(lastIndex - 1); // skip the query and move on to the next id
    } else {
        console.log('not present');
        sqlcon.query('select name from ss_newsletters where newsletterid=' + nlid + ' limit 1', function(err, rows, fields) {
            if (err) {
                console.error(err);
            } else {
                cnames[nlid] = rows[0].name;
            }
            getAllNames(lastIndex - 1); // query the next id only after this one finishes
        });
    }
}

How to perform mass inserts into mongodb using NodeJS

I have to insert about 1,000,000 documents into MongoDB using Node.js.
I'm generating these documents using a for loop, storing them in an array, and finally inserting the array into MongoDB.
var codeArray = new Array();
for (var i = 0; i < 1000000; i++){
    var token = strNpm.generate();
    var now = moment().format('YYYYMMDD hhmmss');
    var doc1 = {
        id: token,
        Discount_strId: "pending",
        Promotion_strCode: token,
        Promotion_strStatus: "I",
        Promotion_dtmGeneratedDate: now,
        User_strLogin: "test",
        Promotion_strMode: "S",
        Promotion_dtmValidFrom: "pending",
        Promotion_dtmValidTill: "pending",
        LastModified_dtmStamp: now
    };
    codeArray.push(doc1);
}
db.collection('ClPromoCodeMaster').insert(codeArray, function (err, result) {
    if (err){
        console.log(err);
    }else{
        console.log('Inserted Records - ', result.ops.length);
    }
});
The problem I'm facing is that Mongo has an insert limit of 16 MB, so I can't insert the entire array at once.
Please suggest the most optimal solution.
The main problem is in the request size and not the document size, but it amounts to the same limitation. Bulk operations and the async library with async.whilst will handle this:
var bulk = db.collection('ClPromoCodeMaster').initializeOrderedBulkOp(),
    i = 0;
async.whilst(
    function() { return i < 1000000; },
    function(callback) {
        var token = strNpm.generate();
        var now = moment().format('YYYYMMDD hhmmss');
        var doc = {
            id: token,
            Discount_strId: "pending",
            Promotion_strCode: token,
            Promotion_strStatus: "I",
            Promotion_dtmGeneratedDate: now,
            User_strLogin: "test",
            Promotion_strMode: "S",
            Promotion_dtmValidFrom: "pending",
            Promotion_dtmValidTill: "pending",
            LastModified_dtmStamp: now
        };
        bulk.insert(doc);
        i++;
        // Drain every 1000
        if ( i % 1000 == 0 ) {
            bulk.execute(function(err, response){
                bulk = db.collection('ClPromoCodeMaster').initializeOrderedBulkOp();
                callback(err);
            });
        } else {
            callback();
        }
    },
    function(err) {
        if (err) throw err;
        console.log("done");
    }
);
I should note that regardless, there is an internal limit on bulk operations of 1000 operations per batch. You can submit larger sizes, but the driver is just going to break them up and still submit in batches of 1000.
1000 is a good number to stay at, though, since it is already in line with how the request will be handled, as well as being a reasonable number of things to hold in memory before draining the request queue and sending to the server.
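As a rough sketch of the same chunk-at-1000 idea with the driver's newer insertMany() API (not part of the answer above; buildPromoDoc() is a hypothetical stand-in for the document generation in the loop):
async function insertPromoCodes(db, total) {
    const collection = db.collection('ClPromoCodeMaster');
    let batch = [];
    for (let i = 0; i < total; i++) {
        batch.push(buildPromoDoc());                          // hypothetical doc builder
        if (batch.length === 1000) {                          // flush every 1000 docs
            await collection.insertMany(batch, { ordered: false });
            batch = [];
        }
    }
    if (batch.length) {                                       // flush the remainder
        await collection.insertMany(batch, { ordered: false });
    }
}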
For inserting millions of records at a time, create a Node.js child process fork with the MongoDB bulk API.
Child Process Creation (index.js):
const {fork} = require("child_process");
let counter = 1;
function createProcess(data){
    const worker = fork("./dbOperation");
    worker.send(data);
    worker.on("message", (msg) => {
        console.log("Worker Message :", counter, msg);
        counter++;
    });
}
function bulkSaveUser(records) {
    const singleBatchCount = 10000; // Save 10,000 records per hit
    const noOfProcess = Math.ceil(records/singleBatchCount);
    let data = {};
    console.log("No of Process :", noOfProcess);
    for(let index = 1; index <= noOfProcess; index++) {
        data.startCount = (index == 1) ? index : (((index - 1) * singleBatchCount) + 1);
        data.endCount = index * singleBatchCount;
        createProcess(data);
    }
}
bulkSaveUser(1500000);
DB Operation (dbOperation.js)
const MongoClient = require('mongodb').MongoClient;
// Collection Name
const collectionName = "";
// DB Connection String
const connString = "";
process.on("message", (msg) => {
    console.log("Initialize Child Process", msg);
    const {startCount, endCount} = msg;
    inputStudents(startCount, endCount);
});
function initConnection() {
    return new Promise(function(r, e) {
        MongoClient.connect(connString, function(err, db) {
            if (err) return e(err);
            r(db);
        });
    });
}
function inputStudents(startCount, endCount) {
    let bulkData = [];
    for(let index = startCount; index <= endCount; index++ ){
        var types = ['exam', 'quiz', 'homework', 'homework'];
        let scores = [];
        // and each class has 4 grades
        for (let j = 0; j < 4; j++) {
            scores.push({'type': types[j], 'score': Math.random()*100});
        }
        // there are 500 different classes that they can take
        let class_id = Math.floor(Math.random()*501); // get a class id between 0 and 500
        let record = {'student_id': index, 'scores': scores, 'class_id': class_id};
        bulkData.push({ insertOne : { "document" : record } });
    }
    initConnection()
        .then((db) => {
            const studentDb = db.db("student");
            const collection = studentDb.collection(collectionName);
            console.log("Bulk Data :", bulkData.length);
            collection.bulkWrite(bulkData, function(err, res) {
                if (err) throw err;
                //console.log("Connected Successfully", res);
                process.send("Saved Successfully");
                db.close();
            });
        })
        .catch((err) => { console.log("Err :", err); });
}
Sample project to insert millions of records into MongoDB using a child process fork
