I am creating an insert script that does some business logic.
Basically, I want to check whether a value in the inserted item already exists in a table. But it seems that if I find a problem, request.respond() doesn't stop execution, and I get an error.
I think there is an async issue here; I'm not 100% sure how to solve it.
Is there a way to stop execution of the script?
    if (item.memberType === 'Family' && item.primaryFamilyMember) {
        table
            .where({
                memberNumber: item.primaryFamilyMember,
                memberType: 'Family',
                primaryFamilyMember: null })
            .read({
                success: function(results) {
                    if (results.length == 0) {
                        request.respond(statusCodes.BAD_REQUEST,
                            'Invalid Primary Family Member specified.');
                        console.error('Invalid Primary Family Member specified:' + item.primaryFamilyMember);
                        validInsert = false;
                    } else {
                        item.memberType = results[0].memberType;
                        item.memberLevel = results[0].memberLevel;
                        item.dateOfExpiry = results[0].dateOfExpiry;
                    }
                }
            });
    }

    if (validInsert) {
        var today = new Date();
        var prefix = today.getFullYear().toString().substr(2,2) + ('0' + (today.getMonth() + 1)).slice(-2);
        table.includeTotalCount().where(function(prefix){
            return this.memberNumber.substring(0, 4) === prefix;
        }, prefix)
        .take(0).read({
            success: function (results) {
                if (isNaN(results.totalCount)) {
                    results.totalCount = 0;
                }
                item.memberNumber = prefix + ('00' + (results.totalCount + 1)).slice(-3);
                request.execute();
            }
        });
    }
Yes, validInsert is declared at the top of the insert function.
I assume what's happening is that the if (validInsert) check runs before the read callback. But if so, I'm not sure why I'm getting "Error: Execute cannot be called after respond has been called." That implies the callback is running first.
Also, the record is being inserted when it shouldn't be, even though the 400 error is sent back to the client.
This is an Express app, right? Should I just call response.end() after the error occurs?
Yes, there are definitely async issues in that code. To solve them, get rid of your validInsert flag and simply move the if (validInsert) section into the success callback (or make it a function called from the success callback). For example:
    success: function(results) {
        if (results.length == 0) {
            request.respond(statusCodes.BAD_REQUEST,
                'Invalid Primary Family Member specified.');
            console.error('Invalid Primary Family Member specified:' + item.primaryFamilyMember);
        } else {
            item.memberType = results[0].memberType;
            item.memberLevel = results[0].memberLevel;
            item.dateOfExpiry = results[0].dateOfExpiry;
            var today = new Date();
            var prefix = today.getFullYear().toString().substr(2,2) + ('0' + (today.getMonth() + 1)).slice(-2);
            ...
            //respond successfully
        }
    }
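Putting it together, a minimal sketch of the whole insert script could look like the following. It assumes the usual insert(item, user, request) signature and that table is the same table the validation query runs against, obtained here via tables.getTable('Members') (the table name is a guess):

    function insert(item, user, request) {
        var table = tables.getTable('Members'); // hypothetical table name

        if (item.memberType === 'Family' && item.primaryFamilyMember) {
            table.where({
                memberNumber: item.primaryFamilyMember,
                memberType: 'Family',
                primaryFamilyMember: null
            }).read({
                success: function (results) {
                    if (results.length === 0) {
                        // Invalid: respond with 400 and do nothing else,
                        // so request.execute() is never reached.
                        request.respond(statusCodes.BAD_REQUEST,
                            'Invalid Primary Family Member specified.');
                    } else {
                        item.memberType = results[0].memberType;
                        item.memberLevel = results[0].memberLevel;
                        item.dateOfExpiry = results[0].dateOfExpiry;
                        generateMemberNumberAndExecute();
                    }
                }
            });
        } else {
            generateMemberNumberAndExecute();
        }

        // Runs only after the validation (if any) has succeeded.
        function generateMemberNumberAndExecute() {
            var today = new Date();
            var prefix = today.getFullYear().toString().substr(2, 2) +
                         ('0' + (today.getMonth() + 1)).slice(-2);
            table.includeTotalCount().where(function (prefix) {
                return this.memberNumber.substring(0, 4) === prefix;
            }, prefix).take(0).read({
                success: function (results) {
                    if (isNaN(results.totalCount)) {
                        results.totalCount = 0;
                    }
                    item.memberNumber = prefix + ('00' + (results.totalCount + 1)).slice(-3);
                    request.execute();
                }
            });
        }
    }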
Related
I am using expressJs to route some POST requests.
From the client side I pass an object of objects and in the server I iterate over each of them with a for loop.
My problem: inside the pool.query callback, the variable cantidad only ever takes the first value instead of being refreshed on each iteration, but right before the pool.query call it has the right value.
So, the line below is ok.
console.log("cantidad before query: " + cantidad);
But the line below is bad. It has the first value.
console.log("cantidad in query: " + cantidad);
This is part of my code.
    for (var key in objects) {
        if (objects.hasOwnProperty(key)) {
            ...
            console.log("cantidad before query: " + cantidad);
            pool.query(qProducto, idProducto, function (error, results, fields) {
                if (error) {
                    ...
                } else {
                    console.log("cantidad in query: " + cantidad);
                    ...
This is the full POST in ExpressJs.
    app.post("/commanda", function (req, res) {
        var idCuenta = req.body.idCuenta;
        var idEmpleado = req.body.idEmpleado;
        var fechaRegistro = req.body.fechaRegistro;
        var cuenta_mesero = "C:" + idCuenta + ":E:" + idEmpleado;
        var objects = req.body.objects;
        var element = {};
        for (var key in objects) {
            if (objects.hasOwnProperty(key)) {
                var qProducto = "SELECT descripcionProducto FROM PRODUCTO WHERE idProducto = ? ;";
                var descProducto = '';
                console.log("cantidad in commanda2 : " + objects[key].cantidad );
                try {
                    pool.query(qProducto, objects[key].idProducto, function (error, results, fields) {
                        if (error) {
                            console.error(error);
                            console.error("Failed with query: " + qProducto);
                            res.status(500).end();
                            throw error;
                        } else {
                            console.log("cantidad in commanda4 : " + objects[key].cantidad );
                            descProducto = JSON.stringify(results[0].descripcionProducto);
                            element = {
                                idProducto:objects[key].idProducto,
                                cantidad:objects[key].cantidad,
                                descProducto:descProducto,
                                cuenta_mesero:cuenta_mesero,
                                fechaRegistro:fechaRegistro
                            };
                            imprimirOrden(element);
                        }
                    });
                } catch (error) {
                    callback(error);
                }
            }
        }
        printer.printVerticalTab();
        res.status(200).end();
    });
This is what the object looks like:
{ '0':
{ idProducto: '28',
cantidad: '3',
descProducto: 'Product1',
precioProducto: '3500',
precioTotal: 10500,
'$$hashKey': 'object:345' },
'1':
{ idProducto: '29',
cantidad: '2',
descProducto: 'Product2',
precioProducto: '4500',
precioTotal: 9000,
'$$hashKey': 'object:346' } }
This happens because the for loop is synchronous but pool.query is asynchronous. What this means is that the for loop is essentially just queuing queries; it is not executing them one by one, so the loop will finish before even one result is returned from a query. If you want to use data from one query in the next iteration, you should look at async.js, an npm module that helps you avoid these problems. TL;DR: the console.log that you think runs inside the query actually runs before even one query has finished. More information is needed on where you declare the variable cantidad and when you change it to understand the problem accurately.
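A tiny demonstration of that ordering, with setTimeout standing in for pool.query's callback:

    // The loop finishes (and `key` reaches its final value) before any
    // queued callback runs, because `var key` is shared by every callback.
    for (var key in { a: 1, b: 2, c: 3 }) {
        console.log('loop sees key = ' + key);          // prints a, b, c in order
        setTimeout(function () {
            console.log('callback sees key = ' + key);  // prints c three times
        }, 0);
    }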
UPDATE:
What I told you at first was partly wrong because I misread the indentation of the else {}, but what I told you is still the actual problem; it was just well obfuscated. The for loop finishes before even one query has finished; the queries are just queued, so the second console.log will see the key of the last iteration of the loop. If you need logic that depends on knowing which iteration you are in, you should implement an async function so that you can tell which iteration you are actually in. If you don't want to use the async library, you can use something like this.
First, add this function at the bottom of your js file:
https://pastebin.com/4tR0xaTY
You have essentially created an async for loop, and you can now tell which iteration you are in using loop.iteration(). Then replace your POST code with the code written below (to include the async loop).
https://pastebin.com/YzZU7bqp
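For reference, the general shape of such an async loop helper is roughly the following; this is an illustrative sketch, and the pastebin code may differ in its details:

    // Minimal async loop: the body decides when to advance by calling loop.next(),
    // and loop.iteration() reports the zero-based index of the current pass.
    function asyncLoop(iterations, func, callback) {
        var index = 0;
        var done = false;
        var loop = {
            next: function () {
                if (done) { return; }
                if (index < iterations) {
                    index++;        // advance before running the body
                    func(loop);     // run one iteration of the body
                } else {
                    done = true;
                    callback();     // every iteration has finished
                }
            },
            iteration: function () {
                return index - 1;   // zero-based index of the current iteration
            }
        };
        loop.next();
        return loop;
    }

Used inside the POST handler it might look like this (hypothetical usage; note that res.status(200).end() moves into the completion callback so it only fires after every query has finished):

    var qProducto = "SELECT descripcionProducto FROM PRODUCTO WHERE idProducto = ? ;";
    var keys = Object.keys(objects);

    asyncLoop(keys.length, function (loop) {
        var key = keys[loop.iteration()];            // the key for *this* iteration
        pool.query(qProducto, objects[key].idProducto, function (error, results, fields) {
            if (error) { return res.status(500).end(); }
            console.log("cantidad in query: " + objects[key].cantidad); // now the right value
            loop.next();                             // start the next iteration only now
        });
    }, function () {
        printer.printVerticalTab();
        res.status(200).end();                       // all queries are done
    });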
I have a big collection in MongoDB. I want to migrate all the data to Cassandra, running some business-logic Node.js scripts on it along the way. What is the best way to do this?
I have made a script in which I get 5000 documents per request from Mongo, process the data, and insert the documents into Cassandra. It starts taking a lot of time after 40-50 iterations, and CPU usage shows 100%. Is this because of a lot of callbacks happening? I am new to Node.js, so I am not able to conclude anything.
    var cassandra = require('../models/tracking_cassandra');
    var TrackingEvents = require('../models/tracking_mongo_events');

    var counter = 0;
    var incr = 5000;
    var final_counter = 0;

    var start_point = function (callback){
        TrackingEvents.count(function(err, data){
            final_counter = data;
            TrackingEvents.getEventsByCounter(counter, function(counter, obj) {
                var prevId = obj[0].toObject()._id;
                getMessagesFromMongo(prevId, callback);
            });
        });
    };

    function getMessagesFromMongo(prevId, callback){
        counter = counter + incr;
        TrackingEvents.getEventsByCounter(counter, function(counter, obj) {
            var nextId = obj[0].toObject()._id;
            var start_time = new Date();
            TrackingEvents.getEventsBtwIds(prevId, nextId, function ( err, userEvents ) {
                if(userEvents.length !== 0){
                    insert_into_cassandra( userEvents, callback );
                }else{
                    console.log('empty data set');
                }
            });
            if(counter >= final_counter){
                callback();
            }else{
                getMessagesFromMongo(nextId, callback);
            }
        });
    };

    var insert_into_cassandra = function( events, callback ){
        var inserts = 0;
        total_documents = total_documents + events.length;
        for(var i = 0 ; i< events.length ; i++){
            var userEventData = events[i].toObject();
            if(typeof userEventData.uid == 'undefined'){
                total_nuid ++;
            }else{
                create_cassandra_query( userEventData );
            }
        }
    };

    var create_cassandra_query = function ( eventData ) {
        delete eventData._id;
        delete eventData[0];
        delete eventData.appid;
        delete eventData.appversion;
        var query = "INSERT INTO userwise_events ";
        var keys = "(";
        var values = "(";
        for(var key in eventData){
            if(eventData[key] == null || typeof eventData[key] == 'undefined'){
                delete eventData[key];
            }
            if (eventData.hasOwnProperty(key)) {
                keys = keys + key + ', ';
                values = values + ':' + key + ', ';
            }
            if(key != 'uid' && key!= 'date_time' && key != 'etypeId'){
                eventData[key] = String(eventData[key]);
            }
        }
        keys = keys.slice(0, -2);
        values = values.slice(0, -2);
        keys = keys + ")";
        values = values + ")";
        query = query + keys + " VALUES " + values;
        cassandra.trackingCassandraClient.execute(query, eventData, { prepare: true }, function (err, data) {
            if(err){
                console.log(err);
            }
        });
    };

    var start_time = new Date();
    start_point(function(res, err){
        var end_time = new Date();
        var diff = end_time.getTime() - start_time.getTime();
        var seconds_diff = diff / 1000;
        var totalSec = Math.abs(seconds_diff);
        console.log('Total Execution Time : ' + totalSec);
    });

    process.on('uncaughtException', function (err) {
        console.log('Caught exception: ' + err);
    });
Is this because of a lot of callbacks happening?
There may be no callbacks at all for all I know; it's impossible to say what the problem is with code of which you didn't include even a single line.
For such a vague question I can only give you general advice: make sure you don't have long-running for or while loops, and don't ever use a blocking system call anywhere other than on the first tick of the event loop. If you don't know what the first tick of the event loop is, then don't use blocking calls at all. Whenever you can, use streams for data, especially if you have lots of it.
A 100% CPU utilization is a bad sign and should never happen for an I/O-heavy operation like the one that you are trying to perform. You should easily be able to handle insane amounts of data, especially when you use streams. Having your process max out the CPU for an inherently I/O-bound operation like moving large amounts of data through a network is a sure sign that you're doing something wrong in your code. What exactly is that? That will remain a mystery, since you didn't show us even a single line of your code.
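As an illustration only, a stream-based version of the Mongo-to-Cassandra copy could be shaped roughly like the sketch below. It assumes the underlying Mongoose model is reachable (TrackingEventModel here is a stand-in name), reuses the cassandra-driver client from the question, and uses buildInsertQuery as a placeholder for the existing create_cassandra_query logic:

    // Illustrative sketch: stream documents out of Mongo and throttle the
    // concurrent Cassandra writes instead of loading 5000 documents at a time.
    var stream = TrackingEventModel.find({}).cursor(); // .stream() on older Mongoose
    var inFlight = 0;
    var MAX_IN_FLIGHT = 50; // arbitrary cap on concurrent Cassandra inserts

    stream.on('data', function (doc) {
        var event = doc.toObject();
        if (typeof event.uid === 'undefined') { return; } // skip docs without a uid, as before
        inFlight++;
        if (inFlight >= MAX_IN_FLIGHT) { stream.pause(); } // apply back-pressure
        cassandra.trackingCassandraClient.execute(
            buildInsertQuery(event), // placeholder for the INSERT built in create_cassandra_query
            event,
            { prepare: true },
            function (err) {
                if (err) { console.log(err); }
                inFlight--;
                if (inFlight < MAX_IN_FLIGHT) { stream.resume(); }
            }
        );
    });

    stream.on('error', function (err) {
        console.log(err);
    });

    stream.on('end', function () {
        // pending inserts may still be completing at this point
        console.log('finished reading from Mongo');
    });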
I have a Node.js program that simply copies a field from one collection to another collection. I wrote two of them: one copies the naming field (a string), the other copies the ids field (an array of strings). The collection is not large, roughly only 900 forms to be iterated. I can see it run and save some of the forms, but I don't understand why this error occurs as the program continues running:
FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - process out of memory
Here is the program:
    var mongoose = require('mongoose'),
        config = require('../modules/system/node-js/parseConfig'),
        schemas = require('../modules/system/node-js/schemas.js'),
        cde_schemas = require('../modules/cde/node-js/schemas'),
        form_schemas = require('../modules/form/node-js/schemas'),
        mongo_cde = require('../modules/cde/node-js/mongo-cde'),
        async = require('async');

    var mongoUrl = config.mongoUri;
    var conn = mongoose.createConnection(mongoUrl);
    var DataElement = conn.model('DataElement', cde_schemas.dataElementSchema);
    var Form = conn.model('Form', form_schemas.formSchema);

    var formCounter = 0;

    Form.find({
        archived: null
    }).exec(function (err, forms) {
        if (err) {
            console.log(err);
            process.exit(0);
        }
        async.eachSeries(forms, function (form, doneOneForm) {
            console.log("begin " + formCounter + " form id: " + form.tinyId);
            var questionCount = 0;
            var areYouDone = function () {
                console.log(questionCount);
                if (questionCount === 0) {
                    form.save(function (err) {
                        if (err)
                            process.exit(1);
                        else {
                            console.log('saved form id: ' + form.tinyId);
                            formCounter++;
                            doneOneForm();
                        }
                    });
                }
            };
            var formElements = form.formElements;
            var getQuestions = function (formElements) {
                formElements.forEach(function (fe) {
                    if (fe.elementType === 'question') {
                        questionCount++;
                        var cdeTinyId = fe.question.cde.tinyId;
                        var version = fe.question.cde.version;
                        DataElement.findOne({tinyId: cdeTinyId, version: version}).exec(function (err, cde) {
                            questionCount--;
                            if (err) {
                                console.log(err);
                                process.exit(0);
                            }
                            console.log('found cde id: ' + cdeTinyId + ' version: ' + version);
                            if (cde && cde.ids) fe.question.cde.ids = cde.ids;
                            //if I run this program with the commented line below instead of the line above,
                            //there is no error; the error only happens with ids, which is an array of strings.
                            //fe.question.cde.name = cde.naming[0].designation;
                            else {
                                console.log("no CDE with id: " + cdeTinyId)
                            }
                            areYouDone();
                        });
                    }
                    else {
                        getQuestions(fe.formElements);
                    }
                });
            };
            getQuestions(formElements);
            areYouDone();
            form.markModified('formElements');
        }, function doneAllForms() {
            console.log('finished all forms, # form: ' + formCounter);
            process.exit(0);
        });
    });
Without seeing any output from your logging statements, my guess is that you're getting into some kind of infinite recursion. The likely culprit in the code you've shown thus far is the getQuestions(fe.formElements) line.
The formElements property may refer to itself (or to another element in a way that creates a circular reference), and possibly the first value is such that fe.elementType !== 'question', so the function just keeps calling itself over and over and none of the forEach()s ever complete.
I suppose a similar thing could also happen if there are no circular references, but the chain from one set of formElements to the next is long enough to cause problems and causes getQuestions() to be executed at least once for each forEach().
You may want to start with a smaller collection of forms and/or verify that your fe.elementType values and formElements links/references are what they should be.
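A quick way to test that theory is to cap the recursion depth and log when the cap is hit; a rough sketch, where MAX_DEPTH is an arbitrary guess:

    // If this log ever fires, the formElements nesting (or a circular
    // reference) is what keeps getQuestions() running forever.
    var MAX_DEPTH = 20; // arbitrary; real forms are presumably much shallower

    var getQuestions = function (formElements, depth) {
        if (depth > MAX_DEPTH) {
            console.log('formElements nested deeper than ' + MAX_DEPTH + ' levels');
            return;
        }
        formElements.forEach(function (fe) {
            if (fe.elementType === 'question') {
                // ... unchanged question handling ...
            } else {
                getQuestions(fe.formElements, depth + 1); // pass the depth down
            }
        });
    };

    getQuestions(formElements, 0);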
buses_near_stops begins as an empty array. Inside the asynchronous calls to the database, it is supposed to be filled; then, after the calls finish, I want to use the data inside of it. When I run this code, the final console.log of buses_near_stops executes before the inner database calls, even though I have the looped calls inside of a closure. According to this post, a closure should work, but here it is doing nothing for me.
    var buses_near_stops = [];
    buses_near_stops.test = "TEST";

    // return these fields of each location document in the database
    Location.find({}, 'service_name coordinates vehicle_id last_gps_fix', function(err, doc) {
        //console.log('location found ' + JSON.stringify(doc));
        if(err){return next(err);}
        doc.forEach(function(j,k) {
            //Find a stop that is near enough to each given bus that we can say the bus is 'at' that stop
            //Making sure it returns 1 stop now because I don't know proper distance
            (function(buses_near_stops) {
                Stop.findOne({coordinates: { $near : j.coordinates, $maxDistance: .0001}
                }, function(err, stop){
                    if(err){return next(err);}
                    console.log('stop found ' + j.service_name + " " + JSON.stringify(stop));
                    // service_name is null if bus is out of service (I believe)
                    if(stop !== null && j.service_name !== null) {
                        var service_name_of_bus = j.service_name;
                        console.log('service name of bus ' + service_name_of_bus);
                        // Find the service document associated with service_name_of_bus
                        var service_of_name = Service.findOne({name: service_name_of_bus}, function(err, service_of_name){
                            if(err){return next(err);}
                            // If the service has 'stop' on its route
                            if(service_of_name != null && service_of_name.routes[0].stops.indexOf(stop.stop_id) > -1) {
                                console.log('stop found on service');
                                // We have now found a bus that is stopped at a stop on its route
                                console.log('test ' + buses_near_stops.test);
                                buses_near_stops.push(
                                    {
                                        time: j.last_gps_fix,
                                        bus_coords: j.coordinates,
                                        stop_coords: stop.coordinates,
                                        vehicle_id: j.vehicle_id,
                                        stop_id: stop.stop_id,
                                        service_name: service_name_of_bus
                                    });
                                console.log('length ' + buses_near_stops.length);
                            }
                        });
                    }
                })}(buses_near_stops));
        });
        console.log('buses near stops ' + JSON.stringify(buses_near_stops));
    });
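The closure doesn't delay anything here; the final console.log still runs as soon as the forEach has merely started the queries. One way to run it only after the last callback completes is to count outstanding calls, roughly like this (a sketch assuming the same Location, Stop and Service models and the usual Express next):

    var buses_near_stops = [];
    var pending = 0; // asynchronous lookups still outstanding

    function finish() {
        console.log('buses near stops ' + JSON.stringify(buses_near_stops));
    }

    Location.find({}, 'service_name coordinates vehicle_id last_gps_fix', function (err, doc) {
        if (err) { return next(err); }
        pending = doc.length;
        if (pending === 0) { return finish(); }
        doc.forEach(function (j) {
            Stop.findOne({ coordinates: { $near: j.coordinates, $maxDistance: 0.0001 } }, function (err, stop) {
                if (err) { return next(err); }
                if (stop === null || j.service_name === null) {
                    if (--pending === 0) { finish(); } // nothing more to do for this bus
                    return;
                }
                Service.findOne({ name: j.service_name }, function (err, service_of_name) {
                    if (err) { return next(err); }
                    if (service_of_name != null &&
                        service_of_name.routes[0].stops.indexOf(stop.stop_id) > -1) {
                        buses_near_stops.push({
                            time: j.last_gps_fix,
                            bus_coords: j.coordinates,
                            stop_coords: stop.coordinates,
                            vehicle_id: j.vehicle_id,
                            stop_id: stop.stop_id,
                            service_name: j.service_name
                        });
                    }
                    if (--pending === 0) { finish(); } // the last outstanding call ends here
                });
            });
        });
    });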
Hi, I have a Backbone web app using jQuery, with Node.js/Mongo as the server-side framework. I'm having problems making an HTTP GET call inside a forEach loop, with the results of the GET call being added iteratively to each row of the loop.
    var eventid = this.model.get("_id");
    var inPromise = $.get("/registrants/list?eventid="+eventid,null,null,"json").then(
        function (result){
            var temp;
            var finalVal = '';
            var tempfinalVal = "";
            var loop = 0
            percentage = 0;
            $.each(result.registrants,function(index,registrant){
                temp = JSON.parse(registrant.fields);
                for (var key in temp) {
                    if(key =="Email"){
                        if(temp[key] != ""){
                            $.get("/stats/registrant?userid="+temp[key]+"&eventid="+eventid,null,null,"json").then(function(result2){
                                percentage = (result2.Stats.type ===undefined || result2.Stats.type ==null) ? "0": result2.Stats.type;
                                finalVal +=percentage+"\n";
                            }).fail(function(){
                                percentage = "0";
                            });
                        }
                    }else if(key =="eventid"){
                        loop++;
                        finalVal = finalVal.slice(0, - 1);
                        finalVal +='\n';
                    }
                    finalVal +=temp[key] + ',';
                }
            });
            //promises.push(inPromise);
        }
    ).done(function(finalVal){
        $("#webcast-download-registrants-tn").attr("href",'data:text/csv;charset=utf-8;filename=registration.csv",'+encodeURIComponent(finalVal));
        console.log("DONE");
    }).fail(function(){
        console.log("fail");
    });
    // promise.done(function () {
    //     console.log(" PROMISE DONE");
    // });
So I loop through a collection, the last item of each document gets its content from another HTTP call, and when all of that is done it should create a CSV file. The problem is that the "DONE" text echoes first, and then the "CALL" text is displayed.
Rick, your problem is not the simplest, due to:
the need for nested asynchronous gets
the need to build each CSV data row partly synchronously, partly asynchronously.
the need for a mechanism to handle the fulfilment of multiple promises generated in the inner loop.
From what you've tried, I guess you already know that much.
One important thing to note is that you can't rely on for (var key in temp) to deliver properties in any particular order. Only arrays have order.
You might try something like this:
    var url = "/stats/registrant",
        data = { 'eventid': this.model.get('_id') },
        rowTerminator = "\n",
        fieldNames = ['firstname','lastname','email','company','score'];

    function getScore(email) {
        return $.get(url, $.extend({}, data, {'userid':email}), null, "json").then(function(res) {
            return res.Stats ? res.Stats.type || 0 : 0;
        }, function() {
            //ajax failure - assume score == 0
            return $.when(0);
        });
    }

    $.get("/registrants/list", data, null, "json").then(function(result) {
        var promises = [];//An array in which to accumulate promises of CSV rows
        promises.push($.when(fieldNames)); //promise of CSV header row
        if(result.registrants) {
            $.each(result.registrants, function(index, registrant) {
                if(registrant.fields) {
                    // Synchronously initialize row with firstname, lastname, email and company
                    // (omitting score for now).
                    var row = fieldNames.slice(0,-1).map(function(fieldName, i) {
                        return registrant.fields[fieldName] || '';
                    });
                    //`row` remains available to inner functions due to closure
                    var promise;
                    if(registrant.fields.Email) {
                        // Fetch the registrant's score ...
                        promise = getScore(registrant.fields.Email).then(function(score) {
                            //... and asynchronously push the score onto row
                            row.push(score);
                            return row;
                        });
                    } else {
                        //or synchronously push zero onto row ...
                        row.push(0);
                        //... and create a resolved promise
                        promise = $.when(row);
                    }
                    promises.push(promise);//Accumulate promises of CSV data rows (still in array form), in the correct order.
                }
            });
        }
        return $.when.apply(null, promises).then(function() {
            //Join all the pieces, in nested arrays, together into one long string.
            return [].slice.apply(arguments).map(function(row) {
                return row.join(); //default glue is ','
            }).join(rowTerminator);
        });
    }).done(function(str) {
        $("#webcast-download-registrants-tn").attr("href",'data:text/csv;charset=utf-8;filename=registration.csv",'+encodeURIComponent(str));
        console.log("DONE");
    }).fail(function() {
        console.log("fail");
    });
Partially tested.
See comments in code for explanation and please ask if there's anything you don't follow.