Multiple queries inside mongodb query - node.js

I'm having an issue when trying to query based on the result of another query in MongoDB.
I'm trying to make an initial query and then do another query for each of the results of the first query. The reason I'm doing it like this is because I have two different collections and I need to join some data from one collection with the data of the other. In the SQL world I could easily do this with a JOIN, but as I'm using MongoDB here I can't really use JOINs, so I guessed doing a for loop inside the first query's callback function would be the way to go.
Here's the code I'm using...
var resultSet = [];
db.get('busstopcollection').find({id_bus_stop: parseInt(req.body.busstopid)}, function(e, docs){
if(e || docs.length === 0) {
console.log("Sorry, wrong id.");
return e;
}
for(var m=0; m<docs.length; m++){
var auxRes = {};
auxRes.id_bus = docs[m].id_bus;
auxRes.id_bus_stop = docs[m].id_bus_stop;
auxRes.coord_x = docs[m].coord_x;
auxRes.coord_y = docs[m].coord_y;
auxRes.id_bus_variation = docs[m].id_bus_variation;
db.get('buscollection').find({id_bus: parseInt(docs[m].id_bus)}, function(e, busDocs){
auxRes.s_origin_description = busDocs[0].s_origin_description;
auxRes.s_destination_description = busDocs[0].id_destination_description;
resultSet.push(auxRes);
});
res.send(JSON.stringify(resultSet));
}
});
I need to res.send the resultSet array after all the values have been added.
I've tried some other ways of doing this, but the thing is that when the res.send line is reached the second query hasn't finished at all. I also tried doing that inside the inner query's callback, but I need to check if it's the last one in the for loop, and checking the value of m won't do it, as it is always equal to docs.length.
As far as I know there's no such thing as a synchronous query in mongodb, but maybe I'm wrong.
What's the right way of doing this?
EDIT
I found a way around it, but I'm sure there's got to be a better way. Here's how I'm doing it...
db.get('busstopcollection').find({id_bus_stop: parseInt(req.body.busstopid)}, function(e, docs){
if(e || docs.length === 0) {
console.log("Ha ocurrido un error, no existe esa parada");
return e;
}
var busIDs = [];
for(var m=0; m<docs.length; m++){
busIDs.push(parseInt(docs[m].id_bus));
var auxRes = {};
auxRes.id_bus = docs[m].id_bus;
auxRes.id_bus_stop = docs[m].id_bus_stop;
auxRes.coord_x = docs[m].coord_x;
auxRes.coord_y = docs[m].coord_y;
auxRes.id_bus_variation = docs[m].id_bus_variation;
resultSet.push(auxRes);
}
db.get('buscollection').find({id_bus: {$in: busIDs}}, function(e, busDocs){
for(var n = 0; n<busDocs.length; n++){
for(var k=0; k<resultSet.length; k++){
if(resultSet[k].id_bus == busDocs[n].id_bus){
resultSet[k].s_origin_description = busDocs[n].s_origin_description;
resultSet[k].s_destination_description = busDocs[n].id_destination_description;
}
}
}
res.send(JSON.stringify(resultSet));
});
});

Node.js is asynchronous, so you have to write your code with that behavior in mind. Use callbacks, promises, or a flow-control library. In your program you have put a Mongo query inside a loop, which is a poor way to query. Instead of querying multiple times, use the $in operator. It will improve your code's performance and also solve your response-sending problem:
var resultSet = [];
db.get('busstopcollection').find({id_bus_stop: parseInt(req.body.busstopid)}, function(e, docs){
if(e || docs.length === 0) {
console.log("Sorry, wrong id.");
return e;
}
var bus_ids = [];
for(var m=0; m<docs.length; m++){
var auxRes = {};
auxRes.id_bus = docs[m].id_bus;
bus_ids.push(parseInt(docs[m].id_bus)); // collect all ids
auxRes.id_bus_stop = docs[m].id_bus_stop;
auxRes.coord_x = docs[m].coord_x;
auxRes.coord_y = docs[m].coord_y;
auxRes.id_bus_variation = docs[m].id_bus_variation;
resultSet.push(auxRes);
}
// Query at one time for all document
db.get('buscollection').find({id_bus: {$in : bus_ids}}).toArray( function(e, busDocs){
// Now find and merge in one go
busDocs.forEach(function(eachBusDoc){
for(var i=0,len = resultSet.length;i< len;i++){
if(resultSet[i].id_bus == eachBusDoc.id_bus ){ // compare against the current bus doc, not the whole array
resultSet[i].s_origin_description = eachBusDoc.s_origin_description;
resultSet[i].s_destination_description = eachBusDoc.id_destination_description;
}
}
});
res.send(JSON.stringify(resultSet));
});
});

Your updated solution in your question is generally fine, as using $in is an excellent way of fetching a set of results (you'll want to make sure that you've indexed the id_bus property).
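Creating that index, if it doesn't exist yet, is a one-off command in the mongo shell; the collection and field names below are the ones from the question, and on older MongoDB versions ensureIndex() is the equivalent call:
db.buscollection.createIndex({ id_bus: 1 })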
Here are a few tweaks (with a bit of cleanup and optimization):
db.get('busstopcollection')
.find({id_bus_stop: parseInt(req.body.busstopid)}).toArray(function(e, docs){
var resultSet = []; // accumulates the merged results (declared here so the snippet stands on its own)
var auxById = {}; // store a dictionary of all the results for later
if(e || docs === null || docs.length === 0) {
console.log("Ha ocurrido un error, no existe esa parada");
return e;
}
var busIDs = [];
docs.forEach(function(doc) {
busIDs.push(parseInt(doc.id_bus));
// consider just using the doc directly rather than copying each property
// especially if you're not manipulating any of the data as it passes
var auxRes = {
id_bus : doc.id_bus,
id_bus_stop : doc.id_bus_stop,
coord_x : doc.coord_x,
coord_y : doc.coord_y,
id_bus_variation : doc.id_bus_variation
};
// you could just use each doc directly ...
// var auxRes = doc; ??
// ** importantly, store off the id_bus for each one so you can
// ** avoid a costly loop trying to match an id below.
auxById[doc.id_bus] = auxRes;
resultSet.push(auxRes);
});
// might want to consider using a cursor ... here's an example
db.get('buscollection')
.find({id_bus: {$in: busIDs}}).each(function(e, busDoc){
// the last item in the cursor will be null
if (busDoc === null) {
res.send(JSON.stringify(resultSet));
return;
}
var aux = auxById[busDoc.id_bus]; // don't call this `res` - that would shadow the response object used above
if (aux) { // did we find it in our dictionary of results?
// yes, we did == copy the necessary field data
aux.s_origin_description = busDoc.s_origin_description;
aux.s_destination_description = busDoc.id_destination_description;
}
});
});
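As an aside, on MongoDB 3.2+ the same join can be pushed to the server with the $lookup aggregation stage, which avoids the second round-trip entirely. A minimal sketch, assuming the collection and field names from the question and that your db.get() wrapper exposes aggregate() (the native driver's collection.aggregate() takes the same pipeline):
db.get('busstopcollection').aggregate([
    { $match: { id_bus_stop: parseInt(req.body.busstopid) } },
    { $lookup: {
        from: 'buscollection',
        localField: 'id_bus',
        foreignField: 'id_bus',
        as: 'bus' // each stop document gets a 'bus' array holding the matching bus documents
    } }
], function (e, docs) {
    if (e) return res.status(500).send(e);
    res.send(JSON.stringify(docs));
});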

Related

In Node/Express, how to match or compare values in nested promises using csv-to-array

I have a Node/Express partial that is being called with AJAX, and is supposed to send a status update back to the view after 2 subsequent API calls are made. This workflow relies on the csv-to-array module to read a ship-orders.csv file, and determine if the second API call (POST to Shipments) has already occurred. It is supposed to do this by matching the OrderNumber in the csv file to the returned OrderNumber from the FindOrders endpoint (the first API).
The problem is that I am creating 2 arrays of order numbers to compare, but matching the first set of order numbers to the second set either always returns true or always returns false, and it very clearly should show "true" for the first record in the csv, and "false" for every other record.
Before getting into the bulk of the code, here's the promise that reads the csv file into an array:
csv-to-array:
var csvShipPromise = new Promise(function(resolve, reject){
var csvColumns = ['ChannelName', 'OrderNumber', 'LineNumber', 'WarehouseCode', 'Qty', 'Carrier', 'TrackingNumber', 'Shipdate', 'ShipMethod'];
var csvShipArr;
var csvArr;
csvArray({
file: shipLog,
columns: csvColumns
}, function(err, array){
csvShipArr = array;
resolve(csvShipArr);
});
});
Next I have a long promise that gets executed when the request to the partial is made. The comparison between logged OrderNumbers and OrderNumbers that need to be posted to Shipments is the 5th "then" block (and it's commented in the code below).
router.get and chained promise:
router.get('/', function(req, res, next) {
findPromise.then(function(findData){
//Properly format xml string
var foundData = replaceAll(findData, '&lt;', '<');
foundData = replaceAll(foundData, '&gt;', '>');
return foundData;
}).then(function(foundData){
//Parse xml to JSON and stringify
var parsedFound;
parseString(foundData, function(err, result){ //uses an xml to json module
parsedFound = JSON.stringify(result);
});
return(parsedFound);
}).then(function(parsedStr){
//Parse JSON and return an array of objects
var parsedJson = JSON.parse(parsedStr);
var orders = parsedJson['soap:Envelope']['soap:Body'][0]['FindOrders'][0]['orders'][0]['order'];
return orders;
}).then(function(orders){
//Get only orders with a tracking number.
var trackArray = [];
var ord;
for(ord in orders){
var postObj = orders[ord];
if(postObj.TrackingNumber[0].length > 1){
trackArray.push(postObj);
}
}
return trackArray; //array of orders that contain tracking numbers
}).then(function(trackArray){
/**** This is the block that is causing problems. *****/
var tItm;
var loggedOrders = [];
for(tItm in trackArray){
var alreadyLogged = false;
var trackedItm = trackArray[tItm];
var trackedOrderNum = trackedItm.ReferenceNum;
csvShipPromise.then(function(csvOrders){
var csv;
var loggedOrderArr = [];
for (csv in csvOrders){
var csvItm = csvOrders[csv];
var csvOrderNum = csvItm.OrderNumber; //gets the OrderNumber as expected
loggedOrderArr.push(csvOrderNum);
}
return loggedOrderArr; //Return a simple array of all OrderNumbers
}).then(function(loggedOrderArr){
console.log(loggedOrderArr);
console.log(trackedOrderNum);
var ord;
for (ord in loggedOrderArr){
if(trackedOrderNum == loggedOrderArr[ord]){
console.log('found');
alreadyLogged = true;
}
else {
console.log('not found');
alreadyLogged = false;
}
}
return loggedOrderArr; //Simply returning this value because the alreadyLogged test isn't working.
});
/* Here is where the test fails.
It shouldn't, because there are, say, 4 OrderNumbers in the result of the first API call,
and only 1 Order number logged in the CSV.
So it should be true once, and false 3 times.
But it is true all the time.
*/
if(alreadyLogged){
console.log('found'); //Always logs true/found.
} else {
console.log('not found');
}
}
return trackArray; //Just passing the array to the view, for now.
}).then(function(obj){
res.send(obj);
return(obj);
}).catch(function(err){
console.log(err);
});
});
When I console.log the values of trackArray and loggedOrderArr, I see that there should be an intersection between an array of 4 values and an array of 1 value, but for some reason the comparison, if(trackedOrderNum == loggedOrderArr[ord]), isn't working.
Alright, I'm gonna be honest, your code made my eyes swim. But as far as I can tell, a few things pop up:
move var alreadyLogged = false; to before the loop;
then add alreadyLogged = false; after the if(alreadyLogged) statement
I think it has to do with scope. You are basically checking the bool value of a var that has not changed yet, because your promises have not resolved at the point of if(alreadyLogged).
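Put differently, anything that depends on alreadyLogged has to run inside the promise chain, after the loop that sets it, because that is the only point at which the value is final. A minimal sketch of the shape, reusing the variable names from the question:
csvShipPromise.then(function (csvOrders) {
    return csvOrders.map(function (itm) { return itm.OrderNumber; });
}).then(function (loggedOrderArr) {
    // the promise has resolved here, so the comparison is meaningful
    var alreadyLogged = loggedOrderArr.indexOf(trackedOrderNum) !== -1;
    console.log(alreadyLogged ? 'found' : 'not found');
});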
Might I suggest a different approach?
Why not make use of array.indexOf()?
Let's say you have two arrays to compare, arrA and arrB; you can see if an item exists like so:
var index = arrA.indexOf(arrB[0]);
if(index == -1){
console.log('No Match');
}
else{
console.log('Match found');
}
No need for any preset flags to see if one array contains an element.
Hope it helps.
A bit more context:
var index = loggedOrderArr.indexOf(trackedOrderNum);
if(index == -1){
console.log('No Match');
// -1 basically means that there is no instance of trackedOrderNum in loggedOrderArr
}
else{
console.log('Match found');
}
What you are attempting appears to be reasonably simple. You are just overwhelming yourself with awkward flow control and bulky code.
As it stands, the asynchronous flow isn't quite right, chiefly because parseString() is not promisified: a value returned from a raw nodeback won't propagate down a .then chain.
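The usual fix is to wrap that nodeback in a promise so its result can travel down the chain; a minimal sketch, assuming the xml2js-style parseString(input, callback) used in the question (the full rewrite below does the same thing inline):
function parseStringAsync(xml) {
    return new Promise(function (resolve, reject) {
        parseString(xml, function (err, result) {
            if (err) reject(err);
            else resolve(result);
        });
    });
}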
In addition, the asynchronous flow will improve with:
application of Promise.all() up front to aggregate the two essential data-delivering promises csvShipPromise and findPromise.
the realisation that wholly synchronous steps in a promise chain can be merged with the next step.
And, the bulk of the synchronous code will reduce by employing several Array methods:
Array.prototype.filter()
Array.prototype.map()
Array.prototype.includes()
Boiling it down to somewhere near the bare minimum, I get the following router.get() expression:
router.get('/', function(req, res, next) {
return Promise.all([csvShipPromise, findPromise])
.then(([csvOrders, findData]) => { // destructuring - note the extra parentheses around the array pattern
let loggedOrderArr = csvOrders.map(order => order.OrderNumber);
let foundData = replaceAll(findData, '&lt;', '<');
foundData = replaceAll(foundData, '&gt;', '>');
return new Promise((resolve, reject) => { // promisify parseString() on the fly
parseString(foundData, (err, result) => {
if(err) reject(err);
else resolve(result['soap:Envelope']['soap:Body'][0].FindOrders[0].orders[0].order); // does this expression really return `orders` (plural)?
});
})
.then(orders => {
let trackArray = orders.filter(postObj => postObj.TrackingNumber[0].length > 1); // filter orders to eliminate those without a tracking number.
let loggedOrders = trackArray.filter(trackedItm => loggedOrderArr.includes(trackedItm.ReferenceNum));
// let unloggedOrders = trackArray.filter(trackedItm => !loggedOrderArr.includes(trackedItm.ReferenceNum));
res.send(loggedOrders); // or res.send(unloggedOrders), depending on what you want.
});
})
.catch(err => {
console.log(err);
res.error(err); // or similar
});
});
untested - I may have made mistakes, though hopefully ones that are simple to correct

Using knex SELECT query results for another SELECT query

I am trying to run a PostgreSQL query with Knex and then use the results to run another query.
exports.buildBuoyFeaturesJSON = function (conditionA, conditionB) {
var query = null;
var selectedFields = knex.select
(
knex.raw('t_record.id AS id'),
...
knex.raw('t_record.latitude AS latitude'),
knex.raw('t_record.longitude AS longitude')
)
.from('t_record')
.then(function (response) {
var geometry_array = [];
var rows = response.rows;
var keys = [];
for (var key = 0; key <= rows.length - 1; key++) {
var geometry =
{
"id" : rows[key].id,
"type" : "Feature",
"geometry" : rows[key].geometry,
"properties" : {
...
"sensors" : []
}
};
keys.push(rows[key].id);
geometry_array.push(geometry);
}
getMeasurementsAndSensors(keys, geometry_array);
});
};
The latter function uses some of the results from the previous function. Due to the asynchronous nature of Knex, I need to call the second function from inside the first function's .then() statement:
function getMeasurementsAndSensors (keys, geometry_array) {
var query = knex
.select
(
't_record_id',
'i_sensor_id',
'description',
'i_measurement_id',
't_sensor_name',
't_measurement_name',
'value',
'units'
)
.from('i_record')
...
.whereRaw('i_record.t_record_id IN (' + keys + ')')
.orderByRaw('t_record_id, i_sensor_id ASC')
.then(function (response) {
var rows = response.rows;
var t_record_id = 0;
var i_sensor_id = 0;
var record_counter = -1;
var sensor_counter = -1;
for (var records = 0; records <= rows.length -1; records++) {
if (t_record_id !== rows[records].t_record_id) {
t_record_id = rows[records].t_record_id;
record_counter++;
sensor_counter = -1;
}
if (i_sensor_id !== rows[records].i_sensor_id) {
i_sensor_id = rows[records].i_sensor_id;
geometry_array[record_counter].properties.sensors[++sensor_counter] =
{
'i_sensor_id' : rows[records].i_sensor_id,
't_sensor_name' : rows[records].t_sensor_name,
'description' : rows[records].description,
'measurements' : []
};
}
geometry_array[record_counter].properties.sensors[sensor_counter].measurements.push
({
'i_measurement_id': rows[records].i_measurement_id,
'measurement_name': rows[records].t_measurement_name,
'value': rows[records].value,
'units': rows[records].units
});
}
//wrapping features with metadata.
var feature_collection = GEOGRAPHY_METADATA;
feature_collection.features = geometry_array;
JSONToFile(feature_collection, 'buoy_features');
});
}
Currently I save the end result to a JSON file because I couldn't get promises to work. The JSON is later used to power a small OpenLayers application, hence the JSON-ification after getting the results.
I am quite sure that getting the data from a database, saving it to file, then accessing it from another process and using it for OpenLayers is a very redundant way to do it, but so far, it is the only one that works.
I know there are a lot of ways to make these functions work better, but I am new to promises and don't know how to work with them outside of most basic functions. Any suggestions how to make this code better are welcome.
All you appear to be missing is a bunch of returns.
Here are skeletonized versions of the two functions, including the necessary returns:
exports.buildBuoyFeaturesJSON = function(conditionA, conditionB) {
return knex.select (...)
^^^^^^
.from(...)
.then(function(response) {
// synchronous stuff
// synchronous stuff
return getMeasurementsAndSensors(keys, geometry_array);
^^^^^^
}).then(function(geometry_array) {
var feature_collection = GEOGRAPHY_METADATA;
feature_collection.features = geometry_array;
return feature_collection;
^^^^^^
});
};
function getMeasurementsAndSensors(keys, geometry_array) {
return knex.select(...)
^^^^^^
.from(...)
...
.whereRaw(...)
.orderByRaw(...)
.then(function(response) {
// heaps of synchronous stuff
// heaps of synchronous stuff
// heaps of synchronous stuff
return geometry_array;
^^^^^^^^^^^^^^^^^^^^^
});
}
I moved the feature_collection part into buildBuoyFeaturesJSON() on the basis that it seems to sit there more logically. If not, then it would be very simple to move it back into getMeasurementsAndSensors().
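With the returns in place, the caller gets a promise for the finished feature collection and can decide what to do with it; a minimal usage sketch (buoys stands for whatever name you require this module under, and JSONToFile is the helper from the question):
buoys.buildBuoyFeaturesJSON(conditionA, conditionB)
    .then(function (feature_collection) {
        JSONToFile(feature_collection, 'buoy_features'); // or hand it straight to the OpenLayers route
    })
    .catch(function (err) {
        console.error(err);
    });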
I have not tried to fix the additional issue highlighted by @vitaly-t.

Best way to migrate GB's of data from MongoDB to Cassandra using nodejs script

I have a big collection in MongoDB. I want to migrate all the data to Cassandra by running some business-logic Node.js scripts on it. What is the best way to do this?
I have made a script in which I get 5000 documents from Mongo in a single request, process the data, and insert the documents into Cassandra. It becomes very slow after 40-50 iterations and CPU usage shows 100%. Is this because of a lot of callbacks happening? I am new to Node.js, so I'm not able to conclude anything.
var cassandra = require('../models/tracking_cassandra');
var TrackingEvents = require('../models/tracking_mongo_events');
var counter = 0;
var incr = 5000;
var final_counter = 0;
var start_point = function (callback){
TrackingEvents.count(function(err, data){
final_counter = data;
TrackingEvents.getEventsByCounter(counter, function(counter, obj) {
var prevId = obj[0].toObject()._id;
getMessagesFromMongo(prevId, callback);
});
});
};
function getMessagesFromMongo(prevId, callback){
counter = counter + incr;
TrackingEvents.getEventsByCounter(counter, function(counter, obj) {
var nextId = obj[0].toObject()._id;
var start_time = new Date();
TrackingEvents.getEventsBtwIds(prevId, nextId, function ( err, userEvents ) {
if(userEvents.length !== 0){
insert_into_cassandra( userEvents, callback );
}else{
console.log('empty data set');
}
});
if(counter >= final_counter){
callback();
}else{
getMessagesFromMongo(nextId, callback);
}
});
};
var insert_into_cassandra = function( events, callback ){
var inserts = 0;
total_documents = total_documents + events.length;
for(var i = 0 ; i< events.length ; i++){
var userEventData = events[i].toObject();
if(typeof userEventData.uid == 'undefined'){
total_nuid ++;
}else{
create_cassandra_query( userEventData );
}
}
};
var create_cassandra_query = function ( eventData ) {
delete eventData._id;
delete eventData[0];
delete eventData.appid;
delete eventData.appversion;
var query = "INSERT INTO userwise_events ";
var keys = "(";
var values = "(";
for(var key in eventData){
if(eventData[key] == null || typeof eventData[key] == 'undefined'){
delete eventData[key];
}
if (eventData.hasOwnProperty(key)) {
keys = keys + key + ', ';
values = values + ':' + key + ', ';
}
if(key != 'uid' && key!= 'date_time' && key != 'etypeId'){
eventData[key] = String(eventData[key]);
}
}
keys = keys.slice(0, -2);
values = values.slice(0, -2);
keys = keys + ")";
values = values + ")";
query = query + keys + " VALUES " + values;
cassandra.trackingCassandraClient.execute(query, eventData, { prepare: true }, function (err, data) {
if(err){
console.log(err);
}
});
};
var start_time = new Date();
start_point(function(res, err){
var end_time = new Date();
var diff = end_time.getTime() - start_time.getTime();
var seconds_diff = diff / 1000;
var totalSec = Math.abs(seconds_diff);
console.log('Total Execution Time : ' + totalSec);
});
process.on('uncaughtException', function (err) {
console.log('Caught exception: ' + err);
});
Is this because of a lot of callbacks happening?
There may be no callbacks at all for all I know - it's impossible to say what the problem is when you didn't include even a single line of your code.
For such a vague question I can only give you general advice: make sure you don't have long-running for or while loops, and don't ever use a blocking system call anywhere other than on the first tick of the event loop. If you don't know what the first tick of the event loop is, then don't use blocking calls at all. Whenever you can, use streams for data - especially if you have lots of it.
100% CPU utilization is a bad sign and should never happen for an I/O-heavy operation like the one you are trying to perform. You should easily be able to handle insane amounts of data, especially when you use streams. Having your process max out the CPU for an inherently I/O-bound operation like moving large amounts of data through a network is a sure sign that you're doing something wrong in your code. What exactly that is will remain a mystery, since you didn't show us even a single line of your code.
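To make the "use streams" advice concrete, here is a minimal sketch of the shape such a migration can take, assuming a recent Node.js with the official mongodb and cassandra-driver packages (the connection details and the Mongo database/collection names are placeholders; the table and column names are taken from the question). The cursor is consumed one document at a time, so memory use stays flat and the event loop never has to chew through a huge in-memory batch:
const { MongoClient } = require('mongodb');
const cassandra = require('cassandra-driver');

async function migrate() {
    const mongo = await MongoClient.connect('mongodb://localhost:27017'); // placeholder URI
    const events = mongo.db('tracking').collection('tracking_events');    // placeholder names
    const client = new cassandra.Client({
        contactPoints: ['127.0.0.1'],
        localDataCenter: 'datacenter1',
        keyspace: 'tracking' // placeholder keyspace
    });

    const insert = 'INSERT INTO userwise_events (uid, date_time, etypeid) VALUES (?, ?, ?)';
    for await (const doc of events.find({})) { // the cursor acts as a stream: one document at a time
        await client.execute(insert, [doc.uid, doc.date_time, doc.etypeId], { prepare: true });
    }

    await mongo.close();
    await client.shutdown();
}

migrate().catch(console.error);
Awaiting each insert keeps the example simple; in practice you would run a bounded number of inserts concurrently (or use the driver's batching) to trade a little memory for throughput.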

Mongoose promise built in but not working?

Or quite possibly I am doing it wrong, in fact, more than likely I am doing it wrong.
Have a table which contains a "tree" of skills, starting at the root level and which may be as deep as ten levels (only two so far), but I want to return it as one big fat JSON structure, so I want to ask the database for each set of data, build my structure, then ask for the next level.
Of course if I just send off my requests using Mongoose, they will come back at any time, as they are all nice asynchronous calls. Normally a good thing.
Looking at the documentation for Mongoose (using 4.1.1) it seems like it has a promise built in, but whenever I try to use it the API call throws a hissy fit and I get a 500 back.
Here is my simple function:
exports.getSkills = function(req,res) {
console.log("Will return tree of all skills");
for (var i = 0; i<10; i++){
var returnData = [];
console.log("Lets get level " + i );
var query = Skill.find({level: i });//The query function
var promise = query.exec; //The promise?
promise.then(function(doc) { //Totally blows up at this point
console.log("Something came back")
return "OK";
});
}
}
The Mongoose documentation on the subject can be found here
http://mongoosejs.com/docs/api.html#promise_Promise
var promise = query.exec;
// =>
var promise = query.exec()
exports.getSkills = function(req, res) {
console.log("Will return tree of all skills");
var p = Skill.find({level: 0}).exec(); // start the chain with the first level
for (var i = 1; i < 10; i++) {
(function (level) { // capture the current value of i for the callbacks below
p = p.then(function (data) {
// deal with the previous level's data
return Skill.find({level: level}).exec(); // reassigning p keeps the queries sequential
});
})(i);
}
p.then(function (data) {
// deal with the data from the last level, then send the response
});
}
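If the ten levels don't actually depend on each other, a minimal alternative sketch (assuming Mongoose 4.x, where .exec() returns a promise, and an environment with a global Promise) is to fire all the queries in parallel and assemble the response once they have all resolved:
exports.getSkills = function (req, res) {
    var levels = [];
    for (var i = 0; i < 10; i++) {
        levels.push(Skill.find({ level: i }).exec()); // one promise per level
    }
    Promise.all(levels).then(function (results) {
        // results[0] holds the root skills, results[1] the next level, and so on
        res.json(results);
    }).catch(function (err) {
        res.status(500).send(err);
    });
};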

http call in backbone promise

Hi, I have a Backbone web app using jQuery and Node.js/Mongo as the server-side framework. I'm having problems making an HTTP GET call inside a forEach loop, with the results of the call being iteratively added to each row of the loop.
var eventid = this.model.get("_id");
var inPromise = $.get("/registrants/list?eventid="+eventid,null,null,"json").then(
function (result){
var temp;
var finalVal = '';
var tempfinalVal = "";
var loop = 0
percentage = 0;
$.each(result.registrants,function(index,registrant){
temp = JSON.parse(registrant.fields);
for (var key in temp) {
if(key =="Email"){
if(temp[key] != ""){
$.get("/stats/registrant?userid="+temp[key]+"&eventid="+eventid,null,null,"json").then(function(result2){
percentage = (result2.Stats.type ===undefined || result2.Stats.type ==null) ? "0": result2.Stats.type;
finalVal +=percentage+"\n";
}).fail(function(){
percentage = "0";
});
}
}else if(key =="eventid"){
loop++;
finalVal = finalVal.slice(0, - 1);
finalVal +='\n';
}
finalVal +=temp[key] + ',';
}
});
//promises.push(inPromise);
}
).done(function(finalVal){
$("#webcast-download-registrants-tn").attr("href",'data:text/csv;charset=utf-8;filename=registration.csv",'+encodeURIComponent(finalVal));
console.log("DONE");
}).fail(function(){
console.log("fail");
});
// promise.done(function () {
// console.log(" PROMISE DONE");
// });
So I loop through a collection, and the last item of the document gets its content from another HTTP call, and when all is done it will create a CSV file. The problem is that the "DONE" text echoes first, then the "CALL" text is displayed.
Rick, your problem is not the simplest, due to:
the need for nested asynchronous gets
the need to build each CSV data row partly synchronously, partly asynchronously.
the need for a mechanism to handle the fulfilment of multiple promises generated in the inner loop.
From what you've tried, I guess you already know that much.
One important thing to note is that you can't rely on for (var key in temp) to deliver properties in any particular order. Only arrays have order.
You might try something like this:
var url = "/stats/registrant",
data = { 'eventid': this.model.get('_id') },
rowTerminator = "\n",
fieldNames = ['firstname','lastname','email','company','score'];
function getScore(email) {
return $.get(url, $.extend({}, data, {'userid':email}), null, "json").then(function(res) {
return res.Stats ? res.Stats.type || 0 : 0;
}, function() {
//ajax failure - assume score == 0
return $.when(0);
});
}
$.get("/registrants/list", data, null, "json").then(function(result) {
var promises = [];//An array in which to accumulate promises of CSV rows
promises.push($.when(fieldNames)); //promise of CSV header row
if(result.registrants) {
$.each(result.registrants, function(index, registrant) {
if(registrant.fields) {
// Synchronously initialize row with firstname, lastname, email and company
// (omitting score for now).
var row = fieldNames.slice(0,-1).map(function(fieldName, i) {
return registrant.fields[fieldName] || '';
});
//`row` remains available to inner functions due to closure
var promise;
if(registrant.fields.Email) {
// Fetch the registrant's score ...
promise = getScore(registrant.fields.Email).then(function(score) {
//... and asynchronously push the score onto row
row.push(score);
return row;
});
} else {
//or synchronously push zero onto row ...
row.push(0);
//... and create a resolved promise
promise = $.when(row);
}
promises.push(promise);//Accumulate promises of CSV data rows (still in array form), in the correct order.
}
});
}
return $.when.apply(null, promises).then(function() {
//Join all the pieces, in nested arrays, together into one long string.
return [].slice.apply(arguments).map(function(row) {
return row.join(); //default glue is ','
}).join(rowTerminator);
});
}).done(function(str) {
$("#webcast-download-registrants-tn").attr("href",'data:text/csv;charset=utf-8;filename=registration.csv",'+encodeURIComponent(str));
console.log("DONE");
}).fail(function() {
console.log("fail");
});
partially tested
See comments in code for explanation and please ask if there's anything you don't follow.
