Update variable value in module - node.js

I'm a real newbie in Node.js, so please bear with me.
I'm trying to use an external file as a module so I can use it in other files. The project is bigger than this, but let's say my module is:
var mysql = require('mysql');

var Tools = module.exports = {
    result_arr: [],
    object_data: {
        times:  [],
        temps1: [],
        temps2: [],
        temps3: [],
        temps4: [],
        levels: [],
        flows:  []
    },
    getLastNRows: function (whereIsData, DB_info, table, NRows) {
        if (whereIsData == "MySQL") {
            function setValue(value) {
                Tools.result_arr = value;
            }
            function dataArray2Object(array_data) {
                Tools.object_data.times  = array_data.map(row => row.timestamp);
                Tools.object_data.temps1 = array_data.map(row => row.temp1);
                Tools.object_data.temps2 = array_data.map(row => row.temp2);
                Tools.object_data.temps3 = array_data.map(row => row.temp3);
                Tools.object_data.temps4 = array_data.map(row => row.temp4);
                Tools.object_data.levels = array_data.map(row => row.level_ice_bank);
                Tools.object_data.flows  = array_data.map(row => row.flow);
            }
            var queryString = "SELECT timestamp, temp1, temp2, temp3, temp4, level_ice_bank, flow FROM " +
                table + " ORDER BY id DESC LIMIT " + NRows + ";";
            var connection = mysql.createConnection(DB_info);
            connection.connect(function (err) {
                console.log("connected");
                if (err) throw err;
            });
            connection.query(queryString, function (err, rows) {
                console.log("queried");
                if (err) throw err;
                setValue(rows);
                dataArray2Object(Tools.result_arr);
                console.log(Tools.result_arr);
                console.log(Tools.object_data);
            });
        } else {
            console.log("Function only accepts data stored in MySQL.\n(u still have to improve...)");
            return;
        }
    }
};
The object_data variable is supposed to be used in a main file: whenever I call getLastNRows, I expect object_data to be updated by the operations in getLastNRows. The main file would be:
var tools = require('./tools');

var where2save = "MySQL";
var info_db = {
    host:     "127.0.0.1",
    user:     "root",
    password: "xxxx",
    database: "mydb",
    port:     3306
};
var table = "tempdata";
var NRows = 4;

tools.getLastNRows(where2save, info_db, table, NRows);
console.log(tools.object_data);
What is observed is that tools.object_data is, in fact, not updated by getLastNRows in the main file, although console.log(Tools.object_data); in the tools.js (module) file logs the updated values. So my question is:
How can I make getLastNRows update tools.object_data (which is empty when created) in the main file?

Is getLastNRows asynchronous? Because it seems to me that that is the cause of the problem.
Main calls getLastNRows, which calls connection.query; the query runs asynchronously, so execution in main immediately continues to the console.log before tools.object_data has been updated.
Try:
getLastNRows: function (whereIsData, DB_info, table, NRows, cb) {
    // ...
    connection.query(queryString, function (err, rows) {
        console.log("queried");
        if (err) throw err;
        setValue(rows);
        dataArray2Object(Tools.result_arr);
        console.log(Tools.result_arr);
        console.log(Tools.object_data);
        cb();
    });
    // ...
}

// in main
tools.getLastNRows(where2save, info_db, table, NRows, function () {
    console.log(tools.object_data);
});
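For completeness, the same fix can be wrapped in a promise rather than a raw callback. A minimal sketch (not from the original answer; it assumes the callback-taking getLastNRows shown above):

// Hypothetical wrapper: resolves once getLastNRows has populated
// tools.object_data, so main can chain then() instead of nesting callbacks.
function getLastNRowsAsync(whereIsData, DB_info, table, NRows) {
    return new Promise(function (resolve) {
        tools.getLastNRows(whereIsData, DB_info, table, NRows, function () {
            resolve(tools.object_data);
        });
    });
}

// in main
getLastNRowsAsync(where2save, info_db, table, NRows).then(function (object_data) {
    console.log(object_data);
});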

Related

Node.js Readable stream slows over time, CPU usage falls

I am trying to launch a cluster that will stream files (newline-delimited JSON) from Google Cloud Storage and transform each row after fetching data from MongoDB. After transforming the row, I want to store it in Google's BigQuery, 10,000 rows at a time. All of this is working fine, but the issue is that the rate at which the streamed files are processed decreases significantly over time.
I have set up the Node application on one server and MongoDB on another, both 8-core machines with 30 GB of RAM. When the script is executed, the CPU usage on the application server and the MongoDB server is initially around 70%-75%. After 30 minutes the CPU usage falls to 10%, and finally to 1%. The application generates no exceptions, and in the application log I can see that it finished processing a few files and took up new files for processing. One such execution started a little after 3:00 PM and ran until almost 5:20 PM.
var cluster = require('cluster'),
    os = require('os'),
    numCPUs = os.cpus().length,
    async = require('async'),
    fs = require('fs'),
    google = require('googleapis'),
    bigqueryV2 = google.bigquery('v2'),
    gcs = require('@google-cloud/storage')({
        projectId: 'someproject',
        keyFilename: __dirname + '/auth.json'
    }),
    // dataset = bigquery.dataset('somedataset'), // unused below, and `bigquery` is never defined
    bucket = gcs.bucket('somebucket.appspot.com'),
    JSONStream = require('JSONStream'),
    Transform = require('stream').Transform,
    MongoClient = require('mongodb').MongoClient,
    mongoUrl = 'mongodb://localhost:27017/bigquery',
    noop = function () {}, // used to drive the raw read stream below
    mDb,
    groupA,
    groupB;

var rows = [],
    rowsLen = 0;
function transformer() {
    var t = new Transform({ objectMode: true });
    t._transform = function (row, encoding, cb) {
        // Get some information from MongoDB and attach it to the row
        if (row) {
            groupA.findOne({
                'geometry': { $geoIntersects: { $geometry: { type: 'Point', coordinates: [row.lon, row.lat] } } }
            }, {
                fields: { 'properties.OA_SA': 1, _id: 0 }
            }, function (err, a) {
                if (err) return cb();
                groupB.findOne({
                    'geometry': { $geoIntersects: { $geometry: { type: 'Point', coordinates: [row.lon, row.lat] } } }
                }, {
                    fields: { 'properties.WZ11CD': 1, _id: 0 }
                }, function (err, b) {
                    if (err) return cb();
                    row.groupA = a ? a.properties.OA_SA : null;
                    row.groupB = b ? b.properties.WZ11CD : null;
                    // cache processed rows in memory
                    rows[rowsLen++] = { json: row };
                    if (rowsLen >= 10000) {
                        // batch insert rows into the BigQuery table and free memory
                        log('inserting 10000');
                        insertRowsAsStream(rows.splice(0, 10000));
                        rowsLen = rows.length;
                    }
                    cb();
                });
            });
        } else {
            cb();
        }
    };
    return t;
}

var log = function (str) {
    console.log(str);
};
function insertRowsAsStream(rows, callback) {
    bigqueryV2.tabledata.insertAll({
        "projectId": 'someproject',
        "datasetId": 'somedataset',
        "tableId": 'sometable',
        "resource": {
            "kind": "bigquery#tableDataInsertAllRequest",
            "rows": rows
        }
    }, function (err, res) {
        if (res && res.insertErrors && res.insertErrors.length) {
            console.log(res.insertErrors[0].errors);
            err = err || new Error(JSON.stringify(res.insertErrors));
        }
    });
}
function startStream(fileName, cb) {
    // stream a file from Google Cloud Storage
    var file = bucket.file(fileName),
        called = false;
    log(`Processing file ${fileName}`);
    file.createReadStream()
        .on('data', noop)
        .on('end', function () {
            if (!called) {
                called = true;
                cb();
            }
        })
        .pipe(JSONStream.parse())
        .pipe(transformer())
        .on('finish', function () {
            log('transformation ended');
            if (!called) {
                called = true;
                cb();
            }
        });
}

function processFiles(files, cpuIdentifier) {
    if (files.length == 0) return;
    var fn = [];
    for (var i = 0; i < files.length; i++) {
        fn.push(function (cb) {
            startStream(files.pop(), cb);
        });
    }
    // process 3 files in parallel
    async.parallelLimit(fn, 3, function () {
        log(`child process ${cpuIdentifier} completed the task`);
        fs.appendFile(__dirname + '/complete_count.txt', '1');
    });
}
if (cluster.isMaster) {
    for (var ii = 0; ii < numCPUs; ii++) {
        cluster.fork();
    }
} else {
    MongoClient.connect(mongoUrl, function (err, db) {
        if (err) throw err;
        mDb = db;
        groupA = mDb.collection('groupageo');
        groupB = mDb.collection('groupbgeo');
        processFiles(files, process.pid);
        // `files` is an array of file names
        // each file is in newline-delimited JSON format
        // ["1478854974993/000000000000.json","1478854974993/000000000001.json","1478854974993/000000000002.json","1478854974993/000000000003.json","1478854974993/000000000004.json","1478854974993/000000000005.json"]
    });
}
Okay, I have found the culprit! The Google APIs Node.js client library makes use of a module called "stream-events", which implements Streams 0.8. Streams 0.8 does not control the rate at which it emits 'data' events based on the consumer's ability to consume data; that rate-controlling feature was introduced in Streams 1.0. So this essentially meant that the readable stream was throwing data at MongoDB at a rate it was not able to process.
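For illustration (not from the original answer): with an old-style 'data'-event stream you have to throttle the producer yourself, whereas Streams 1.0+ readables pause automatically when the destination's write buffer fills. A minimal sketch of manual throttling, where `source` is a hypothetical 0.8-style stream and `slowConsumer` a hypothetical async consumer:

// Manual backpressure for a stream that only emits 'data' events (sketch).
// Without the pause()/resume() calls, the producer outruns the consumer,
// which is essentially what happened with stream-events here.
source.on('data', function (chunk) {
    source.pause();                  // stop the flood while we work
    slowConsumer(chunk, function () {
        source.resume();             // ask for more only when ready
    });
});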
Solution:
I used the 'request' module instead of Google's client library. I supplied a signed URL to the request module, which in turn fetched the results as a stream that I could pipe into my transformer.
Take-away:
Always check the modules you use for the stream versions they rely on.
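A minimal sketch of that workaround (not the original code; it reuses bucket, JSONStream, and transformer from the question, and the expiry value is illustrative):

var request = require('request');

// Obtain a signed read URL for the GCS file
// (@google-cloud/storage exposes file.getSignedUrl for this).
bucket.file(fileName).getSignedUrl({
    action: 'read',
    expires: Date.now() + 60 * 60 * 1000 // valid for one hour
}, function (err, signedUrl) {
    if (err) throw err;
    // request() returns a Streams2-style readable, so backpressure
    // from the transformer propagates correctly.
    request(signedUrl)
        .pipe(JSONStream.parse())
        .pipe(transformer());
});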

How to perform mass inserts into mongodb using NodeJS

I have to insert about 1,000,000 documents into MongoDB using Node.js.
I'm generating these documents in a for loop, storing them in an array before finally inserting them into MongoDB.
var codeArray = new Array();
for (var i = 0; i < 1000000; i++) {
    var token = strNpm.generate();
    var now = moment().format('YYYYMMDD hhmmss');
    var doc1 = {
        id: token,
        Discount_strId: "pending",
        Promotion_strCode: token,
        Promotion_strStatus: "I",
        Promotion_dtmGeneratedDate: now,
        User_strLogin: "test",
        Promotion_strMode: "S",
        Promotion_dtmValidFrom: "pending",
        Promotion_dtmValidTill: "pending",
        LastModified_dtmStamp: now
    };
    codeArray.push(doc1);
}
db.collection('ClPromoCodeMaster').insert(codeArray, function (err, result) {
    if (err) {
        console.log(err);
    } else {
        console.log('Inserted Records - ', result.ops.length);
    }
});
The problem I'm facing is that Mongo has an insert limit of 16 MB, so I can't insert the entire array at once.
Please suggest the most optimal solution.
The main problem is the request size, not the document size, but it amounts to the same limitation. Bulk operations and the async library with async.whilst will handle this:
var bulk = db.collection('ClPromoCodeMaster').initializeOrderedBulkOp(),
    i = 0;

async.whilst(
    function () { return i < 1000000; },
    function (callback) {
        var token = strNpm.generate();
        var now = moment().format('YYYYMMDD hhmmss');
        var doc = {
            id: token,
            Discount_strId: "pending",
            Promotion_strCode: token,
            Promotion_strStatus: "I",
            Promotion_dtmGeneratedDate: now,
            User_strLogin: "test",
            Promotion_strMode: "S",
            Promotion_dtmValidFrom: "pending",
            Promotion_dtmValidTill: "pending",
            LastModified_dtmStamp: now
        };
        bulk.insert(doc);
        i++;
        // Drain every 1000
        if (i % 1000 == 0) {
            bulk.execute(function (err, response) {
                bulk = db.collection('ClPromoCodeMaster').initializeOrderedBulkOp();
                callback(err);
            });
        } else {
            callback();
        }
    },
    function (err) {
        if (err) throw err;
        console.log("done");
    }
);
I should note that regardless, there is an internal limit on bulk operations of 1000 operations per batch. You can submit larger sizes, but the driver is just going to break them up and still submit batches of 1000.
1000 is a good number to stay at, though, since it is already in line with how the request will be handled, as well as being a reasonable number of things to hold in memory before draining the request queue and sending to the server.
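On newer drivers the same idea can be expressed with a chunked insertMany; a minimal sketch (not from the answer above), assuming `db` is a connected database handle and `docs` is the pre-built array:

// Insert docs in slices of 1000 to stay within the per-batch limit (sketch).
async function insertInChunks(db, docs, chunkSize = 1000) {
    for (let start = 0; start < docs.length; start += chunkSize) {
        await db.collection('ClPromoCodeMaster')
            .insertMany(docs.slice(start, start + chunkSize), { ordered: true });
    }
}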
For inserting millions of records at a time, create a Node.js child process fork and use the MongoDB bulk API.
Child process creation (index.js):
const { fork } = require("child_process");

let counter = 1;

function createProcess(data) {
    const worker = fork("./dbOperation");
    worker.send(data);
    worker.on("message", (msg) => {
        console.log("Worker Message :", counter, msg);
        counter++;
    });
}

function bulkSaveUser(records) {
    const singleBatchCount = 10000; // Save 10,000 records per hit
    const noOfProcess = Math.ceil(records / singleBatchCount);
    let data = {};
    console.log("No of Process :", noOfProcess);
    for (let index = 1; index <= noOfProcess; index++) {
        data.startCount = (index == 1) ? index : (((index - 1) * singleBatchCount) + 1);
        data.endCount = index * singleBatchCount;
        createProcess(data);
    }
}

bulkSaveUser(1500000);
DB Operation (dbOperation.js)
const MongoClient = require('mongodb').MongoClient;
// Collection Name
const collectionName = "";
// DB Connection String
const connString = "";

process.on("message", (msg) => {
    console.log("Initialize Child Process", msg);
    const { startCount, endCount } = msg;
    inputStudents(startCount, endCount);
});

function initConnection() {
    return new Promise(function (r, e) {
        MongoClient.connect(connString, function (err, db) {
            if (err) e(err);
            r(db);
        });
    });
}

function inputStudents(startCount, endCount) {
    let bulkData = [];
    for (let index = startCount; index <= endCount; index++) {
        let types = ['exam', 'quiz', 'homework', 'homework'];
        let scores = [];
        // and each class has 4 grades
        for (let j = 0; j < 4; j++) {
            scores.push({ 'type': types[j], 'score': Math.random() * 100 });
        }
        // there are 500 different classes that they can take
        let class_id = Math.floor(Math.random() * 501); // get a class id between 0 and 500
        let record = { 'student_id': index, 'scores': scores, 'class_id': class_id };
        bulkData.push({ insertOne: { "document": record } });
    }
    initConnection()
        .then((db) => {
            const studentDb = db.db("student");
            const collection = studentDb.collection(collectionName);
            console.log("Bulk Data :", bulkData.length);
            collection.bulkWrite(bulkData, function (err, res) {
                if (err) throw err;
                //console.log("Connected Successfully",res);
                process.send("Saved Successfully");
                db.close();
            });
        })
        .catch((err) => { console.log("Err :", err); });
}
Sample project to insert millions of records into MongoDB using a child process fork.

How can I write a Mocha test for the following function?

I want to write a test for this Node.js function. It has two arguments, request and response. I have set up the request variable, but I don't know how to set up the response variable.
function addCustomerData(request, response) {
    common.getCustomerByMobile(request.param('mobile_phone'), function (customerdata) {
        if (!customerdata) {
            var areaInterest = request.param('area_interest');
            var customerInfo = {
                userType:    request.param('userType'),
                firstName:   request.param('first_name'),
                middleName:  request.param('middle_name'),
                lastName:    request.param('last_name'),
                email:       request.param('email'),
                mobilePhone: request.param('mobile_phone'),
                uniqueName:  request.param('user_name'),
                company:     request.param('company')
            };
            if (customerInfo.email) {
                customerInfo.email = customerInfo.email.toLowerCase();
            }
            if (customerInfo.uniqueName) {
                customerInfo.uniqueName = customerInfo.uniqueName.toLowerCase();
            }
            if (areaInterest) {
                customerInfo.areaInterest = '{' + areaInterest + '}';
            } else {
                areaInterest = null;
            }
            addCustomer(request, response, customerInfo, function (data) {
                request.session.userId = data;
                return response.send({ success: true, message: 'Inserted successfully' });
            });
        } else {
            return response.send({ success: false, message: 'User with this mobile number already exists' });
        }
    });
}
I wrote the test as follows:
describe('signup', function () {
    describe('#addCustomer()', function () {
        before(function (done) {
            request = {};
            request.data = {};
            request.session = {};
            request.data['userType'] = '3';
            request.data['first_name'] = 'Shiji';
            request.data['middle_name'] = '';
            request.data['last_name'] = 'George';
            request.data['email'] = 'shiji@lastplot.com';
            request.data['mobile_phone'] = '5544332333';
            request.data['user_name'] = 'shiji';
            request.session['imageArray'] = [];
            request.param = function (key) {
                // Look up key in data
                return this.data[key];
            };
            request1 = {};
            request1.data = {};
            request1.session = {};
            request1.data['area_interest'] = ["aluva", "ernakulam"];
            request1.data['userType'] = '1';
            request1.data['first_name'] = 'Hari';
            request1.data['middle_name'] = 'G';
            request1.data['last_name'] = 'Ganesh';
            request1.data['email'] = 'hari@lastplot.com';
            request1.data['mobile_phone'] = '5544332321';
            request1.data['user_name'] = 'hariganesh';
            request1.data['company'] = 'HG Realestate';
            request1.session['imageArray'] = [];
            request1.param = function (key) {
                // Look up key in data
                return this.data[key];
            };
            done();
        });
        it('It should list the matching properties', function (done) {
            async.parallel([
                function (callback) {
                    signup.addCustomerData(request, response, function (result, err) {
                        should.not.exist(err);
                        should.exists(result);
                        callback();
                    });
                },
                function (callback) {
                    signup.addCustomerData(request1, response, function (result, err) {
                        should.not.exist(err);
                        should.exists(result);
                        callback();
                    });
                }
            ], function () {
                done();
            });
        });
    });
});
But I got the error that response has no method send().
Thanks in advance.
Your addCustomerData function does not have a callback; it just calls response.send(). You need to mock the response object as well as the send method, and put your tests inside it. But you won't be able to use async.parallel(), because, as already mentioned, your function has no callback parameter. If you're testing request/response functions, I suggest you look into Supertest (https://github.com/visionmedia/supertest), which is widely used for cases like this.
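A minimal sketch of such a mock (illustrative only; it assumes the signup module and the request fixture from the question):

it('inserts a new customer', function (done) {
    // Mock response: the assertions run inside send(), which the function
    // under test calls instead of a callback.
    var response = {
        send: function (body) {
            body.success.should.equal(true);
            body.message.should.equal('Inserted successfully');
            done();
        }
    };
    signup.addCustomerData(request, response);
});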

Loopback discoverAndBuildModels not generating models

I'm trying to get Loopback to discover and build my first table. I've used the simple example on their page at the bottom here:
http://docs.strongloop.com/display/LB/Database+discovery+API#DatabasediscoveryAPI-Exampleofbuildingmodelsviadiscovery
and I see the output of the table I'm discovering, but the API Explorer doesn't show the table or any newly generated endpoints. Also, the model-config.json file is not updated with the new table object. Here is the basic section of the code, run on server start:
var loopback = require('loopback');
var boot = require('loopback-boot');
var DataSource = require('loopback-datasource-juggler').DataSource;
var mysqlSource = require('./datasources.json');
var dataSource = new DataSource('mssql', mysqlSource.mysqlserver);
var app = module.exports = loopback();

// Set up the /favicon.ico
app.use(loopback.favicon());

// request pre-processing middleware
app.use(loopback.compress());

// -- Add your pre-processing middleware here --

dataSource.discoverAndBuildModels('CATS', { owner: 'mamacat' }, function (err, models) {
    models.Cat.find(function (err, cat) {
        if (err) {
            console.error(err);
        } else {
            console.log(cat);
        }
        dataSource.disconnect();
    });
});

// boot scripts mount components like REST API
boot(app, __dirname);
To summarize, this runs, no errors. But no new models show on http://localhost:3000/explorer
It seems that the discovery script only shows the output and doesn't create the model files. I found some instructions in the LoopBack docs:
http://docs.strongloop.com/display/public/LB/Discovering+models+from+relational+databases
In the Basic Procedure section, the second step says:
2. Use fs.writeFile() to save the output in common/models/model-name.json.
So you can try the following approach:
Set up your MySQL data in the yourloopbackproject/server/datasources.json file:
{
  "db": {
    "name": "db",
    "connector": "memory"
  },
  "accountDs": {
    "host": "mysqlServerName",
    "port": 3306,
    "database": "databaseName",
    "username": "username",
    "password": "password!",
    "name": "accountDs",
    "connector": "mysql"
  }
}
Create the models folder if it doesn't exist: yourloopbackproject/common/models.
Create a discovery-and-build.js script in the yourloopbackproject/server/bin folder:
var path = require('path');
var fs = require('fs');
var app = require(path.resolve(__dirname, '../server'));
var outputPath = path.resolve(__dirname, '../../common/models');
var dataSource = app.dataSources.accountDs;

function schemaCB(err, schema) {
    if (schema) {
        console.log("Auto discovery success: " + schema.name);
        var outputName = outputPath + '/' + schema.name + '.json';
        fs.writeFile(outputName, JSON.stringify(schema, null, 2), function (err) {
            if (err) {
                console.log(err);
            } else {
                console.log("JSON saved to " + outputName);
            }
        });
    }
    if (err) {
        console.error(err);
        return;
    }
    return;
}

dataSource.discoverSchema('tableName', { schema: 'schemaName' }, schemaCB);
This script is based on: http://www.reddit.com/r/strongloop/comments/2upy76/autodiscoveryjs_recipe/
After the script executes, you will find a .json file in the models folder. Go to step 3 of the Basic Procedure section:
http://docs.strongloop.com/display/public/LB/Discovering+models+from+relational+databases
Follow these steps to expose your model over REST:
http://docs.strongloop.com/display/public/LB/Exposing+models+over+REST
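For reference, that step amounts to registering the new model in server/model-config.json so it is attached to the datasource and exposed over REST. A minimal sketch of the entry, assuming the discovered schema was saved as account.json:

{
  "account": {
    "dataSource": "accountDs",
    "public": true
  }
}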
I hope this helps!
Use Arc for this.
Run slc arc from the project folder and it will open the GUI tool called Arc in your default browser. If you've not already registered, sign up and log in. You will be directed to StrongLoop's GUI tool, Arc. Select your model from the list in the left pane. You'll see Save and Migrate buttons. Just click the Migrate button and your table will be created from the model (within milliseconds!).
Cheers!
The discovery API is currently used only to discover the schema, not to create models.
Please use the following project to create all the models, with one-to-one and one-to-many relationships:
https://github.com/savsharma2/loopback-sql-create-model-with-relation/
Building off of @Underskay's answer, I did something like:
var fs = require('fs');
var app = require(__dirname + '/server/server');

function makePromise(f, parent) {
    return function (...args) {
        return new Promise((resolve, reject) => {
            f.call(parent, ...args, (err, ...data) => {
                if (err) return reject(err);
                resolve(data.length === 1 ? data[0] : data);
            });
        });
    };
}

var readFile = makePromise(fs.readFile, fs);
var writeFile = makePromise(fs.writeFile, fs);

function writeSchemas(schemas) {
    return Promise.all(schemas.map(data => {
        var schema = data[Object.keys(data)[0]];
        return writeFile('common/models/' + schema.name + '.json', JSON.stringify(schema, null, '\t'));
    }))
        .then(() => readFile('server/model-config.json'))
        .then(JSON.parse)
        .then(conf => {
            for (let schema of schemas)
                conf[schema[Object.keys(schema)[0]].name] = { "dataSource": "mysql" };
            return conf;
        })
        .then(conf => writeFile('server/model-config.json', JSON.stringify(conf, null, '\t')));
}

function getSchemas(ds) {
    var discoverSchemas = makePromise(ds.discoverSchemas, ds);
    return makePromise(ds.discoverModelDefinitions, ds)({})
        .then(tables => Promise.all(tables.map(t => discoverSchemas(t.name, { relations: true }))))
        .then(data => { ds.disconnect(); return data; });
}

Promise.resolve(app.datasources.mysql)
    .then(ds => getSchemas(ds))
    .then(schemas => writeSchemas(schemas))
    .catch(err => console.error(err));

Is it possible to build a dynamic task list in nodejs Async (waterfall, series, etc...)

I am pulling information from some collections in Mongo that contain node and edge data. First I must get the node so I can grab its edges. Once I have a list of edges, I then go back out and grab more nodes (etc., based on a depth value). The following code is a loose example of how I am attempting to use async.waterfall and the task list.
Initially I have only a single task, but once I make my first query I add to the task array. Unfortunately this does not seem to register with async, and it does not continue to process the tasks I am adding.
Is there a better way to do this?
var async = require('async')
var mongoose = require('mongoose')
var _ = require('underscore')

var Client = this.Mongo.connect(/* path to mongo */)

var Node = mongoose.Schema({
    id: String,
    graph_id: String
})

var Edge = mongoose.Schema({
    id: String,
    source_id: String,
    destination_id: String
})

var Nodes = Client.model('myNode', Node)
var Edges = Client.model('myEdge', Edge)

var round = 1
var depth = 2

var query = {
    node: {
        id: '12345'
    },
    edge: {
        id: '12345'
    }
}

var addTask = function (Nodes, Edges, query, round, depth) {
    return function (callback) {
        queryData(Nodes, Edges, query, function (err, node_list) {
            if (depth > round) {
                round++
                function_array.push(addTask(Nodes, Edges, query, round, depth))
            }
        })
    }
}

var queryData = function (Nodes, Edges, query, cb) {
    async.waterfall([
        function (callback) {
            Nodes.find(query.node, function (err, nodes) {
                var node_keys = _.map(nodes, function (node) {
                    return node.id
                })
                callback(null, nodes, node_keys)
            })
        },
        function (nodes, node_keys, callback) {
            query.edge.$or = [{ 'source_id': { $in: node_keys } }, { 'destination_id': { $in: node_keys } }]
            Edges.find(query.edge, function (err, edges) {
                var edge_keys = _.map(edges, function (edge) {
                    if (edge['_doc']['source_id'] != query.node.id) {
                        return edge['_doc']['source_id']
                    } else {
                        return edge['_doc']['destination_id']
                    }
                })
                callback(null, nodes, edges, node_keys, edge_keys)
            })
        }
    ], function (err, nodes, edges, node_keys, edge_keys) {
        // update the results object then...
        cb(null, _.uniq(edge_keys))
    })
}

var function_array = []
function_array.push(addTask(Nodes, Edges, query, round, depth))

async.waterfall(function_array, function (err) {
    Client.disconnect()
    // this should have run more than just the initial task but does not
})
--------------------- UPDATE ---------------------------
So after playing around with trying to get async.waterfall or async.series to do this by adding trailing functions, I decided to switch to async.whilst and am now happy with the solution.
function GraphObject() {
    this.function_array = []
}

GraphObject.prototype.doStuff = function () {
    this.function_array.push(this.buildFunction(100))
    this.runTasks(function (err) {
        console.log('done with all tasks')
    })
}

GraphObject.prototype.buildFunction = function (times) {
    return function (cb) {
        if (times != 0) {
            this.function_array.push(this.buildFunction(times - 1))
        }
        cb(null)
    }
}

GraphObject.prototype.runTasks = function (cb) {
    var tasks_run = 0
    async.whilst(
        function () {
            return this.function_array.length > 0
        }.bind(this),
        function (callback) {
            var func = this.function_array.shift()
            func.call(this, function (err) {
                tasks_run++
                callback(err)
            })
        }.bind(this),
        function (err) {
            console.log('runTasks ran ' + tasks_run + ' tasks')
            if (err) {
                return cb(500)
            }
            cb(null)
        }.bind(this)
    )
}
A task in your function_array can only add a new task to the array provided it is NOT the last task in the array.
In your case, your function_array contained only one task. That task cannot itself add additional tasks, since it's the last task.
The solution is to have two tasks in the array: a startTask to bootstrap the process, and a finalTask that is more of a dummy task. In that case:
function_array = [startTask, finalTask];
Then startTask would add taskA, taskA would add taskB, taskB would add taskC, and eventually:
function_array = [startTask, taskA, taskB, taskC, finalTask];
The sample code below illustrates the concept.
var async = require('async');
var max = 6;

var nodeTask = function (taskId, value, callback) {
    var r = Math.floor(Math.random() * 20) + 1;
    console.log("From Node Task %d: %d", taskId, r);
    // add an edge task
    if (taskId < max) {
        function_array.splice(function_array.length - 1, 0, edgeTask);
    }
    callback(null, taskId + 1, value + r);
};

var edgeTask = function (taskId, value, callback) {
    var r = Math.floor(Math.random() * 20) + 1;
    console.log("From Edge Task %d: %d", taskId, r);
    // add a node task
    if (taskId < max) {
        function_array.splice(function_array.length - 1, 0, nodeTask);
    }
    callback(null, taskId + 1, value + r);
};

var startTask = function (callback) {
    function_array.splice(function_array.length - 1, 0, nodeTask);
    callback(null, 1, 0);
};

var finalTask = function (taskId, value, callback) {
    callback(null, value);
};

var function_array = [startTask, finalTask];

async.waterfall(function_array, function (err, result) {
    console.log("Sum is ", result);
});
