Getting a 503 error with NodeJS and ExpressJS - node.js

I get a 503 error using NodeJS. My function works in localhost but on my remote server I get a 503 error.
Functions :
// Handles POST /edit/block/:blockname. For the 'themeparam' block it rebuilds
// the site stylesheet from the SCSS partials; for 'robotparam' it rewrites
// robots.txt. In all cases the edited values are persisted to options.json via
// editJsonFile.
// FIXES: readdir errors were ignored (crashing on files.forEach), and a
// sass.render error still dereferenced `result.css` while `result` was
// undefined — an uncaught crash like that surfaces as a 503 behind Apache.
function postEdit(request, response) {
    var updatedParams = request.body.updatedParams,
        blockName = request.params.blockname,
        primaryColor, secondaryColor, variables, scssStr;
    if (blockName === 'themeparam') {
        primaryColor = updatedParams.primary_color;
        secondaryColor = updatedParams.secondary_color;
        variables = updatedParams.variables;
        if (primaryColor || secondaryColor || variables) {
            // Assemble one big SCSS string: functions + settings first so the
            // color overrides below can rewrite the variable declarations.
            scssStr = fs.readFileSync('./public/sass/_functions.scss', 'utf8');
            scssStr += fs.readFileSync('./public/sass/_settings.scss', 'utf8');
            if (primaryColor) {
                scssStr = scssStr.replace(/\$main-color:.+;/g, '$main-color: ' + primaryColor + ';');
            }
            if (secondaryColor) {
                scssStr = scssStr.replace(/\$secondary-color:.+;/g, '$secondary-color: ' + secondaryColor + ';');
            }
            scssStr += fs.readFileSync('./public/sass/app.scss', 'utf8');
            // Partials are inlined below, so strip import directives.
            scssStr = scssStr.replace(/#import.+;/g, '');
            if (variables) {
                scssStr += variables;
            }
            fs.readdir('./public/sass/components', function(err, files) {
                if (err) {
                    // Previously ignored: a missing directory crashed on files.forEach.
                    console.log(err);
                    return;
                }
                files.forEach(function(file) {
                    scssStr += fs.readFileSync('./public/sass/components/' + file, 'utf8');
                });
                fs.readdir('./public/sass/shortcodes', function(err, shortcodeFiles) {
                    if (err) {
                        console.log(err);
                        return;
                    }
                    shortcodeFiles.forEach(function(shortcodeFile) {
                        scssStr += fs.readFileSync('./public/sass/shortcodes/' + shortcodeFile, 'utf8');
                    });
                    fs.writeFileSync('./public/uploads/theme.scss', scssStr);
                    sass.render({
                        file: './public/uploads/theme.scss'
                    }, function(err, result) {
                        if (err) {
                            // On render failure `result` is undefined; bail out
                            // instead of crashing on result.css below.
                            console.log(err);
                            return;
                        }
                        fs.writeFileSync('./public/uploads/theme.css', result.css);
                    });
                });
            });
        } else {
            // No theme values supplied: remove any previously generated theme.
            fs.stat('./public/uploads/theme.scss', function(err) {
                if (err) {
                    console.log(err);
                }
                if (!err) {
                    fs.unlinkSync('./public/uploads/theme.scss');
                    fs.unlinkSync('./public/uploads/theme.css');
                }
            });
        }
    }
    if (blockName === 'robotparam') {
        fs.writeFileSync('./public/robots.txt', updatedParams.robot);
    }
    // Persist the edited parameters and redirect back to the edit page.
    editJsonFile(request, response, './configs/options.json', function(options) {
        _.forEach(updatedParams, function(value, name) {
            var blockParam = _.find(options.blocks, { name: blockName }),
                param = _.find(blockParam.params, { name: name });
            param.value = value;
        });
    }, OptionController.baseUrl + '/edit/block/' + blockName);
}
// Reads a JSON file, lets `updater` mutate the parsed object, writes it back
// pretty-printed, then flashes a success message and redirects.
// FIX: errors no longer `throw` inside the async callbacks — an exception
// thrown in an fs callback is uncatchable, kills the whole Node process, and
// shows up as a 503 behind the Apache proxy. Errors are now logged and
// answered on the response instead.
function editJsonFile(request, response, jsonFile, updater, redirectUrl) {
    fs.readFile(jsonFile, 'utf8', function(err, data) {
        if (err) {
            console.log(err);
            response.status(500).send('Could not read ' + jsonFile);
            return;
        }
        var options = JSON.parse(data);
        updater(options);
        fs.writeFile(jsonFile, JSON.stringify(options, null, 4), function(err) {
            if (err) {
                console.log(err);
                response.status(500).send('Could not write ' + jsonFile);
                return;
            }
            request.flash('success', 'Done !');
            response.redirect(redirectUrl);
        });
    });
}
Error 503 :
Service Unavailable
The server is temporarily unable to service your request due to maintenance downtime or capacity problems. Please try again later.
Apache/2.4.7 (Ubuntu) Server at ****** Port 443
Is there a way to see the underlying error message so I can find the issue?

Related

Error: NJS-012: encountered invalid bind data type in parameter 2

I have searched for solutions to this error and found some answers, but none of them helped me fix it: Error: NJS-012: encountered invalid bind data type in parameter 2. Perhaps the same error can occur in different scenarios.
Stored procedure definition
-- Returns all meal types (mealtypeid, description) through an OUT ref cursor.
-- The Node.js oracledb layer binds p_DataSource as dbParams.CURSOR with
-- direction BIND_OUT and iterates the result set client-side.
create or replace PROCEDURE SP_MEAL_GETMEALTYPES
(
p_DataSource OUT Sys_RefCursor
)
AS
BEGIN
OPEN p_DataSource FOR
select mealtypeid,description from mealtypes;
END;
File name: menusStoredProc.js
"use strict";
var dbParams = require('../../oracle/dbParams');
// Catalog of stored-procedure descriptors used by the oracle helper layer.
// SP_USER_GETMENUS maps onto the SP_MEAL_GETMEALTYPES procedure: it declares
// the single OUT ref-cursor bind plus the column names of the returned rows.
function storedProcs() {
    var cursorBind = {
        val: null,
        type: dbParams.CURSOR,
        dir: dbParams.BIND_OUT
    };
    this.SP_USER_GETMENUS = {
        name: 'sp_meal_getmealtypes',
        params: {
            dataSource: cursorBind
        },
        resultSetColumns: ['mealTypeId', 'description']
    };
}
module.exports = storedProcs;
File name: menus.js
var express = require('express');
var MenusStoreProc = require('../storedProcedures/menusStoredProc');
var oraDbAssist = require('../../oracle/oracleDbAssist');
var router = express.Router();
// GET /getmenus — opens a DB connection, runs the SP_USER_GETMENUS stored
// procedure and returns the menu rows as JSON.
// FIXES: res.json() already serialises its argument, so passing
// JSON.stringify(menus) double-encoded the payload (clients got a quoted
// string, not an array). Also, errors previously only hit console.log and
// left the request hanging until the proxy timed out — they now produce a
// 500 response.
router.get('/getmenus', (req, res, next) => {
    var sp = new MenusStoreProc().SP_USER_GETMENUS;
    oraDbAssist.getConnection(function (err, conn) {
        if (err) {
            console.log('Connecting to db failed - ' + err);
            return res.status(500).json({ error: 'database connection failed' });
        }
        oraDbAssist.executeSqlWithConn(sp, false, conn, function (err, menus) {
            if (err) {
                console.log('Executing ' + sp.name + ' failed - ' + err);
                return res.status(500).json({ error: 'stored procedure failed' });
            }
            res.status(200).json(menus); // was: json(JSON.stringify(menus)) — double-encoded
        });
    });
});
module.exports = router;
Function definition added - executeSqlWithConn
// Executes a stored procedure and invokes next(err, rowsOrOutBinds).
// If an OUT bind is a ref cursor (detected via .metaData), its rows are
// fetched in batches of `numRows`, flattened, converted to JSON and passed to
// `next`; otherwise the raw outBinds object is passed through.
// FIX: the getRows callback previously tested the OUTER `err` (always falsy
// at that point) instead of its own `readErr`, so read errors were silently
// ignored and the code went on to use an undefined `rows`.
function executeSqlWithConn(sp, autoCommit, connection, next) {
    var sql = createProcedureSqlString(sp.name, sp.params);
    var params = buildParams(sp.params);
    connection.execute(sql, params, {autoCommit: autoCommit}, function(err, result) {
        if (err) {
            next(err, null);
            return;
        }
        var allRows = [];
        var numRows = 50; // number of rows to return from each call to getRows()
        for (var attributeName in result.outBinds) {
            if (result.outBinds[attributeName] && result.outBinds[attributeName].metaData) { // db response is a result set
                var fetchRowsFromResultSet = function(pResultSet, pNumRows) {
                    pResultSet.getRows(pNumRows, function(readErr, rows) {
                        if (readErr) { // was `if (err)` — swallowed every read error
                            pResultSet.close(function(closeErr) { // always close the result set
                                next(readErr);
                            });
                            return;
                        }
                        allRows.push(rows);
                        if (rows.length === pNumRows) {
                            // A full batch may mean more rows remain — keep fetching.
                            fetchRowsFromResultSet(pResultSet, pNumRows);
                            return;
                        }
                        var allRowsResult = Array.prototype.concat.apply([], allRows);
                        generateJsonFromDbResultSet(pResultSet.metaData, allRowsResult, sp, function(resultSet) {
                            pResultSet.close(function(closeErr) { // always close the result set
                                next(null, resultSet);
                            });
                        });
                    });
                };
                fetchRowsFromResultSet(result.outBinds[attributeName], numRows);
                return;
            }
        }
        next(null, result.outBinds);
    });
}
Function definition added - buildParams
// Normalises a bind-parameter map in place for oracledb: missing values
// become null, DATE-typed values are coerced to Date instances, and the
// wrapper `type`/`dir` descriptors are unwrapped to their raw driver values.
// Returns the same (mutated) map.
function buildParams(params) {
    Object.keys(params).forEach(function (key) {
        var p = params[key];
        if (typeof p.val === 'undefined') {
            p.val = null;
        }
        if (p.type.is(dbParams.DATE)) {
            p.val = p.val ? new Date(p.val) : null;
        }
        p.type = p.type.value;
        p.dir = p.dir.value;
    });
    return params;
}
Any help, dear members ?

Parent function is not waiting for child to finish

Inside getMetadata(), the Object.keys iteration is not waiting for the HTTP request to finish. How can I make it wait until the httpRequest callback has manipulated the result variable?
I'm using node. I tried to make promise but failed.
// Builds the list of airport page metadata for the given FAA code.
// Waterfall: (1) fetch the page-id map from S3, (2) issue one HEAD request
// per page and collect timestamp / caller-id metadata.
// FIXES: getMetadata used to fire all HEAD requests and then invoke its
// callback immediately, so the logged result lacked every metadata field
// (the reported bug). It now counts completed responses and only calls back
// after the last one. Also removed the no-op `var faa = faa` and stopped
// swallowing waterfall errors in the final callback.
function fetchAirportPageIDsListWithMetaJSON(faa, cb){
    logger.info('[airportcms-data-processor] fetching airport pages list with Metadata');
    async.waterfall([
        getAirportPageIDsList,
        getMetadata,
    ], function (err, result) {
        cb(err, result); // propagate errors instead of always passing null
    });
    // Fetches the {pageId: sectionId} map from S3.
    function getAirportPageIDsList(callback) {
        httpRequests.fetchData('//s3 url to fetch data', function (err, data) {
            var idsMap = {};
            data["page-ids"].forEach((obj) => {
                obj.list.forEach((item) => idsMap[item] = obj.id);
            });
            callback(null, idsMap);
        })
    }
    // HEAD-requests each page and resolves once every response has arrived.
    function getMetadata(data, callback) {
        var result = [];
        var pending = Object.keys(data).length;
        Object.keys(data).sort().forEach(function (t) {
            var tempJson = {};
            var urlcheck = verifySpecialPageId(t);
            var url = config.urls.s3_airports_base_url + '/' + faa + '/' + (urlcheck ? urlcheck : t);
            tempJson["sectionId"] = t;
            tempJson["page"] = data[t];
            httpRequests.makeHeadRequestWithCallerId(url, function (err, metax) {
                if (metax) {
                    let z = metax.split('|')[0];
                    tempJson["SummaryRange"] = getSummaryRangeAirportPageList(z);
                    tempJson["timestamp"] = new Date(parseInt(z)).toLocaleDateString();
                    tempJson["callerId"] = metax.split('|')[1];
                } else {
                    tempJson["timestamp"] = '';
                    tempJson["callerId"] = '';
                    tempJson["SummaryRange"] = '';
                }
                // Complete inside the HEAD callback so every entry is fully
                // populated before the final result is reported.
                result.push(tempJson);
                pending--;
                if (pending === 0) {
                    logger.info("Final result: ", result);
                    callback(null, result);
                }
            });
        });
    }
}
http request function:
// HEAD-requests `url` and reports back "timestamp|callerid" assembled from
// the x-amz-meta-* response headers; either half may be empty, and the
// callback gets null when neither value is usable.
// NOTE(review): non-200 responses fall through WITHOUT invoking the callback
// at all — that quirk of the original is deliberately preserved here, so
// callers must not rely on the callback always firing.
function makeHeadRequestWithCallerId (url, cb) {
    httpRequest.head(url, function (err, res) {
        if (err) {
            logger.error('Error ' + err);
            return cb(err, null);
        }
        if (res.code !== 200) {
            return; // preserved: no callback on non-200 responses
        }
        var dateModified = res.headers['x-amz-meta-x-amz-meta-lastmodified'];
        var callerid = res.headers['x-amz-meta-x-amz-meta-callerid'];
        if (!dateModified && !callerid) {
            return cb(null, null);
        }
        var timeStamp = Date.parse(dateModified); // NaN (falsy) when header missing/unparsable
        if (timeStamp && callerid) {
            return cb(null, timeStamp + '|' + callerid);
        }
        if (callerid) {
            return cb(null, '|' + callerid);
        }
        if (timeStamp) {
            return cb(null, timeStamp + '|');
        }
        return cb(null, null);
    });
}
Current log=> Final result:
[{ sectionId: 'internet-wifi', page: 'internet-wifi' },
{ sectionId: 'layover-ideas', page: 'layover-ideas' }]
Expected log=> Final result:
{ sectionId: 'internet-wifi',
page: 'internet-wifi',
SummaryRange: '12-99',
timestamp: '1/29/2018',
callerId: '' },
{ sectionId: 'layover-ideas',
page: 'layover-ideas',
SummaryRange: '12-99',
timestamp: '1/26/2017',
callerId: '' },
// Corrected getMetadata (the accepted fix): resolves its callback only after
// every HEAD request has answered, by counting completed responses against
// the number of keys in `data`.
// NOTE(review): `faa`, `config`, `verifySpecialPageId`, `httpRequests`,
// `getSummaryRangeAirportPageList` and `logger` come from the enclosing
// scope — this function is meant to live inside
// fetchAirportPageIDsListWithMetaJSON; confirm before using it standalone.
function getMetadata(data, callback) {
var result = [];
// Total number of pages to wait for; `i` counts completed HEAD responses.
var count = Object.keys(data).length;
var i = 0;
Object.keys(data).sort().forEach(function (t) {
var tempJson = {};
var urlcheck = verifySpecialPageId(t);
if (urlcheck) {
var url = config.urls.s3_airports_base_url + '/' + faa + '/' + urlcheck;
} else {
var url = config.urls.s3_airports_base_url + '/' + faa + '/' + t;
}
tempJson["sectionId"] = t;
tempJson["page"] = data[t];
httpRequests.makeHeadRequestWithCallerId(url, function (err, metax) {
// metax is "timestamp|callerid" (either half may be empty) or null.
if (metax) {
let z = metax.split('|')[0];
tempJson["SummaryRange"] = getSummaryRangeAirportPageList(z);
tempJson["timestamp"] = new Date(parseInt(z)).toLocaleDateString();
tempJson["callerId"] = metax.split('|')[1];
} else {
tempJson["timestamp"] = '';
tempJson["callerId"] = '';
tempJson["SummaryRange"] = '';
}
// Push and complete inside the HEAD callback so every entry is fully
// populated; the final callback fires only when the last response lands.
result.push(tempJson);
i++;
if(count === i){
logger.info("Final result: ", result);
callback(null, result);
}
})
});
}

Data is not Passing into SQL Server

I am doing a project where I have to save sensor data into SQL Server. I have written the code for the sensor and for SQL Server. I can INSERT a row using POST (Postman), but I was not able to pass the actual sensor data into SQL Server.
How can I pass the Sensor parameters into a T-SQL query? It would be great if someone could guide me to sort it out this issue.
exports.add = function (req, resp, reqBody) {
try {
if (!reqBody) throw new Error("Input not valid");
var data = JSON.parse(reqBody);
var outputJSON = JSON.stringify(reqBody);
if (data) {//add more validations if necessary
var sql = "INSERT INTO arduinoData (Machine, StartTime, EndTime, LengthTime) VALUES ";
sql += util.format("(%s, '%s', '%s','%s') ", reqBody.data.Machine, reqBody.data.StartTime, reqBody.data.EndTime, reqBody.data.LengthTime);
db.executeSql(sql, function (data, err) {
if (err) {
httpMsgs.show500(req, resp, err);
}
else {
httpMsgs.send200(req, resp);
}
});
}
else {
throw new Error("Input not valid");
}
}
catch (ex) {
httpMsgs.show500(req, resp, ex);
}
};
// Records the start of a vibration event: captures the current time, POSTs a
// "started" message to the server, and on network failure appends the payload
// to backup.json instead. Returns the start time as a unix timestamp
// (startTime[2]).
// NOTE(review): `data` below is assigned without var/let (implicit global)
// and is never read afterwards — looks like dead code; confirm before
// removing. `Machine`, `getTime`, `serverURL`, `request` and
// `sendBackupData` must be defined elsewhere in this module.
function vibrationStart()
{
data = [{
Machine : "Machine",
StartTime : "StartTime",
EndTime : "EndTime",
LengthTime : "LengthTime"
}];
var jsonTable = { "table": [] };
// getTime() appears to return [date, time, unixTimestamp, dayNight] — TODO confirm.
var startTime = getTime();
console.log(startTime[0] + " " + startTime[1]);
var startData = {
Machine: Machine,
start_time: startTime[0] + " " + startTime[1],
day_night: startTime[3],
active: "true"
};
const options = {
url: serverURL,
method: "POST",
form: startData
};
request.post(options, function (error, response, body) {
if (!error) {
console.log("Sent starting message!");
sendBackupData();
} else {
console.log("CANT'T SEND");
// Write to JSON file for backup if can't send to server
fs.readFile("backup.json", "utf8", function readFileCallback(err, data) {
if (err) throw err;
jsonTable = JSON.parse(data);
jsonTable.table.push(startData);
var outputJSON = JSON.stringify(jsonTable);
fs.writeFile("backup.json", outputJSON, "utf8", function (err) {
if (err) throw err;
});
});
}
});
// The unix timestamp is later passed to vibrationStop to compute duration.
return startTime[2];
}
// Records the end of a vibration event: computes the elapsed time from the
// supplied start timestamp, POSTs an "ended" message, and on network failure
// appends the payload to backup.json instead.
// FIX: removed `var data = JSON.stringify(executeSql);` from the backup
// branch — `executeSql` is not defined in this scope, so that line threw a
// ReferenceError (crashing the backup path) and pointlessly shadowed the
// file contents already parsed above it.
function vibrationStop(startTimeUnix)
{
    // Should get:
    // - End time and date the vibration ended
    // - Total length of time
    // Will send the message, if there is network connection, once complete.
    // Will store message into a JSON file if there is no network connection.
    data = [{
        Machine : "Machine",
        StartTime : "StartTime",
        EndTime : "EndTime",
        LengthTime : "LengthTime"
    }];
    var jsonTable = { "table": [] };
    var endTime = getTime();
    console.log(endTime[0] + " " + endTime[1]);
    var endTimeUnix = endTime[2];
    var LengthTime = endTimeUnix - startTimeUnix;
    console.log("Length time: " + LengthTime);
    var endData = {
        Machine: Machine,
        end_time: endTime[0] + " " + endTime[1],
        LengthTime: LengthTime,
        active: "false"
    };
    const options = {
        url: serverURL,
        method: "POST",
        form: endData
    };
    request.post(options, function (error, response, body) {
        if (!error) {
            console.log("Sent end message!");
            sendBackupData();
        } else {
            console.log("CANT'T SEND");
            // Write to JSON file for backup if can't send to server
            fs.readFile("backup.json", "utf8", function readFileCallback(err, data) {
                if (err) throw err;
                jsonTable = JSON.parse(data);
                jsonTable.table.push(endData);
                var outputJSON = JSON.stringify(jsonTable);
                fs.writeFile("backup.json", outputJSON, "utf8", function (err) {
                    if (err) throw err;
                });
            });
        }
    });
}
// Main Arduino loop: watches a digital tilt sensor and turns raw tilt events
// into vibrationStart/vibrationStop calls plus an inactivity e-mail timer.
arduinoBoard.on("ready", function () {
    // Main function that runs when Arduino is 'ready'
    console.log("Board ready!");
    var tilt = new johnnyFive.Sensor.Digital(8);
    var sensorCount = 0;
    var sensorFlag = false, prevSensorFlag = false;
    var startTime = 0;
    var sendEmailTimeout;
    // When sensor changes value
    tilt.on("change", function () {
        sensorCount = 0;
        sensorFlag = true;
        console.log("TILTING!");
    });
    // Continuously loops
    var timeoutValue = 250; // Change timeout value later on.
    tilt.on("data", function () {
        // Sensor just started turning on
        // FIX: was `sensorFlag & !prevSensorFlag` (bitwise AND on booleans);
        // use the logical operator for a boolean condition.
        if (sensorFlag && !prevSensorFlag) {
            prevSensorFlag = true;
            startTime = vibrationStart();
            console.log("Vibration started.");
            clearTimeout(sendEmailTimeout); // Don't send email if switch activated before 5 minutes of inactivity
        }
        // Sensor just turned off
        if (!sensorFlag && prevSensorFlag) {
            prevSensorFlag = false;
            // FIX: `EndTime = vibrationStop(...)` leaked an implicit global
            // (and vibrationStop returns nothing); just invoke it.
            vibrationStop(startTime);
            console.log("Vibration stopped.");
            sendEmailTimeout = setTimeout(sendEmail, userOptions.email_time * 1000); // Send email after 5 minutes of inactivity
        }
        // Sensor reaches timeout value
        if (sensorCount == timeoutValue) {
            sensorCount = 0;
            sensorFlag = false;
        }
        sensorCount++;
    });
});

how to override express-session destroy method Nodejs

I am working on a project that needs to be notified when session destroy is called. An alternative would be to override the session's destroy method and implement my code there. Does anyone know how to do that?
I have tried to understand express-session code at
https://github.com/konteck/express-sessions
but I could not get how can I do that yet. Also in some part of codes he uses
for (var i in MongoStore) {
SessionStore.prototype[i] = MongoStore[i];
}
which to me it is the first time that I see prototype array form like that and have no idea what does it mean (I almost know about prototype but not in this form) Anyone has any explanation from this code?
Thanks
var express = require('express');
var mongoose = require('mongoose');
var redis = require("redis");
// Session-store backend implementing the express-session store contract on
// top of a Mongoose model (MongoStore.client). `destroy` is the hook the
// session middleware calls when a session is killed — wrap or replace it to
// get notified of session destruction.
// FIX: clear()'s drop callback declared no parameters but tested `err`,
// which threw a ReferenceError on every call; it now receives `err`.
var MongoStore = {
    client: null,
    options: {},
    // Look a session up by sid; yields the stored data object (or nothing).
    get: function (sid, cb) {
        MongoStore.client.findOne({sid: sid}, function (err, doc) {
            try {
                if (err) return cb(err, null);
                if (!doc) return cb();
                cb(null, doc.data); // JSON.parse(doc.data)
            }
            catch (err) {
                cb(err);
            }
        });
    },
    // Upsert session data; expiry defaults to one day unless the cookie
    // carries its own _expires.
    set: function (sid, data, cb) {
        try {
            var lastAccess = new Date();
            var expires = lastAccess.setDate(lastAccess.getDate() + 1);
            if (typeof data.cookie != 'undefined') {
                expires = data.cookie._expires;
            }
            if (typeof data.lastAccess != 'undefined') {
                lastAccess = new Date(data.lastAccess);
            }
            MongoStore.client.findOneAndUpdate({sid: sid}, {
                data: JSON.parse(JSON.stringify(data)), //JSON.stringify(data)
                lastAccess: lastAccess,
                expires: expires
            }, { upsert: true }, cb);
        }
        catch (err) {
            console.log('express-sessions', err);
            cb && cb(err);
        }
    },
    // Called by express-session when a session is destroyed.
    destroy: function (sid, cb) {
        MongoStore.client.remove({ sid: sid }, cb);
    },
    all: function (cb) {
        MongoStore.client.find(function (err, doc) {
            if (err) {
                return cb && cb(err);
            }
            cb && cb(null, doc);
        });
    },
    length: function (cb) {
        MongoStore.client.count(function (err, count) {
            if (err) {
                return cb && cb(err);
            }
            cb && cb(null, count);
        });
    },
    clear: function (cb) {
        MongoStore.client.drop(function (err) { // was function() — `err` was undefined
            if (err) {
                return cb && cb(err);
            }
            cb && cb();
        });
    }
}
// Session-store backend on top of a Redis client; sessions are stored as
// JSON strings under "<collection>:<sid>" keys.
// FIX: clear() passed the glob "<collection>:*" straight to DEL, but Redis
// DEL takes literal key names, not patterns — it silently deleted nothing.
// It now enumerates matching keys with KEYS first, then deletes them.
var RedisStore = {
    client: null,
    options: {},
    // Look a session up by sid; the stored JSON is parsed before return.
    get: function (sid, cb) {
        RedisStore.client.get(RedisStore.options.collection + ':' + sid, function (err, doc) {
            try {
                if (err) return cb(err, null);
                if (!doc) return cb();
                cb(null, JSON.parse(doc)); // JSON.parse(doc.data)
            }
            catch (err) {
                cb(err);
            }
        });
    },
    // Store session data; sets a TTL when options.expire is configured.
    set: function (sid, data, cb) {
        try {
            var lastAccess = new Date();
            var expires = lastAccess.setDate(lastAccess.getDate() + 1);
            if (typeof data.cookie != 'undefined') {
                expires = data.cookie._expires;
            }
            if (typeof data.lastAccess != 'undefined') {
                lastAccess = new Date(data.lastAccess);
            }
            RedisStore.client.set(RedisStore.options.collection + ':' + sid, JSON.stringify(data), cb);
            if (RedisStore.options.expire) {
                RedisStore.client.expire(RedisStore.options.collection + ':' + sid, parseInt(RedisStore.options.expire));
            }
        }
        catch (err) {
            console.log('express-sessions', err);
            cb && cb(err);
        }
    },
    // Called by express-session when a session is destroyed.
    destroy: function (sid, cb) {
        RedisStore.client.del(RedisStore.options.collection + ':' + sid, cb);
    },
    all: function (cb) {
        RedisStore.client.keys(RedisStore.options.collection + ':*', function (err, docs) {
            if (err) {
                return cb && cb(err);
            }
            cb && cb(null, docs);
        });
    },
    length: function (cb) {
        RedisStore.client.keys(RedisStore.options.collection + ':*', function (err, docs) {
            if (err) {
                return cb && cb(err);
            }
            cb && cb(null, docs.length);
        });
    },
    clear: function (cb) {
        RedisStore.client.keys(RedisStore.options.collection + ':*', function (err, keys) {
            if (err) {
                return cb && cb(err);
            }
            if (!keys || !keys.length) {
                return cb && cb();
            }
            RedisStore.client.del(keys, cb);
        });
    }
}
// Constructor for a session store backed by MongoDB (default) or Redis.
// It selects a backend from options.storage and copies every property of the
// chosen backend object (MongoStore or RedisStore) onto
// SessionStore.prototype — that `for (var i in Store)` loop is plain
// property copying: `i` iterates the method NAMES ('get', 'set', 'destroy',
// ...) and each one is assigned onto the prototype, so to intercept session
// destruction you can wrap SessionStore.prototype.destroy after construction.
// NOTE(review): relies on express.session.Store, i.e. Express 3.x where the
// session middleware was bundled; with Express 4+ / express-session this
// property no longer exists — confirm the Express version before reuse.
var SessionStore = function (options, cb) {
// Re-declare `options` with defaults filled in (shadows the parameter).
var options = {
storage: options.storage || 'mongodb',
host: options.host || 'localhost',
port: options.port || (options.storage == 'redis' ? 6379 : 27017),
db: options.db || 'test',
collection: options.collection || 'sessions',
instance: options.instance || null,
expire: options.expire || 86400
};
express.session.Store.call(this, options);
switch (options.storage) {
case 'mongodb':
// Reuse a caller-supplied mongoose instance, or open a new connection.
if (options.instance) {
mongoose = options.instance;
} else {
mongoose.connect('mongodb://' + options.host + ':' + options.port + '/' + options.db);
}
var schema = new mongoose.Schema({
sid: { type: String, required: true, unique: true },
data: { type: {} },
lastAccess: { type: Date, index: { expires: parseInt(options.expire) * 1000} },
expires: { type: Date, index: true }
});
MongoStore.options = options;
MongoStore.client = mongoose.model(options.collection, schema);
// Copy each MongoStore method onto the prototype by name.
for (var i in MongoStore) {
SessionStore.prototype[i] = MongoStore[i];
}
break;
case 'redis':
if (options.instance) {
RedisStore.client = options.instance;
} else {
RedisStore.client = redis.createClient(options.port, options.host);
}
RedisStore.options = options;
// Copy each RedisStore method onto the prototype by name.
for (var i in RedisStore) {
SessionStore.prototype[i] = RedisStore[i];
}
break;
}
if (cb) cb.call(null);
}
// Inherit from express-session's base Store; the backend-specific methods
// (get/set/destroy/...) are copied onto this prototype object when the
// SessionStore constructor above first runs.
SessionStore.prototype = new express.session.Store();
module.exports = SessionStore;

NodeJs async waterfall (callback method is not a function)

I am having a problem using async waterfall where I find that after calling the second callback (cbNumPages), the first parameter "pages" is the actual callback for the next function, instead of the last parameter "cbGetFiles" which it should be (as far as I know async waterfall says that last parameter should always be the callback, well in this case is apparently not).
The code is the following:
// Scrapes paginated attachment listings, groups stale files by author, and
// e-mails each author a summary. Four waterfall stages:
//   1) discover the page count and build the page-offset list,
//   2) fetch each page and collect files older than the cutoff,
//   3) group files by author,
//   4) send one e-mail per author.
// NOTE(review): stage 2 calls its per-page callback `cb()` synchronously
// before async.eachLimit has finished, and invokes cbGetFiles(err) without
// `return` — either can fire the next waterfall stage early or twice, which
// matches the reported "callback method is not a function" symptom. The
// answer below (funcOne..funcFour) fixes these by adding `return`s.
async.waterfall
([
function(cbNumPages)
{
request({
url: 'any-url',
qs: {},
method: 'GET',
headers: {
'Authorization' : 'any-auth'
}
}, (err, response, body) => {
if (!err && response.statusCode == 200)
{
var $ = cheerio.load(body);
var pagesList = $('ol.aui-nav').children();
if(pagesList.length<1)
{
var numPages = 1;
} else {
var numPages = pagesList.length-2;
}
console.log(numPages);
var pages = new Array(numPages),
total = numPages*20,
iterator = 0;
async.eachSeries(pages, function(page, cb)
{
if(page>1)
{
pages[iterator] = iterator;
}else {
pages[iterator] = iterator*20;
}
iterator++;
cb();
}, function(err){
// NOTE(review): missing `return` before cbNumPages(err) — on error the
// success callback still runs afterwards.
if(err) cbNumPages(err);
cbNumPages(null, pages);
});
} else {
cbNumPages(err);
}
})
},
function(pages, cbGetFiles)
{
var files = [];
var limitDate = moment().tz('Europe/Madrid').subtract(330,'days').format();
async.eachSeries(pages, function(page, cb)
{
request({
url: 'any-url'+page,
qs: {},
method: 'GET',
headers: {
'Authorization' : 'any-auth'
}
}, (err, response, body) => {
if(!err && response.statusCode == 200)
{
var $ = cheerio.load(body);
var rows = $('tr[id^=\'attachment-\']');
async.eachLimit(rows, 1, function(row, cb)
{
var id = row.attribs['id'];
var file = row.attribs['data-attachment-filename'];
var author = $(row).children('.creator').text().trim();
var created = $(row).children('.created-date').text().trim();
created = moment.tz(created, 'MMM D, YYYY', 'Europe/Madrid').format();
var urlFile = 'simple-file' + $(row).children('.filename-column').children('.filename').attr('href');
var extension = row.attribs['data-attachment-filename'].split('.');
extension = extension[extension.length-1];
if(created<limitDate && validExtensions.indexOf(extension)>-1)
{
var f = '{ "id": "' + id + '",';
f += ' "file": "' + file + '",';
f += ' "url": "' + urlFile + '",';
f += ' "author": "' + author + '",';
f += ' "modified": "' + created + '" }';
files.push(JSON.parse(f));
}
cb();
}, (err) => {
// NOTE(review): calling cbGetFiles(err) here (no `return`, wrong level)
// can advance the waterfall while eachSeries is still running.
if(err) cbGetFiles(err);
});
// NOTE(review): cb() fires before eachLimit above has completed.
cb();
} else {
cb(err);
}
});
}, function(err){
if(err){
cbGetFiles(err);
} else {
cbGetFiles(null, files);
}
});
},
function(files, cbGetAutors)
{
var filesFinal = {};
for(var f in files)
{
if(!filesFinal[files[f].author])
{
var ff = {};
for(var i in files)
{
if(files[i].author === files[f].author)
{
ff[files[i].file] = files[i].url;
}
}
filesFinal[files[f].author] = ff;
}
}
cbGetAutors(null, JSON.parse(JSON.stringify(filesFinal)));
},
function(filesFinal, cbSendEmail)
{
var authors = Object.keys(filesFinal);
async.eachSeries(authors, function(author, cb)
{
var name = author.split(' ');
var email = 'simple-mail#gmail.com';
var msg = '<p>Hi ' + author + ',</p><p>how is it going:</p><p>';
for(var a in Object.keys(filesFinal[author]))
{
msg += '<p style="margin-left:20px"> '+ICON_DOC+' <a href="';
msg += filesFinal[author][Object.keys(filesFinal[author])[a]]+'">'+Object.keys(filesFinal[author])[a]+'</a></p>';
}
msg += '</p></p><p><b>NOTE: This is a no-reply address.</b></p><p>Have a nice day! '+ICON_MONKEY+'</p>';
var message = {
text: msg,
from: 'test#mail.com',
to: email,
bcc: '',
subject: 'Sample subject',
attachment: [{data: msg, alternative: true}]
};
serverEmail.send(message, function(err, message)
{
if(err)
{
cb(err);
} else {
console.log(message);
cb();
}
});
}, function(err){
if(err) cbSendEmail(err);
cbSendEmail();
});
}
], (err) => {
if(err) console.log(err);
});
I would like to know if there is a way to control this issue or at least if there are another options for what I want to do.
Thanks.
A better (neat) way to use async waterfall.
Make sure you use return before any callback function. I have added them in the code.
Also, if you are nesting eachSeries, it is better to give a different name to callback function than the parent callback function.
I have changed 'cb' of child async.series to 'inner_cb'
Updated Code:
async.waterfall
([
funcOne,
funcTwo,
funcThree,
funcFour
], (err) => {
if(err) console.log(err);
});
// Stage 1 of the waterfall: fetch the listing page, derive the number of
// pagination pages from the 'ol.aui-nav' element, and build the array of
// page offsets handed to funcTwo.
// FIX: the keyword was misspelled `funciton`, a syntax error that prevented
// this file from loading at all.
function funcOne(cbNumPages) {
    request({
        url: 'any-url',
        qs: {},
        method: 'GET',
        headers: {
            'Authorization' : 'any-auth'
        }
    }, (err, response, body) => {
        if (!err && response.statusCode == 200) {
            var $ = cheerio.load(body);
            var pagesList = $('ol.aui-nav').children();
            var numPages;
            if (pagesList.length < 1) {
                numPages = 1;
            } else {
                numPages = pagesList.length - 2;
            }
            console.log(numPages);
            // NOTE(review): new Array(numPages) yields holes, so `page` is
            // always undefined in the iterator and the page>1 branch never
            // runs — confirm the intended offsets.
            var pages = new Array(numPages),
                total = numPages * 20,
                iterator = 0;
            async.eachSeries(pages, function (page, cb) {
                if (page > 1) {
                    pages[iterator] = iterator;
                } else {
                    pages[iterator] = iterator * 20;
                }
                iterator++;
                return cb();
            }, function (err) {
                if (err) return cbNumPages(err);
                return cbNumPages(null, pages);
            });
        } else {
            return cbNumPages(err);
        }
    })
}
// Stage 2: fetch each listing page, scrape its attachment rows, and collect
// files older than the 330-day cutoff (with an allowed extension) into
// `files`, which is passed on to funcThree.
// NOTE(review): `return cb()` below still fires before async.eachLimit has
// necessarily finished, and the eachLimit error handler calls cbGetFiles
// directly from inside an eachSeries iteration — both can complete the
// waterfall stage early if any row processing is asynchronous or fails.
function funcTwo(pages, cbGetFiles) {
var files = [];
var limitDate = moment().tz('Europe/Madrid').subtract(330,'days').format();
async.eachSeries(pages, function(page, cb)
{
request({
url: 'any-url'+page,
qs: {},
method: 'GET',
headers: {
'Authorization' : 'any-auth'
}
}, (err, response, body) => {
if(!err && response.statusCode == 200)
{
var $ = cheerio.load(body);
var rows = $('tr[id^=\'attachment-\']');
// Process each attachment row sequentially (limit 1).
async.eachLimit(rows, 1, function(row, inner_cb)
{
var id = row.attribs['id'];
var file = row.attribs['data-attachment-filename'];
var author = $(row).children('.creator').text().trim();
var created = $(row).children('.created-date').text().trim();
created = moment.tz(created, 'MMM D, YYYY', 'Europe/Madrid').format();
var urlFile = 'simple-file' + $(row).children('.filename-column').children('.filename').attr('href');
var extension = row.attribs['data-attachment-filename'].split('.');
extension = extension[extension.length-1];
if(created<limitDate && validExtensions.indexOf(extension)>-1)
{
// Build the record as a JSON string, then parse it into an object.
var f = '{ "id": "' + id + '",';
f += ' "file": "' + file + '",';
f += ' "url": "' + urlFile + '",';
f += ' "author": "' + author + '",';
f += ' "modified": "' + created + '" }';
files.push(JSON.parse(f));
}
return inner_cb();
}, (err) => {
if(err) return cbGetFiles(err);
});
return cb();
} else {
return cb(err);
}
});
}, function(err){
if(err){
return cbGetFiles(err);
} else {
return cbGetFiles(null, files);
}
});
}
// Stage 3: groups the scraped file list by author into
// { author: { fileName: url } } and hands a deep copy to the next stage.
// FIX: the original rescanned the entire list for every file (O(n²)); a
// single pass builds the identical grouping — author keys in first-seen
// order, files in list order, later duplicates overwriting earlier ones.
function funcThree(files, cbGetAutors) {
    var filesFinal = {};
    for (var f in files) {
        var entry = files[f];
        if (!filesFinal[entry.author]) {
            filesFinal[entry.author] = {};
        }
        filesFinal[entry.author][entry.file] = entry.url;
    }
    // JSON round-trip kept from the original: yields a detached deep copy.
    return cbGetAutors(null, JSON.parse(JSON.stringify(filesFinal)));
}
// Stage 4: sends one summary e-mail per author listing that author's files
// as links. E-mails go out sequentially; the first send failure aborts the
// series and is propagated to cbSendEmail.
function funcFour(filesFinal, cbSendEmail) {
    var authors = Object.keys(filesFinal);
    async.eachSeries(authors, function (author, done) {
        var name = author.split(' ');
        var email = 'simple-mail#gmail.com';
        var msg = '<p>Hi ' + author + ',</p><p>how is it going:</p><p>';
        // One linked line per file owned by this author.
        Object.keys(filesFinal[author]).forEach(function (fileName) {
            msg += '<p style="margin-left:20px"> ' + ICON_DOC + ' <a href="';
            msg += filesFinal[author][fileName] + '">' + fileName + '</a></p>';
        });
        msg += '</p></p><p><b>NOTE: This is a no-reply address.</b></p><p>Have a nice day! ' + ICON_MONKEY + '</p>';
        var message = {
            text: msg,
            from: 'test#mail.com',
            to: email,
            bcc: '',
            subject: 'Sample subject',
            attachment: [{ data: msg, alternative: true }]
        };
        serverEmail.send(message, function (err, sent) {
            if (err) {
                return done(err);
            }
            console.log(sent);
            return done();
        });
    }, function (err) {
        if (err) return cbSendEmail(err);
        return cbSendEmail();
    });
}
As @YSK said in a comment, I was obtaining a 401 from response.statusCode, and therefore the flow was being misled to cbSendEmail(err) with err being null, making the next method in the waterfall receive the callback as its first parameter instead of its second.

Resources