Node.js Async mapLimit and memory - node.js

SOLVED, see the answer below.
I have a list of URLs I fetch using request, and for some reason I am unable to save more than 1720 records into my database when I try to fetch 2000 or more URLs at a time.
If I try 1000 to 2000 and then 2000 to 3000, I get 3000 results in total. But when I try 1000 to 3000 or 4000 to 6000, my script stops after fetching the 1720th result.
What could be the reason for that?
I use mapLimit to limit concurrent connections.
app.get('/asynctest', function(req, res) {
  var people = [];
  for (var a = 1000; a < 3000; a++) {
    people.push("http://www.example.com/" + a + "/person.html");
  }
  async.mapLimit(people, 20, function(url, callback) {
    // iterator function
    var options2 = {
      url: url,
      headers: {
        'User-Agent': req.headers['user-agent'],
        'Content-Type': 'application/json; charset=utf-8'
      }
    };
    request(options2, function(error, response, body) {
      if (!error && response.statusCode == 200) {
        async.series([
          // add this person into database
          function(callback) {
            var $ = cheerio.load(body);
            var name = entities.decodeHTML($('span[itemprop="name"]').text());
            new person({
              name: name,
              url: url
            }).save();
            callback();
          },
          function(callback) {
            async.waterfall([
              function(callback) {
                var $ = cheerio.load(body);
                var jobs = $('span[itemprop="jobtitle"]').length;
                if (jobs == 0) {
                  console.log("no job");
                  var jobsArr = 0;
                } else {
                  var jobsArr = [];
                  for (var aa = 0; aa < jobs; aa++) {
                    jobsArr.push(entities.decodeHTML($('span[itemprop="jobtitle"]').eq(aa).text()));
                  }
                }
                callback(null, jobsArr);
              },
              function(jobsArr, callback) {
                if (jobsArr == 0) {
                  console.log("this person has no jobs");
                } else {
                  async.map(jobsArr, function(jobs, callback) {
                    personRole.where('job_name', jobs).fetch({
                      require: true
                    }).then(function(data1) {
                      data1 = data1.toJSON();
                      person.where('url', url).fetch().then(function(data2) {
                        data2 = data2.toJSON();
                        new personPersonRole({
                          person_id: data2.id,
                          personrole_id: data1.id
                        }).save();
                      });
                    }).catch(function(err) {
                      new personRole({
                        job_name: jobs
                      }).save().then(function(data3) {
                        data3 = data3.toJSON();
                        person.where('url', url).fetch().then(function(data4) {
                          data4 = data4.toJSON();
                          new personPersonRole({
                            person_id: data4.id,
                            personrole_id: data3.id
                          }).save();
                        });
                      });
                    });
                  });
                }
                callback(null, "yes");
              }
            ], function(err, result) {
              if (err) {
                console.log(err);
              }
            });
            callback();
          }
        ], function(err, result) {
          if (err) {
            console.log("err3");
          }
        });
      } else {
        console.log("err4");
      }
    });
    callback();
  });
});
EDIT #2
The following code is also problematic: it adds only 1747 records and stops after that. If I stop my Node app and start it again, it also stops at 1747.
var person = require('./models').person;

app.get('/asynctest', function(req, res) {
  var people = [];
  for (var a = 18000; a < 20000; a++) {
    people.push("random url");
  }
  async.mapLimit(people, 20, function(url, callback) {
    new person({
      name: "YES",
      url: url
    }).save();
    callback();
  });
});
db.js
var knex = require('knex')({
  client: 'mysql',
  connection: {
    host: '127.0.0.1',
    port: 8889,
    user: 'root',
    password: 'root',
    database: 'mydatabase',
    charset: 'utf8'
  },
  pool: {
    min: 0,
    max: 100
  }
});
var db = require('bookshelf')(knex);
module.exports = db;
models.js
var db = require('./db'); // the bookshelf instance exported by db.js

var person = db.Model.extend({
  tableName: 'people'
});
module.exports = {
  person: person
};
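A note on the EDIT #2 snippet: bookshelf's save() returns a promise, and the code calls the mapLimit callback synchronously, before the insert has finished, so the concurrency limit of 20 never actually bounds the in-flight writes; only the pool (max: 100) does. A minimal sketch (my reading of the intended behaviour, not the original code) that defers the callback until the save settles:
async.mapLimit(people, 20, function(url, callback) {
  new person({
    name: "YES",
    url: url
  }).save()
    .then(function() {
      callback();       // promise resolution is already asynchronous,
    })                  // so no setImmediate wrapper is needed here
    .catch(callback);   // surface insert errors instead of swallowing them
}, function(err) {
  if (err) console.log(err);
});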
EDIT #3
Okay, I think I've found the solution.
18K-18.5K - no problem
18K-19K - no problem
18K-19.7K - no problem
18K-20K - RangeError: Maximum call stack size exceeded at new Object (native)
I just wrapped my callbacks into a wrapper, like below.
async.setImmediate(function () {
  callback();
});
app.get('/async22', function(req, res) {
  var people = [];
  for (var a = 18000; a < 20000; a++) {
    people.push("yes");
  }
  async.mapLimit(people, 20, function(url, callback) {
    new person({
      name: "YES",
      url: url
    }).save();
    async.setImmediate(function () {
      callback();
    });
  });
});

It was in front of my eyes all the time. Actually, this solution isn't unique; it's already documented in the async library's README, under common pitfalls:
https://github.com/caolan/async#common-pitfalls-stackoverflow
Here's how you do it.
async.setImmediate(function () {
  callback();
});
Example
app.get('/async22', function(req, res) {
  var people = [];
  for (var a = 18000; a < 20000; a++) {
    people.push("yes");
  }
  async.mapLimit(people, 20, function(url, callback) {
    new person({
      name: "YES",
      url: url
    }).save();
    async.setImmediate(function () {
      callback();
    });
  });
});
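For context, a minimal reproduction of the pitfall (mine, not from the original post): with async releases before 2.0, an iterator that calls its callback synchronously makes eachSeries/mapLimit recurse into the next item on the same call stack, so a large enough array throws exactly this RangeError. Deferring the callback lets the stack unwind between items:
var async = require('async');

var items = [];
for (var i = 0; i < 200000; i++) items.push(i);

async.eachSeries(items, function(item, callback) {
  // callback();                // synchronous: the stack grows with every item
  //                            // and eventually throws the RangeError above
  async.setImmediate(callback); // deferred: the stack unwinds between items
}, function(err) {
  console.log('done', err);
});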

This is still not an answer, but it is too big for a comment.
I suggest reducing the code to some minimal example and checking whether it works (an example is below, and it works for me).
The second thing is to add a monitoring route (see /apptest below), so you can check whether your app still works and how far the processing has progressed.
If the minimal sample works, start gradually adding more of your logic to it and check that it still works.
The code, server.js:
var util = require('util');
var express = require('express');
var async = require('async');
var request = require('request');
var cheerio = require('cheerio');

var app = express.createServer();

app.successCount = 0;
app.errorCount = 0;

app.get('/apptest', function(req, res) {
  res.send(
    util.format(
      'I am OK, successCount: %s, errorCount: %s',
      app.successCount, app.errorCount
    ), 200
  );
});

app.get('/asynctest', function(req, res) {
  var people = [];
  for (var a = 1000; a < 3000; a++) {
    people.push("http://www.example.com/" + a + "/person.html");
  }
  async.mapLimit(people, 20, function(url, callback) {
    // iterator function
    var options2 = {
      url: url,
      headers: {
        'User-Agent': req.headers['user-agent'],
        'Content-Type': 'application/json; charset=utf-8'
      }
    };
    request(options2, function(error, response, body) {
      if (!error) {
        console.log('success requesting: ' + options2.url);
        var $ = cheerio.load(body);
        app.successCount += 1;
      } else {
        console.log(
          'error requesting: %s, error: %s, status: %s',
          options2.url, error, response.statusCode
        );
        app.errorCount += 1;
      }
      callback();
    });
  });
});

app.listen(3000, function() {
  console.log(
    "Express server listening on port %d in %s mode",
    app.address().port, app.settings.env
  );
});
Dependencies, package.json:
{
  "name": "application-name",
  "version": "0.0.1",
  "private": true,
  "dependencies": {
    "async": "^1.5.2",
    "cheerio": "^0.19.0",
    "express": "2.5.8",
    "request": "^2.67.0"
  },
  "devDependencies": {}
}
Run the example as node server.js and then open http://localhost:3000/asynctest in the browser; you should see success requesting: xxxx in the console. While it is running (or when it stops running), open http://localhost:3000/apptest to check whether the app is OK and how many URLs have been processed.

How do I make sure socket.id is the same on the server and the client after a page reload?

I am writing a Proof of Concept (for at least 2 months now) that uses Node cluster (workers), Redis and socket.io.
Socket.io is not used for chat in this instance, just back-to-front communication. Ajax is not an option.
I am using pub/sub for Redis and have that piece working (I think). At least the values returned from pubClient.get('key') are correct.
When I make a request from the front end and do not navigate or reload the page in any way, things work perfectly: I can make 10 requests and 10 responses are received.
Conversely, when I navigate, the same is not true, and I need to deliver the results no matter how much someone navigates on the front end.
It seems there is a disconnect after a reload. In both consoles, Dev Tools and Node.js, the socket ids are the same. I'm really scratching my head on this one!
Any help out there?
So, here is the relevant (mainly socket.io) code:
CLIENT:
socket = io('https://' + location.hostname + ':4444/', {
  transports: ['websocket', 'polling'],
  secure: true,
});

socket.on('download', function(data) { // after reload, this never hits
  console.log('DOWNLOAD ' + data.download);
});

var pkgs = ['y14Vfk617n6j', 'My77gWYmBLxT', 'IYd6dL9UoXkx'];
if (pkgs.length > 0) {
  for (var i = 0; i < pkgs.length; i++) {
    socket.emit('get-request', pkgs[i]);
  }
}
SERVER:
var cluster = require('cluster');
var express = require('express');
var numCPUs = require('os').cpus().length;
const { setupMaster, setupWorker } = require("@socket.io/sticky");
const { createAdapter, setupPrimary } = require("@socket.io/cluster-adapter");
var app = express();
const https = require('https');
const { Server } = require("socket.io");
const Redis = require("ioredis");

const sock_nodes = [
  {port: 6379, host: '192.168.0.41'},
  {port: 6380, host: '192.168.0.34'},
  {port: 6381, host: '192.168.0.35'},
  {port: 6379, host: '192.168.0.34'},
  {port: 6380, host: '192.168.0.35'},
  {port: 6381, host: '192.168.0.41'}
];
const port = 4444;
const httpServer = https.createServer(options, app); // options (TLS certs) defined elsewhere
const io = new Server(httpServer, {maxHttpBufferSize: 10240000});
const pubClient = new Redis.Cluster(sock_nodes, {
  redisOptions: {
    password: 'my secret!'
  }
});
const subClient = pubClient.duplicate(); // I am not actually using this - should I be?

if (cluster.isMaster) {
  for (var i = 0; i < numCPUs; i++) {
    // Create a worker
    cluster.fork();
  }
  cluster.on("exit", (worker) => {
    console.log(`Worker PID ${worker.process.pid} died`);
    var w = cluster.fork();
    console.log('WORKER %d died (%s). restarting...', worker.process.pid, worker.state);
    w.on('message', function(msg) {
      console.log("Message Received : ", msg);
    });
  });
} else {
  app.use((req, res, next) => {
    var reqip = req.headers['x-real-ip'] || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    //~ console.log(reqip, md5(reqip));
    var sess = parseCookies(req, 'session_state');
    if (!sess) {
      res.cookie('session_state', md5(reqip));
    }
    next();
  });
  app.get('/', (req, res) => {
    getSession(req, res, function(sess) {
      getPub('currSockets', sess, function(err, socket) {
        res.render("pages/shared/index", {'ns': sess, 'socket': socket});
      });
    });
  });
  app.get('/start', function(req, res) {
    getSession(req, res, function(sess) {
      getPub('currSockets', sess, function(err, socket) {
        res.render("pages/shared/start", {'ns': sess, 'socket': socket});
      });
    });
  });
  io.on('connection', function (socket) {
    var currUser = parseCookies(socket.request, 'session_state');
    socket.join(currUser);
    getPub('currSockets', currUser, function(err, currSockets) {
      if (currSockets) {
        currSockets = JSON.parse(currSockets);
        if (currSockets[currUser]) {
          if (currSockets[currUser].stream) {
            currSockets[currUser].sock = socket.id;
            setCurrSockets(currSockets, currUser, null, function(cSocks) {
            });
          }
        }
      }
    });
    socket.on('get-request', function(data) { // can be one or many requests
      // there is a similar, currently irrelevant, socket.on('new-request') that is left out here
      if (data) {
        getPub('currSockets', currUser, function(err, currSockets) {
          currSockets = JSON.parse(currSockets);
          if (currSockets) {
            if (currUser) {
              if (currSockets[currUser]) {
                if (currSockets[currUser].stream) {
                  var str = Object.keys(currSockets[currUser].stream);
                  for (var i = 0; i < str.length; i++) {
                    if (str[i] !== 'sock') {
                      if (!currSockets[currUser].stream[str[i]]) {
                        delete currSockets[currUser].stream[str[i]];
                        setCurrSockets(currSockets, currUser, null, function(cSocks) {
                          checkCurrSockets(currUser, data, socket);
                        });
                      }
                    }
                  }
                }
              }
            }
          }
        });
      }
    });
  });
  httpServer.listen(port, () => {
    logs(__line__, `Worker ${process.pid} listening on ${port}`);
  });
}

function existsPub(key, cb) {
  return pubClient.exists(key, cb);
}

function setPub(key, val, cb) {
  if (val === JSON.stringify({})) {
    return pubClient.get(key, cb);
  }
  return pubClient.set(key, val, cb);
}

function getPub(key, currUser, cb) {
  existsPub(key, function(err, reply) {
    if (reply === 1) {
      return pubClient.get(key, cb); // always getting an old socket.id
    }
  });
}

// Here is the piece that doesn't work after reloading the page
function ioEmit(currSock, target, payload) {
  io.to(currSock).emit(target, payload); // doesn't work after page reload
}
// end piece where after reload does not work

getPub('currSockets', currUser, function(err, currSockets) {
  if (currSockets) {
    currSockets = JSON.parse(currSockets);
    ioEmit(currUser, 'download', {'download': currSockets[currUser].stream[data]});
  }
});
function parseCookies(req, name) {
  var list = {},
      rc = req.headers.cookie; // raw cookie header
  rc && rc.split(';').forEach(function(cookie) {
    var parts = cookie.split('=');
    list[parts.shift().trim()] = decodeURI(parts.join('='));
  });
  return list[name];
}
function getSession(req, res, callback) {
  var sess = false;
  if (req.headers) { // handle req
    var reqip = req.headers['x-real-ip'] || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    if (req.headers.cookie) {
      sess = req.headers.cookie.split('=')[1].split(';')[0];
    } else {
      res.cookie('session_state', md5(reqip));
    }
    return callback(sess);
  } else if (req.request) { // handle socket
    //~ console.log('req.request.headers.cookie', req.request.headers.cookie.split('=')[1]);
    if (req.request.headers.cookie) {
      sess = req.request.headers.cookie.split('=')[1].split(';')[0];
      //~ req.emit('join', sess);
      //~ callback({[sess]: {'sock': req.id}});
      callback(req.id);
    }
  } else {
    return callback(null);
  }
}
function setCurrSockets(currSockets, currUser, data, cb) {
  if (Object.keys(currSockets[currUser].stream).length > 0) {
    if (data) {
      if (ready(currSockets, currUser, data)) {
        delete currSockets[currUser].stream[data]; // it appears that setCurrSockets is getting called too soon
      }
    }
    setPub('currSockets', JSON.stringify(currSockets), function(err) {
    });
    if (typeof cb === 'function') {
      setTimeout(() => {
        getPub('currSockets', currUser, function(err, cSocks) {
          cb(cSocks); // updated callback to return cSocks
        });
      }, 2000);
    }
  } else {
    currSockets[currUser].stream = {};
    setPub('currSockets', JSON.stringify(currSockets), function(err) {
      if (err) {
      } else {
        if (typeof cb === 'function') {
          cb(currSockets); // updated callback to return cSocks
        }
      }
    });
  }
}
Figured this out. The problem was in here:
for (var i = 0; i < str.length; i++) {
  if (str[i] !== 'sock') {
>>>> if (!currSockets[currUser].stream[str[i]]) { // never true
      // delete currSockets[currUser].stream[str[i]];
      setCurrSockets(currSockets, currUser, null, function(cSocks) {
        checkCurrSockets(currUser, data, socket);
      });
    }
  }
}
So I commented out the for loop and kept the setCurrSockets part, and it works.
Just thought I would share, in case someone else tries to use Redis, Node cluster and socket.io together. As @jfreind00 said, you should use an authentication system with a randomly gen'd string for storing cookies.
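One more note for anyone who lands here with the reload problem itself: the server code above already joins each socket to a room keyed by the session cookie (socket.join(currUser)), and rooms survive reloads while socket.id does not. A sketch of emitting to the room instead of a stored socket.id (lookUpPayload is a hypothetical stand-in for the getPub/currSockets logic above, not the poster's code):
io.on('connection', function (socket) {
  var currUser = parseCookies(socket.request, 'session_state'); // stable across reloads
  socket.join(currUser);                                        // socket.id is NOT stable

  socket.on('get-request', function (data) {
    var payload = lookUpPayload(data); // hypothetical helper, not in the original code
    // Every socket this user currently has receives the event,
    // including sockets created after a page reload:
    io.to(currUser).emit('download', { download: payload });
  });
});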

Synchronous api calls in Node.js

I have a cronjob that runs every 10 seconds. It requests the machines for a single client, does computation based on the response, and then has to update or create documents with those computations in a for loop. But the API calls after '***' in the code don't happen until the for loop has finished executing, and the data sent to those calls is that of the last machine, which is wrong. I want to solve this, whether this way or some other way. My code looks like this:
// index.js
const cron = require("node-cron");
const express = require("express");
const fs = require("fs");
const request = require("request");

app = express();

var clientId = 'ABCD';
var apiUrl = 'http://example.com:3001/';
var getMachines = apiUrl + 'getMachines/',
    updateMachine = apiUrl + 'updateMachine/',
    getControlRoomStatus = apiUrl + 'getControlRoomStatus/',
    registerControlRoomStatus = apiUrl + 'registerControlRoomStatus/',
    updateControlRoomStatus = apiUrl + 'updateControlRoomStatus/';

cron.schedule("*/10 * * * * *", function() {
  APICall(getMachines, { 'clientId': clientId }, 'POST', function(err, machines) {
    if (err) {
      console.log(err);
    } else {
      console.log('--------------------------------------------------------------------------------------------------');
      var allMachines = machines;
      var currentDateTime = IST();
      for (var i = 0; i < 2; i++) {
        var lastCycleTime = new Date(allMachines[i]['lastCycleTime']);
        var lastHeartbeat = new Date(allMachines[i]['lastHeartbeat']);
        var machineData;
        var controlRoomData;
        var machineId = {
          'machineId': allMachines[i]['machineId']
        };
        console.log(machineId);
        if (allMachines[i]['downtimeStatus'] == '0') {
          if ((currentDateTime - lastCycleTime) > 300000) {
            if ((currentDateTime - lastHeartbeat) > 300000) {
              console.log(allMachines[i]['machineId'], ' No Internet');
              controlRoomData = {
                'clientId': clientId,
                'lastTimeStamp': allMachines[i]['lastCycleTime'],
                'status': 'Inactive',
                'type': 'No Internet/Power'
              };
            } else {
              console.log(allMachines[i]['machineId'], ' No button pressed');
              controlRoomData = {
                'clientId': clientId,
                'lastTimeStamp': allMachines[i]['lastCycleTime'],
                'status': 'Inactive',
                'type': 'No Button Pressed'
              };
            }
            machineData = {
              'status': 'Inactive'
            };
          } else {
            console.log(allMachines[i]['machineId'], ' Active');
            machineData = {
              'status': 'Active'
            };
            controlRoomData = {
              'clientId': clientId,
              'lastTimeStamp': allMachines[i]['lastCycleTime'],
              'status': 'Active',
              'type': 'N.A'
            };
          }
        } else {
          if ((currentDateTime - lastHeartbeat) > 300000) {
            console.log(allMachines[i]['machineId'], ' button pressed ', ' No Internet');
            controlRoomData = {
              'clientId': clientId,
              'lastTimeStamp': allMachines[i]['lastCycleTime'],
              'status': 'Inactive',
              'type': 'No Internet/Power'
            };
          } else {
            var downtimeLength = allMachines[i]['downtimeData'].length - 1;
            console.log(allMachines[i]['machineId'], ' button pressed ', allMachines[i]['downtimeData'][downtimeLength]['downtimeType']);
            controlRoomData = {
              'clientId': clientId,
              'lastTimeStamp': allMachines[i]['lastCycleTime'],
              'status': 'Inactive',
              'type': allMachines[i]['downtimeData'][downtimeLength]['downtimeType']
            };
          }
          machineData = {
            'status': 'Inactive'
          };
        }
        // ***
        APICall(getControlRoomStatus, machineId, 'POST', function(err, controlRoom) {
          if (err) {
            console.log(err);
          } else {
            console.log(machineId, controlRoomData);
            if (controlRoom == null) {
              APICall(registerControlRoomStatus, controlRoomData, 'POST', function(err, body) {
                if (err) {
                  console.log(err);
                } else {
                  // console.log(body);
                }
              });
            } else {
              var updateControlRooomUrl = (updateControlRoomStatus + '' + controlRoom['_id'] + '');
              // console.log(updateControlRooomUrl);
              APICall(updateControlRooomUrl, controlRoomData, 'PUT', function(err, body) {
                if (err) {
                  console.log(err);
                } else {
                  // console.log(body);
                }
              });
            }
          }
        });
        var updateMachineUrl = (updateMachine + '' + allMachines[i]['_id'] + '');
        // console.log(updateMachineUrl);
        APICall(updateMachineUrl, machineData, 'PUT', function(err, body) {
          if (err) {
            console.log(err);
          } else {
            console.log(i, machineId);
            // console.log(body);
          }
        });
      }
    }
  });
});

function APICall(url, requestData, method, callback) {
  request({
    url: url,
    form: requestData,
    method: method
  }, function(error, response, body) {
    if (error || response.statusCode !== 200) {
      return callback(error || {statusCode: response.statusCode});
    }
    callback(null, JSON.parse(body));
  });
}

function IST() {
  var dateUTC = new Date();
  var dateUTC = dateUTC.getTime();
  var dateIST = new Date(dateUTC);
  dateIST.setHours(dateIST.getHours() + 5);
  dateIST.setMinutes(dateIST.getMinutes() + 30);
  return dateIST;
}

app.listen(3128);
Thank you in advance.
I used a different method to do things and now it's working just as it's supposed to. I used 'async' and replaced the for loop with the following:
var async = require('async');
...
async.map(allMachines, function(machine, callback) {
  ...
});
...
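To make the refactor concrete, here is a sketch (names from the question; buildControlRoomData is a hypothetical helper standing in for the if/else blocks above) of how moving the loop body into the iteratee scopes each machine's data to its own API calls:
async.map(allMachines, function(machine, callback) {
  // Declared per machine rather than hoisted out of a shared for loop:
  var machineId = { 'machineId': machine['machineId'] };
  var controlRoomData = buildControlRoomData(machine); // hypothetical helper

  APICall(getControlRoomStatus, machineId, 'POST', function(err, controlRoom) {
    if (err) return callback(err);
    // register or update the control room status, as in the original code...
    callback(null, controlRoom);
  });
}, function(err, results) {
  if (err) console.log(err);
});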
You can try the following package:
sync-request
You can find it here on NPM.
Here is an example of how to use it (from the docs):
var request = require('sync-request');
var res = request('GET', 'http://example.com');
console.log(res.getBody());
As stated in the documentation, don't use it in production code, since it will block your server terribly and slow it down considerably (and you are running an HTTP server, since you are using Express).
If you have asynchronous code and you want to execute some code after the asynchronous part completes, you can also use:
Observables (not native, you need a package, for example RxJS)
Promises (native ES6 JS)
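For example, the question's APICall could be wrapped in a native promise (a sketch, assuming a Node version with ES6 promises; APICall as defined in the question):
function APICallAsync(url, requestData, method) {
  return new Promise(function(resolve, reject) {
    APICall(url, requestData, method, function(err, body) {
      if (err) return reject(err);
      resolve(body);
    });
  });
}

// Kick off one update per machine and wait for all of them:
Promise.all(allMachines.map(function(machine) {
  return APICallAsync(updateMachine + machine['_id'], { 'status': 'Active' }, 'PUT'); // simplified payload
})).then(function(results) {
  console.log('updated', results.length, 'machines');
}).catch(function(err) {
  console.log(err);
});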

Backbone and Express: concatenating (duplicating) routes on res.redirect

I have an action where I need to update a MongoDB entry including the _id field, which requires deleting the old entry and making a new one. Here is the server side:
exports.update = function(req, res, next) {
  var outcome = [];
  outcome.previousId = req.params.id;
  outcome.newId = req.body.name;

  var getPreviousRecord = function(callback) {
    req.app.db.models.AccountGroup
      .findOne({ _id: req.params.id })
      .lean()
      .exec(function(err, accountGroups) {
        if (err) {
          return callback(err, null);
        }
        outcome.accountGroups = accountGroups;
        return callback(null, 'done');
      });
  };

  var makeNewRecord = function(callback) {
    var permissions = outcome.accountGroups.permissions;
    var fieldsToSet = {
      _id: outcome.newId.toLowerCase(),
      name: outcome.newId,
      permissions: permissions
    };
    req.app.db.models.AccountGroup
      .create(fieldsToSet, function(err, record) {
        if (err) {
          return callback(err, null);
        }
        outcome.record = record;
        return callback(null, 'done');
      });
  };

  var deletePreviousRecord = function() {
    req.app.db.models.AccountGroup
      .findByIdAndRemove(outcome.previousId)
      .exec(function(err) {
        if (err) {
          return next(err);
        }
        res.redirect('admin/account-groups/' + outcome.newId + '/');
      });
  };

  var asyncFinally = function(err) {
    if (err) {
      return next(err);
    }
  };

  require('async').series([getPreviousRecord, makeNewRecord, deletePreviousRecord], asyncFinally);
};
It works fine, but I can't make this work normally on the front end: it returns both the old route and the new route concatenated, for example:
PUT /admin/account-groups/customers22/admin/account-groups/Customers2233/ 404 213.749 ms - 31
where customers22 is the old _id and customers2233 is the new _id. If I navigate from another page to the new entry, it resolves the route normally.
On client side:
(function() {
  'use strict';

  app = app || {};

  app.Details = Backbone.Model.extend({
    idAttribute: '_id',
    defaults: {
      success: false,
      errors: [],
      errfor: {},
      name: ''
    },
    url: function() {
      return '/admin/account-groups/' + app.mainView.model.id + '/';
    },
    parse: function(response) {
      if (response.accountGroup) {
        app.mainView.model.set(response.accountGroup);
        delete response.accountGroup;
      }
      return response;
    }
  });

  app.DetailsView = Backbone.View.extend({
    el: '#details',
    events: {
      'click .btn-update': 'update'
    },
    template: Handlebars.compile( $('#tmpl-details').html() ),
    initialize: function() {
      this.model = new app.Details();
      this.syncUp();
      this.listenTo(app.mainView.model, 'change', this.syncUp);
      this.listenTo(this.model, 'sync', this.render);
      this.render();
    },
    syncUp: function() {
      this.model.set({
        _id: app.mainView.model.id,
        name: app.mainView.model.get('name')
      });
    },
    render: function() {
      this.$el.html(this.template( this.model.attributes ));
      for (var key in this.model.attributes) {
        if (this.model.attributes.hasOwnProperty(key)) {
          this.$el.find('[name="' + key + '"]').val(this.model.attributes[key]);
        }
      }
    },
    update: function() {
      this.model.save({
        name: this.$el.find('[name="name"]').val()
      });
    }
  });

  app.MainView = Backbone.View.extend({
    el: '.page .container',
    initialize: function() {
      app.mainView = this;
      this.model = new app.AccountGroup( JSON.parse( unescape($('#data-record').html()) ) );
      // ...
      app.detailsView = new app.DetailsView();
    }
  });

  $(document).ready(function() {
    app.mainView = new app.MainView();
  });
}());
It probably requires triggering both model.save and model.destroy, or preventing the URL from being used. Any advice on how to do it is appreciated, thank you.
Edit
Just a typo here, not related to the question; I was checking the routes carelessly. Consider this cancelled.
I believe the problem is here:
res.redirect('admin/account-groups/' + outcome.newId + '/');
That's a relative path so it'll be appended onto the current URL. I suspect you want something like this:
res.redirect('/admin/account-groups/' + outcome.newId + '/');
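To illustrate the difference (a sketch, not from the original answer): the browser resolves a relative Location header against the current URL, while a leading slash replaces the path entirely.
// Current URL: /admin/account-groups/customers22/

res.redirect('admin/account-groups/customers2233/');
// resolves to /admin/account-groups/customers22/admin/account-groups/customers2233/

res.redirect('/admin/account-groups/customers2233/');
// resolves to /admin/account-groups/customers2233/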

node js mongo db dependencies (doc not being found)

I have the following code:
var method = PushLoop.prototype;
var agent = require('./_header')
var request = require('request');
var User = require('../models/user_model.js');
var Message = require('../models/message_model.js');
var async = require('async')
function PushLoop() {};
method.startPushLoop = function() {
getUserList()
function getUserList() {
User.find({}, function(err, users) {
if (err) throw err;
if (users.length > 0) {
getUserMessages(users)
} else {
setTimeout(getUserList, 3000)
}
});
}
function getUserMessages(users) {
// console.log("getUserMessages")
async.eachSeries(users, function (user, callback) {
var params = {
email: user.email,
pwd: user.password,
token: user.device_token
}
messageRequest(params)
callback();
}, function (err) {
if (err) {
console.log(err)
setTimeout(getUserList, 3000)
}
});
}
function messageRequest(params) {
var url = "https://voip.ms/api/v1/rest.php?api_username="+ params.email +"&api_password="+ params.pwd +"&method=getSMS&type=1&limit=5"
request(url, function(err, response, body){
if (!err) {
var responseObject = JSON.parse(body);
var messages = responseObject.sms
if (responseObject["status"] == "success") {
async.eachSeries(messages, function(message, callback){
console.log(params.token)
saveMessage(message, params.token)
callback();
}, function(err) {
if (err) {
console.log(err)
}
// setTimeout(getUserList, 3000)
})
} else {
// setTimeout(getUserList, 3000)
}
} else {
console.log(err)
// setTimeout(getUserList, 3000)
}
});
setTimeout(getUserList, 3000)
}
function saveMessage(message, token) {
// { $and: [ { price: { $ne: 1.99 } }, { price: { $exists: true } }
// Message.find({ $and: [{ message_id: message.id}, {device_token: token}]}, function (err, doc){
Message.findOne({message_id: message.id}, function (err, doc){
if (!doc) {
console.log('emtpy today')
var m = new Message({
message_id: message.id,
did: message.did,
contact: message.contact,
message: message.message,
date: message.date,
created_at: new Date().toLocaleString(),
updated_at: new Date().toLocaleString(),
device_token: token
});
m.save(function(e) {
if (e) {
console.log(e)
} else {
agent.createMessage()
.device(token)
.alert(message.message)
.set('contact', message.contact)
.set('did', message.did)
.set('id', message.id)
.set('date', message.date)
.set('message', message.message)
.send();
}
});
}
}) //.limit(1);
}
};
module.exports = PushLoop;
This actually works perfectly fine in my development environment. However, in production (I'm using OpenShift) the mongo documents get saved in an endless loop, so it looks like the if (!doc) condition always returns true and the document therefore gets created each time. Not sure if this could be a mongoose issue; I also tried the find method instead of findOne. My dev env has Node 0.12.7 and OpenShift has 0.10.x; this could be the issue, and I'm still investigating. But if anybody can spot an error I cannot see in my logic/code, please let me know.
Thanks!
I solved this issue by using a 'series'-like pattern and the shift method on the users array. The mongoose upsert findOneOrCreate is good; however, a found document and a newly created one are both returned the same way, so I could not distinguish a newly inserted doc from a found doc. So I used the same findOne function, which returns null if no doc is found, and in that case I create the doc and send the push notification. Still a bit ugly, and I know I could have used promises or the async lib; I might refactor in the future. This works for now.
function PushLoop() {};

var results = [];

method.go = function() {
  var userArr = [];
  startLoop()

  function startLoop() {
    User.find({}, function(err, users) {
      if (err) throw err;
      users.forEach(function(u) {
        userArr.push(u)
      })

      function async(arg, callback) {
        var url = "https://voip.ms/api/v1/rest.php?api_username=" + arg.email + "&api_password=" + arg.password + "&method=getSMS&type=1&limit=5"
        request.get(url, {timeout: 30000}, function(err, response, body) {
          if (!err) {
            var responseObject = JSON.parse(body);
            var messages = responseObject.sms
            var status = responseObject.status
            if (status === "success") {
              messages.forEach(function(m) {
                var message = new Message({
                  message_id: m.id,
                  did: m.did,
                  contact: m.contact,
                  message: m.message,
                  date: m.date,
                  created_at: new Date().toLocaleString(),
                  updated_at: new Date().toLocaleString(),
                  device_token: arg.device_token
                });
                var query = { $and: [{message_id: m.id}, {device_token: arg.device_token}] }
                var query1 = { message_id: m.id }
                Message.findOne(query).lean().exec(function (err, doc) {
                  if (!doc || doc == null) {
                    message.save(function(e) {
                      console.log("message saved")
                      if (e) {
                        console.log("there is an error")
                        console.log(e)
                      } else {
                        console.log(message.device_token)
                        var messageStringCleaned = message.message.toString().replace(/\\/g, "");
                        var payload = {
                          "contact": message.contact,
                          "did": message.did,
                          "id": message.message_id,
                          "date": message.date,
                          "message": messageStringCleaned
                        }
                        var note = new apns.Notification();
                        var myDevice = new apns.Device(message.device_token);
                        note.expiry = Math.floor(Date.now() / 1000) + 3600; // Expires 1 hour from now.
                        note.badge = 3;
                        note.alert = messageStringCleaned;
                        note.payload = payload;
                        apnsConnection.pushNotification(note, myDevice);
                      }
                    })
                  }
                });
              });
            } else {
              console.log(err)
            }
          }
        });
        setTimeout(function() {
          callback(arg + "testing 12");
        }, 1000);
      }

      // Final task (same in all the examples)
      function series(item) {
        if (item) {
          async(item, function(result) {
            results.push(result);
            return series(userArr.shift());
          });
        } else {
          return final();
        }
      }

      function final() {
        console.log('Done');
        startLoop();
      }

      series(userArr.shift())
    });
  }
}

module.exports = PushLoop;
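As an aside (not part of the original answer): the find-then-save race can also be removed at the database level with an atomic upsert, so a message is inserted only when no matching document exists, no matter how many polls overlap. A sketch using Mongoose's update with $setOnInsert (m and arg as in the code above):
Message.update(
  { message_id: m.id, device_token: arg.device_token },  // match criteria
  { $setOnInsert: {
      did: m.did,
      contact: m.contact,
      message: m.message,
      date: m.date,
      created_at: new Date().toLocaleString(),
      updated_at: new Date().toLocaleString()
  } },
  { upsert: true },
  function(err, result) {
    if (err) return console.log(err);
    // Depending on the Mongoose version, the raw result reports whether an
    // upsert happened; only then should the push notification be sent.
  }
);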

Export HTTP request as module function Node.js

I've tried (with success) to make an HTTP request. I have some REST APIs as targets, like graph.facebook.com. The functionality is the same. When I make an HTTP request with Node.js as a simple program, I can do it.
What I really want is to make a little module, and I have this code:
// file: facebook.js
var http = require('http');

var Facebook = (function() {
  function Facebook(access_token) {
    this.access_token = access_token;
  }

  Facebook.prototype.getObject = function(id) {
    var options;
    this.id = id;
    options = {
      host: 'graph.facebook.com',
      port: 80,
      path: '/' + id + '?access_token=' + this.access_token
    };
    return http.request(options, function(response) {
      response.on('data', function(d) {
        return JSON.parse(d);
      });
      request.end();
      return request.on('error', function(err) {
        return {
          error: 'An error ocurred.'
        };
      });
    });
  };

  return Facebook;
})();

module.exports = Facebook;
Afterwards, when I write a program, I can do this:
var facebook = require('./facebook.js');
var fb = facebook('my_Access_token');
// Here's the problem:
var response = fb.getObject('My_facebook_profile_ID');
I get a response like
{ domain: null,
  _events:
   { response: { [Function: g] listener: [Function] },
     socket: { [Function: g] listener: [Function] } },
  ....
whereas I should get something like
{
  "id": "MyIDNumer",
  "first_name": "Orlando",
  "gender": "male",
  "last_name": "Sanchez",
  "link": "https://www.facebook.com/MyFacebookName",
  "locale": "es_LA",
  "name": "Orlando S\u00e1nchez",
  "username": "MyUsername"
}
What can I do?
The first thing you should do is rewrite the module to not use the same function name twice ('Facebook').
Secondly, there's no need for the closure; just export the constructor.
Thirdly, you are trying to return a value from an asynchronous callback. This particular issue is common with people coming from the synchronous world.
Assuming you wanted to do a GET request, here's your module refactored after the above and other things are fixed:
// file: facebook.js
var http = require('http');

function Facebook(access_token) {
  if (!(this instanceof Facebook))
    return new Facebook(access_token);
  this.access_token = access_token;
}

Facebook.prototype.getObject = function(id, cb) {
  var options;
  this.id = id;
  options = {
    host: 'graph.facebook.com',
    port: 80,
    path: '/' + id + '?access_token=' + this.access_token
  };
  http.get(options, function(res) {
    var buf = '',
        hadError = false;
    res.on('data', function(d) {
      buf += d;
    }).on('error', function(err) {
      hadError = true;
      cb(err);
    }).on('end', function() {
      if (hadError)
        return;
      var val;
      try {
        val = JSON.parse(buf);
      } catch (err) {
        return cb(err);
      }
      cb(null, val);
    });
  });
};

module.exports = Facebook;
Then use it like so:
var facebook = require('./facebook.js');
var fb = facebook('my_Access_token');

fb.getObject('My_facebook_profile_ID', function(err, response) {
  if (err) {
    // include better error handling here
    return console.log(err);
  }
  // use response here
  console.dir(response);
});
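If a newer Node version is available, the same module can also expose a promise-based variant (a sketch, not part of the original answer):
Facebook.prototype.getObjectAsync = function(id) {
  var self = this;
  return new Promise(function(resolve, reject) {
    self.getObject(id, function(err, response) {
      if (err) return reject(err);
      resolve(response);
    });
  });
};

// Usage:
fb.getObjectAsync('My_facebook_profile_ID')
  .then(function(response) { console.dir(response); })
  .catch(function(err) { console.log(err); });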
