nodejs: setInterval for custom function

I have the following function:
getHostlist() {
    fs.readFile('out/hostlist', 'utf8', (err, data) => {
        if (err) throw err;
        var hostlist = data.split(',');
        this.getHostStats(hostlist);
    });
},
It is exported. In my index.js I'm trying to call it with setInterval every minute:
setInterval(remote.getHostlist, config.app.refresh);
The second parameter contains 60000, so I try to run it every minute.
But when it runs I'm receiving the following error:
TypeError: this.getHostStats is not a function
When I run the function manually, without the interval, it works and getHostStats is called.
Where is the problem here?
EDIT: FULL EXPORT SCRIPT:
module.exports = {
    getHostlist() {
        console.log("Starting Usage synchronization at " + currTime());
        fs.readFile('out/hostlist', 'utf8', (err, data) => {
            if (err) throw err;
            var hostlist = data.split(',');
            getHostStats(hostlist);
        });
    },
    ....
}
If I wrote everything down here it would be too long.

I believe in this scenario "this" refers to wherever your setInterval callback is running, so "this" is no longer remote. Try to get an explicit reference to remote.getHostStats and use that instead.
//EDIT 1//
It may be worth wrapping your export functions into a class, creating an object of that class and then exporting the object. That should guarantee that "this" works in your context.
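For example, a minimal sketch of getting that explicit reference, assuming the module is required as remote (the path is hypothetical) and config.app.refresh holds the interval in milliseconds:
var remote = require('./remote'); // hypothetical module path
// Option 1: bind the method so "this" stays attached to the exported object
setInterval(remote.getHostlist.bind(remote), config.app.refresh);
// Option 2: wrap the call in an anonymous function, preserving the method-call form
setInterval(function () {
    remote.getHostlist();
}, config.app.refresh);
Either way, this.getHostStats is resolved against the exported object instead of the timer's calling context.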

Try this:
module.exports = {
    getHostlist: function getHostlist() {
        console.log("Starting Usage synchronization at " + currTime());
        fs.readFile('out/hostlist', 'utf8', (err, data) => {
            if (err) throw err;
            var hostlist = data.split(',');
            getHostStats(hostlist);
        });
    }
}
This could fix your issue.

Related

async.parallel: Type Error, task is not a function

What is wrong with this code?
var async = require("async");
var fs = require("fs");
async.parallel(['calc.js', 'index.js'], fs.stat, function (err, results) {
    if (err) {
        console.log(err);
    } else {
        console.log(results);
    }
});
When I try to run this little piece of code, I get the error mentioned below:
/home/gaurav/node-exp/node_modules/async/dist/async.js:4819
task(rest(function (err, args) {
^
TypeError: task is not a function
I was following a Node.js tutorial and I am very new to Node.js, so pardon me if the question is very naive.
What you are trying to do is map the file names onto fs.stat. You missed the call to the map function in your code.
async.map([file1, file2], fs.stat, function () {...
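For reference, a runnable sketch of that change, assuming the async module is installed and the two files exist in the working directory:
var async = require("async");
var fs = require("fs");
// async.map applies fs.stat to each file name and collects the results in order
async.map(['calc.js', 'index.js'], fs.stat, function (err, results) {
    if (err) {
        console.log(err);
    } else {
        // results is an array of fs.Stats objects, one per file
        console.log(results);
    }
});
async.parallel, by contrast, expects an array of functions as its first argument, which is why passing file names to it raises "task is not a function".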

Make Node.js code synchronous in Mongoose while iterating

I am learning Node.js; due to the asynchronous nature of Node.js I am facing an issue:
domain.User.find({userName: new RegExp(findtext, 'i')}).sort('-created').skip(skip).limit(limit)
    .exec(function(err, result) {
        for (var i = 0; i < result.length; i++) {
            console.log("result is ", result[i].id);
            var camera = null;
            domain.Cameras.count({"userId": result[i].id}, function (err, cameraCount) {
                if (result.length - 1 == i) {
                    configurationHolder.ResponseUtil.responseHandler(res, result, "User List ", false, 200);
                }
            });
        }
    })
I want to use result in the Cameras callback but it is an empty array there, so is there any way to get it?
And since this code is asynchronous, is it possible to make the complete function synchronous?
#jmingov is right. You should make use of the async module to execute parallel requests to get the counts for each user returned in the User.find query.
Here's a flow for demonstration:
var Async = require('async'); // At the top of your js file.
domain.User.find({userName: new RegExp(findtext, 'i')}).sort('-created').skip(skip).limit(limit)
    .exec(function(err, result) {
        var cameraCountFunctions = [];
        result.forEach(function(user) {
            if (user && user.id) {
                console.log("result is ", user.id);
                var camera = null; // What is this for?
                cameraCountFunctions.push(function(callback) {
                    domain.Cameras.count({"userId": user.id}, function (err, cameraCount) {
                        if (err) return callback(err);
                        callback(null, cameraCount);
                    });
                });
            }
        });
        Async.parallel(cameraCountFunctions, function (err, cameraCounts) {
            console.log(err, cameraCounts);
            // cameraCounts is an array with the counts for each user.
            // Evaluate and return the results here.
        });
    });
Always try to do async programming when working with Node.js; this is a must, or you'll end up with big performance problems.
Check this module: https://github.com/caolan/async; it can help.
Here is the trouble in your code:
domain.Cameras.count({
    "userId": result[i].id
}, function(err, cameraCount) {
    // The function used as the callback has 'cameraCount' as its argument, so
    // mongoose will store the results there.
    if (cameraCount.length - 1 == i) { // here is the problem:
        // 'result' isn't available here; it should be named 'cameraCount'
        configurationHolder.ResponseUtil.responseHandler(res, cameraCount, "User List ", false, 200);
    }
});
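If you prefer not to pull in the async module for this one spot, a rough counter-based sketch (reusing the responseHandler call from the question) could look like this:
var finished = 0;
result.forEach(function (user) {
    domain.Cameras.count({"userId": user.id}, function (err, cameraCount) {
        if (err) { console.log(err); }
        finished++;
        // Respond only once every count query has called back
        if (finished === result.length) {
            configurationHolder.ResponseUtil.responseHandler(res, result, "User List ", false, 200);
        }
    });
});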

NodeJS and parallel flow

I'm new to NodeJS. An issue that confuses me is parallel flow. I read an example that shows this snippet as a technique for controlling parallel flow:
var fs = require('fs');
var filesDir = './files';
var tasks = [];
fs.readdir(filesDir, function (err, files) {
    if (err) throw err;
    for (var index in files) {
        var task = (function (file) {
            return function () {
                fs.readFile(file, function (err, text) {
                    if (err) throw err;
                    doSomething();
                });
            };
        })(filesDir + '/' + files[index]);
        tasks.push(task);
    }
    for (var index in tasks) {
        tasks[index]();
    }
});
This code works like a charm, but when I replace it with
for (var index in files) {
    var task = function () {
        console.log(files[index]);
        fs.readFile(filesDir + '/' + files[index], function (err, text) {
            if (err) throw err;
            doSomething();
        });
    };
    tasks.push(task);
}
for (var index in tasks) {
    tasks[index]();
}
it doesn't work as I expected, because files[index] in the loop is always the last file in the directory. Could you please explain to me what the real flow is?
In short, the function you created holds a reference to the index variable (not its value), so by the time it is executed, index already points to the last file in the directory.
Some links: Understanding variable capture by closures in Javascript/Node
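To illustrate, a small sketch of the two usual fixes, capturing the current value per iteration either with an IIFE (as in the working snippet) or with a block-scoped let on newer Node versions:
// IIFE: copies the current file path into a new scope for each iteration
for (var index in files) {
    (function (file) {
        tasks.push(function () {
            fs.readFile(file, function (err, text) {
                if (err) throw err;
                doSomething();
            });
        });
    })(filesDir + '/' + files[index]);
}
// ES6 alternative: let creates a fresh binding of index for every iteration
for (let index in files) {
    tasks.push(function () {
        fs.readFile(filesDir + '/' + files[index], function (err, text) {
            if (err) throw err;
            doSomething();
        });
    });
}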
It's because the index reference will point to the last file. Node.js is asynchronous, so it will not wait until the read file operation is completed; it will keep incrementing the index value.
for (var index in files) {
    var task = function () {
        console.log(files[index]);
        fs.readFile(filesDir + '/' + files[index], function (err, text) {
            if (err) throw err;
            doSomething();
        });
    };
    tasks.push(task);
}
The first code uses closures and passes the current indexed file to a function: it takes the current indexed file and returns a function with that file as input.
Now that returned function will execute in parallel.

Node - how to wait on async operations?

Sorry, just starting with node. This might be a very novice question.
Let's say I have some code which reads some files from a directory in the file system:
var fs = require('fs');
fs.readdir(__dirname + '/myfiles', function (err, files) {
    if (err) throw err;
    files.forEach(function (fileName) {
        fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
            if (err) throw err;
            console.log('finished reading file ' + fileName + ': ' + data);
            module.exports.files.push(data);
        });
    });
});
Note that all of this occurs asynchronously. Let's also say I have a Mocha test which executes this code:
describe('fileProvider', function () {
    describe('#files', function () {
        it.only('files array not empty', function () {
            assert(fileProvider.files.length > 0, 'files.length is zero');
        });
    });
});
The mocha test runs before the files are finished being read. I know this because I see the console.log statement after I see the little dot that indicates a mocha test being run (at least I think that is what is being indicated). Also, if I surround the assert with a setTimeout, the assert passes.
How should I structure my code so that I can ensure the async file operations are completed? Note that this is not just a problem with testing - I need the files to be loaded fully before I can do real work in my app as well.
I don't think the right answer is to read files synchronously, because that will block the Node request / response loop, right?
Bonus question:
Even if I put the assert in a setTimeout with a 0 timeout value, the test still passes. Is this because just putting it in a setTimeout kicks it to the end of the processing chain or something so the filesystem work finishes first?
You can implement a completion callback that fires after all files have been read.
exports.files = [];
exports.initialize = initialize;

function initialize(callback) {
    var fs = require('fs');
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) throw err;
        files.forEach(function (fileName) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) throw err;
                console.log('finished reading file ' + fileName + ': ' + data);
                exports.files.push(data);
                // Fire the callback once every file has been read
                if (exports.files.length == files.length) {
                    callback();
                }
            });
        });
    });
}
You can call the file operation method by doing something like:
var f = require('./files.js');
if (f.files.length < 1) {
    console.log('initializing');
    f.initialize(function () {
        console.log('After: ' + f.files.length);
        var another = require('./files.js');
        console.log('Another module: ' + another.files.length);
    });
}
EDIT: Since you want to only have to call this once, you could initialize it once when the application loads. According to Node.js documentation, modules are cached after the first time they are loaded. The two above examples have been edited as well.
To avoid getting caught up in nested callbacks, you might want to use async's each, which will allow you to do the tasks asynchronously in a non-blocking manner:
https://github.com/caolan/async#each
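As a rough sketch of that approach, assuming the same myfiles directory as above and that the async module is installed:
var async = require('async');
var fs = require('fs');

exports.files = [];

exports.initialize = function (callback) {
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) return callback(err);
        // async.each starts all reads in parallel and calls back once every one has finished
        async.each(files, function (fileName, done) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) return done(err);
                exports.files.push(data);
                done();
            });
        }, callback);
    });
};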
I think that's a good test; the same thing would happen in any app that used your module, i.e. its code could run before files is set. What you need to do is create a callback like #making3 suggests, or use promises. I haven't used Mocha, but there's a section in its docs on asynchronous calls. You could export the promise itself:
module.exports.getFiles = new Promise((resolve, reject) => {
    var datas = [];
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) {
            reject(err);
            return;
        }
        files.forEach(function (fileName) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) {
                    reject(err);
                    return;
                }
                console.log('finished reading file ' + fileName + ': ' + data);
                datas.push(data);
                // Resolve once every file has been read
                if (datas.length == files.length) {
                    resolve(datas);
                }
            });
        });
    });
});
chai-as-promised lets you work directly with promises using eventually, or you can use the done callback passed to your test, I think:
describe('fileProvider', function () {
    describe('#files', function () {
        it.only('files array not empty', function (done) {
            fileProvider.getFiles.then(function (value) {
                assert(value.length > 0, 'files.length is zero');
                done();
            }, function (err) {
                done(err);
            });
        });
    });
});

Process.nextTick or child_process?

I am trying to allow users to export their contact list in CSV format. I am confused about how to run the export_connect_csv() function: should I put it in a child process or in process.nextTick?
function export_connect_csv(user_id, file_location) {
    mysqlPool.getConnection(function(err, connection) {
        var csv_row = "Email,First Name,Last Name,Status,Created\n";
        function processRow(row) {
            var csv_row = row.email + ',' + row.first_name + ',' + row.last_name + ',' + row.status + ',' + row.created + "\n";
            fs.appendFile(file_location, csv_row, function (err) {
                if (err) {
                    throw err;
                }
            });
        }
        fs.appendFile(file_location, csv_row, function (err) {
            if (err) {
                throw err;
            }
            var query = connection.query('SELECT * FROM contacts where user_id = "' + user_id + '"');
            query
                .on('error', function(err) {
                    // handle error
                })
                .on('fields', function(fields) {
                })
                .on('result', function(row) {
                    processRow(row);
                })
                .on('end', function() {
                    // email now
                    console.log('done');
                });
        });
    });
}
var exportContacts = function(req, res) {
    var user_id = req.params.user_id || 0;
    export_connect_csv(user_id);
    res.json({});
};
You don't need to use either; you can just call the function. All of that code will run asynchronously, both getConnection and fs.appendFile. However, you will run into a conflict if two users try to export at the same time. You have the following options:
1) Pass a unique file_location every time you call that function
2) Keep things exactly as they are and use fs.appendFileSync to make sure the writes don't overlap each other, but that would block you
3) Or, probably the best solution, do what you intended to do with process.nextTick, but use setImmediate and appendFileSync instead, so you can synchronize writes from several users simultaneously (write only one row at a time to avoid blocking for long periods):
setImmediate(function () {
    fs.appendFileSync('filename', JUST_A_SINGLE_ROW);
});
This is because a recursive process.nextTick can starve the event loop and effectively block you (hence the use of setImmediate), and you need to use fs.appendFileSync because two users might write to the same file simultaneously.
More on setImmediate vs nextTick:
setImmediate vs. nextTick
More info on appendFile: http://nodejs.org/api/fs.html#fs_fs_appendfile_filename_data_options_callback
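Applied to the query stream in the question, a rough sketch of option 3 could look like this (the per-user file_location handling is left as in the original):
function processRow(row, file_location) {
    var csv_row = row.email + ',' + row.first_name + ',' + row.last_name + ',' +
        row.status + ',' + row.created + "\n";
    // Defer the write so the current I/O callback can return quickly,
    // and use the sync variant so rows from concurrent exports cannot interleave.
    setImmediate(function () {
        fs.appendFileSync(file_location, csv_row);
    });
}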
