Node - how to wait on async operations?

Sorry, just starting with node. This might be a very novice question.
Let's say I have some code which reads some files from a directory in the file system:
var fs = require('fs');
fs.readdir(__dirname + '/myfiles', function (err, files) {
    if (err) throw err;
    files.forEach(function (fileName) {
        fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
            if (err) throw err;
            console.log('finished reading file ' + fileName + ': ' + data);
            module.exports.files.push(data);
        });
    });
});
Note that all of this occurs asynchronously. Let's also say I have a Mocha test which executes this code:
describe('fileProvider', function () {
    describe('#files', function () {
        it.only('files array not empty', function () {
            assert(fileProvider.files.length > 0, 'files.length is zero');
        });
    });
});
The mocha test runs before the files are finished being read. I know this because I see the console.log statement after I see the little dot that indicates a mocha test being run (at least I think that is what is being indicated). Also, if I surround the assert with a setTimeout, the assert passes.
How should I structure my code so that I can ensure the async file operations are completed? Note that this is not just a problem with testing - I need the files to be loaded fully before I can do real work in my app as well.
I don't think the right answer is to read files synchronously, because that will block Node's event loop, right?
Bonus question:
Even if I put the assert in a setTimeout with a 0 timeout value, the test still passes. Is this because just putting it in a setTimeout kicks it to the end of the processing chain or something so the filesystem work finishes first?

You can implement a completion callback that fires after all files have been read.
exports.files = [];
exports.initialize = initialize;

function initialize(callback) {
    var fs = require('fs');
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) throw err;
        files.forEach(function (fileName) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) throw err;
                console.log('finished reading file ' + fileName + ': ' + data);
                exports.files.push(data);
                // fire the completion callback once every file has been read
                if (exports.files.length === files.length) {
                    callback();
                }
            });
        });
    });
}
You can call the file operation method by doing something like:
var f = require('./files.js');
if (f.files.length < 1) {
    console.log('initializing');
    f.initialize(function () {
        console.log('After: ' + f.files.length);
        var another = require('./files.js');
        console.log('Another module: ' + another.files.length);
    });
}
EDIT: Since you want to only have to call this once, you could initialize it once when the application loads. According to Node.js documentation, modules are cached after the first time they are loaded. The two above examples have been edited as well.

To avoid getting caught up in nested callbacks, you might want to use async's each, which lets you run the tasks asynchronously in a non-blocking manner:
https://github.com/caolan/async#each
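For example, adapted to the original code, that might look roughly like this (a sketch, assuming the async package is installed):

var fs = require('fs');
var async = require('async'); // npm install async

exports.files = [];
exports.initialize = function (callback) {
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) return callback(err);
        // read every file in parallel; async tracks completion for us
        async.each(files, function (fileName, cb) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) return cb(err);
                exports.files.push(data);
                cb();
            });
        }, callback); // invoked once all files are read, or on the first error
    });
};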

I think that's a good test; the same thing would happen in any app that used your module, i.e. its code could run before files is set. What you need to do is create a callback like @making3 suggests, or use promises. I haven't used Mocha, but its documentation has a section on asynchronous code. You could export the promise itself:
module.exports.getFiles = new Promise((resolve, reject) => {
    var datas = [];
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) {
            reject(err);
            return;
        }
        files.forEach(function (fileName) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) {
                    reject(err);
                    return;
                }
                console.log('finished reading file ' + fileName + ': ' + data);
                datas.push(data);
                // resolve once the last file has been read
                if (datas.length === files.length) {
                    resolve(datas);
                }
            });
        });
    });
});
chai-as-promised lets you work directly with promises using eventually, or you can use the done callback passed to your test:
describe('fileProvider', function () {
    describe('#files', function () {
        it.only('files array not empty', function (done) {
            fileProvider.getFiles.then(function (value) {
                assert(value.length > 0, 'files.length is zero');
                done();
            }, function (err) {
                done(err);
            });
        });
    });
});
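For reference, the chai-as-promised route mentioned above might look roughly like this (a sketch, assuming chai and chai-as-promised are installed; Mocha waits on a returned promise):

var chai = require('chai');
chai.use(require('chai-as-promised'));

describe('fileProvider', function () {
    describe('#files', function () {
        it('files array not empty', function () {
            // returning the promise lets Mocha handle resolution/rejection
            return chai.expect(fileProvider.getFiles).to.eventually.have.length.above(0);
        });
    });
});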

Related

Node.js function not running in order. Error: Unhandled stream error in pipe

I updated the function to create the CSV file but now I'm getting an error:
In upload function
internal/streams/legacy.js:57
throw er; // Unhandled stream error in pipe.
^
Error: ENOENT: no such file or directory, open 'C:\Users\shiv\WebstormProjects\slackAPIProject\billingData\CSV\1548963844106output.csv'
var csvFilePath = '';
var JSONFilePath = '';

function sendBillingData() {
    var message = '';
    axios.get(url, {
        params: {
            token: myToken
        }
    }).then(function (response) {
        message = response.data;
        fields = billingDataFields;
        // saveFiles(message, fields, 'billingData/');
        saveFilesNew(message, fields, 'billingData/');
        var file = fs.createReadStream(__dirname + '/' + csvFilePath); // <-- make sure this path is correct
        console.log(__dirname + '/' + csvFilePath);
        uploadFile(file);
    })
    .catch(function (error) {
        console.log(error);
    });
}
The saveFilesNew function is:
function saveFilesNew(message, options, folder) {
    try {
        const passedData = message;
        var relevantData = '';
        if (folder == 'accessLogs/') {
            const loginsJSON = message.logins;
            relevantData = loginsJSON;
            console.log(loginsJSON);
        }
        if (folder == 'billingData/') {
            relevantData = passedData.members;
            const profile = passedData.members[0].profile;
        }
        // Save JSON to the output folder
        var date = Date.now();
        var directoryPath = folder + 'JSON/' + date + "output";
        JSONFilePath = directoryPath + '.json';
        fs.writeFileSync(JSONFilePath, JSON.stringify(message, null, 4), function (err) {
            if (err) {
                console.log(err);
            }
        });
        // parse JSON onto the CSV
        const json2csvParser = new Json2csvParser({ fields });
        const csv = json2csvParser.parse(relevantData);
        // console.log(csv);
        // function to process the CSV onto the file
        var directoryPath = folder + 'CSV/' + date + "output";
        csvFilePath = directoryPath + '.csv';
        let data = [];
        let columns = {
            real_name: 'real_name',
            display_name: 'display_name',
            email: 'email',
            account_type: 'account_type'
        };
        var id = passedData.members[0].real_name;
        console.log(id);
        console.log("messageLength is" + Object.keys(message.members).length);
        for (var i = 0; i < Object.keys(message.members).length; i++) {
            console.log("value of i is" + i);
            var display_name = passedData.members[i].profile.display_name;
            var real_name = passedData.members[i].profile.real_name_normalized;
            var email = passedData.members[i].profile.email;
            var account_type = 'undefined';
            console.log("name: " + real_name);
            if (passedData.members[i].is_owner) {
                account_type = 'Org Owner';
            }
            else if (passedData.members[i].is_admin) {
                account_type = 'Org Admin';
            }
            else if (passedData.members[i].is_bot) {
                account_type = 'Bot';
            }
            else account_type = 'User';
            data.push([real_name, display_name, email, account_type]);
        }
        console.log(data);
        stringify(data, { header: true, columns: columns }, (err, output) => {
            if (err) throw err;
            fs.writeFileSync(csvFilePath, output, function (err) {
                console.log(output);
                if (err) {
                    console.log(err);
                }
                console.log('my.csv saved.');
            });
        });
    } catch (err) {
        console.error(err);
    }
}
The upload file function is:
function uploadFile(file) {
    console.log("In upload function");
    const form = new FormData();
    form.append('token', botToken);
    form.append('channels', 'testing');
    form.append('file', file);
    axios.post('https://slack.com/api/files.upload', form, {
        headers: form.getHeaders()
    }).then(function (response) {
        var serverMessage = response.data;
        console.log(serverMessage);
    });
}
So I think the error is caused because Node tries to upload the file before it has been created. I feel like this has something to do with the asynchronous nature of Node.js, but I fail to comprehend how to rectify the code. Please let me know how to correct this, and mention any improvements to the code structure/design too.
Thanks!
You don't wait for the callback provided to stringify to be executed, and that's where you create the file. (Assuming this stringify function really does accept a callback.)
Using callbacks (you could make this cleaner with promises and the neat async/await syntax, but let's stick to callbacks here), it should look more like:
function sendBillingData() {
    ...
    // this callback we'll use to know when the file writing is done, and to get the file path
    saveFilesNew(message, fields, 'billingData/', function (err, csvFilePathArgument) {
        // this we will execute when saveFilesNew calls it, not when saveFilesNew returns, see below
        uploadFile(fs.createReadStream(__dirname + '/' + csvFilePathArgument));
    });
}
// let's name this callback... "callback".
function saveFilesNew(message, options, folder, callback) {
    ...
    var csvFilePath = ...; // local variable only, instead of your global
    ...
    stringify(data, { header: true, columns: columns }, (err, output) => {
        if (err) throw err; // or: return callback(err);
        fs.writeFile(csvFilePath, output, function (err) { // NOT writeFileSync, or no callback needed
            console.log(output);
            if (err) {
                console.log(err);
                // callback(err); may be a useful approach for error handling at a higher level
            }
            console.log('my.csv saved.'); // yes, NOW the CSV is saved, not before this executes! Hence:
            callback(null, csvFilePath); // no error, clean process, pass the file path
        });
    });
    console.log("This line is executed before stringify's callback is called!");
    return; // implicit, yes, yet still synchronous, and that's why your version crashes
}
Using callbacks that are called only when the expected events happen (a file is done writing, a buffer/string is done transforming...) allows JS to keep executing code in the meantime. And it does keep executing code, so when you need data from async code, you have to tell JS to wait until that work is done before running the piece that depends on it.
Also, since you can pass data when calling back (a callback is just a function), here I could avoid relying on a global csvFilePath. Using higher-level variables makes things monolithic; for instance, you could not move saveFilesNew to a dedicated file where you keep your toolkit of file-related functions.
Finally, if your global process is like:
function aDayAtTheOffice() {
    sendBillingData();
    getCoffee();
}
then you don't need to wait for the billing data to be processed before starting making coffee. However, if your boss told you that you could NOT get a coffee until the billing data was settled, then your process would look like:
function aDayAtTheOffice() {
    sendBillingData(function (err) {
        // if (err) let's do nothing here: you wanted a coffee anyway, right?
        getCoffee();
    });
}
(Note that callbacks having potential error as first arg and data as second arg is a convention, nothing mandatory.)
IMHO you should read about scope (the callback argument can be invoked at a time when the call to saveFilesNew is already done and forgotten!), and about the asynchronous nature of No... JavaScript. ;) (Sorry, probably not the best links, but they contain the meaningful keywords, and then Google is your buddy, your friend, your Big Brother.)
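For completeness, here is a rough async/await sketch of the same flow, assuming Node 8+ (util.promisify), the same axios/stringify helpers as in the question, and eliding the CSV-building details:

const fs = require('fs');
const { promisify } = require('util');
const writeFileAsync = promisify(fs.writeFile);
const stringifyAsync = promisify(stringify); // assumes the callback-style stringify from the question

async function saveFilesNew(message, options, folder) {
    // ... build `data`, `columns` and `csvFilePath` exactly as in the question ...
    const output = await stringifyAsync(data, { header: true, columns: columns });
    await writeFileAsync(csvFilePath, output);
    return csvFilePath; // only returned once the CSV is actually on disk
}

async function sendBillingData() {
    const response = await axios.get(url, { params: { token: myToken } });
    const csvFilePath = await saveFilesNew(response.data, billingDataFields, 'billingData/');
    uploadFile(fs.createReadStream(__dirname + '/' + csvFilePath)); // the file exists by now
}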

nodejs: setInterval for custom function

I have the following function:
getHostlist() {
    fs.readFile('out/hostlist', 'utf8', (err, data) => {
        if (err) throw err;
        var hostlist = data.split(',');
        this.getHostStats(hostlist);
    });
},
It is exported.
In my index.js I'm trying to call it with setInterval every minute:
setInterval(remote.getHostlist, config.app.refresh); — the second parameter contains 60000, so I try to run it every minute.
But when it runs I'm receiving the following error:
TypeError: this.getHostStats is not a function
But when I run the function manually, without the interval, it works and getHostStats is called.
Where is the problem here?
EDIT FULL EXPORT SCRIPT:
module.exports = {
    getHostlist() {
        console.log("Starting Usage synchronization at " + currTime());
        fs.readFile('out/hostlist', 'utf8', (err, data) => {
            if (err) throw err;
            var hostlist = data.split(',');
            getHostStats(hostlist);
        });
    },
    ....
}
If I wrote everything down here it would be too long.
I believe in this scenario "this" refers to wherever your setInterval code is running, so "this" is no longer remote. Try to get an explicit reference to remote.getHostStats and use that instead.
//EDIT 1//
It may be worth wrapping your exported functions into a class, creating an object of that class, and then exporting the object. That should guarantee that "this" works in your context.
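For instance, two one-line fixes along those lines (assuming remote is the imported module):

// wrap the call so getHostlist is invoked as a method of `remote`...
setInterval(function () { remote.getHostlist(); }, config.app.refresh);

// ...or bind `this` explicitly:
setInterval(remote.getHostlist.bind(remote), config.app.refresh);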
Try this:
module.exports = {
    getHostlist: function getHostlist() {
        console.log("Starting Usage synchronization at " + currTime());
        fs.readFile('out/hostlist', 'utf8', (err, data) => {
            if (err) throw err;
            var hostlist = data.split(',');
            getHostStats(hostlist);
        });
    }
}
This could fix your issue

node.js svn update/commit synchronously?

I'm using the svn-spawn library to update/commit files to svn. The problem is that my app calls svn up/commit in a loop, and because of the async nature of the call, svn up is called from the next iteration of the loop before the previous svn up can finish.
How to handle this issue? Is there any way to prevent the next call from happening until the previous one is complete?
Figured out a way to do it using async module.
async.series can be used to execute async tasks in a serial fashion.
This is how I did it.
function commitFile(arg, callback) {
    svnClient.getStatus(filePath, function (err, data) {
        //...
        svnClient.commit(['Commit msg', filePath], callback);
        //...
    });
}
var toCommit = [];
for (var i = 0, len = requests.length; i < len; i++) {
    // Adding files to commit; async.apply enables adding arguments to the anonymous function
    toCommit.push(async.apply(function (arg, cb) {
        commitFile(arg, cb);
    }, 'arg1'));
}

async.series(toCommit, function (err, result) {
    console.log('Final callback');
    if (err) {
        console.log('error', err);
    } else {
        console.log('result of this run: ' + result);
    }
});
async.series needs an array of functions which must call a callback once they are done. It uses the callback to determine that the current function is done executing, and only then will it pick the next function to execute.
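As a side note, the anonymous wrapper above is probably unnecessary: since commitFile already has the (arg, callback) signature that async expects, toCommit.push(async.apply(commitFile, 'arg1')); should produce the same partially applied task.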

NodeJS and parallel flow

I'm new to NodeJS. An issue that confuses me is parallel flow. I read an example that shows this snippet as a technique for controlling parallel flow:
var fs = require('fs');
var filesDir = './files';
var tasks = [];

fs.readdir(filesDir, function (err, files) {
    if (err) throw err;
    for (var index in files) {
        var task = (function (file) {
            return function () {
                fs.readFile(file, function (err, text) {
                    if (err) throw err;
                    doSomething();
                });
            };
        })(filesDir + '/' + files[index]);
        tasks.push(task);
    }
    for (var index in tasks) {
        tasks[index]();
    }
});
This code work like a charm, but when I replace it with
for (var index in files) {
    var task = function () {
        console.log(files[index]);
        fs.readFile(filesDir + '/' + files[index], function (err, text) {
            if (err) throw err;
            doSomething();
        });
    };
    tasks.push(task);
}
for (var index in tasks) {
    tasks[index]();
}
It doesn't work as I expected, because files[index] in the loop is always the last file in the directory. Could you please explain to me what the real flow is?
In short, the function you created holds a reference to the index variable (not its value), so when it executes, index already points at the last file in the directory.
Some links: Understanding variable capture by closures in Javascript/Node
It's because the index reference ends up pointing to the last file. Node.js is asynchronous: it will not wait until the read-file operation completes, it will just keep incrementing the index value.
for (var index in files) {
    var task = function () {
        console.log(files[index]);
        fs.readFile(filesDir + '/' + files[index], function (err, text) {
            if (err) throw err;
            doSomething();
        });
    };
    tasks.push(task);
}
The first code uses closures: it passes the current indexed file to an immediately invoked function, which takes that file and returns a function with the file as input.
Those returned functions then execute in parallel, each with its own copy of the file path.
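A common alternative fix, assuming an environment with ES2015 support, is to give each iteration its own binding instead of the wrapper function, for example:

// `let` creates a fresh `index` binding per iteration...
for (let index = 0; index < files.length; index++) {
    tasks.push(function () {
        fs.readFile(filesDir + '/' + files[index], function (err, text) {
            if (err) throw err;
            doSomething();
        });
    });
}

// ...and forEach gives each callback its own `file` argument:
files.forEach(function (file) {
    tasks.push(function () {
        fs.readFile(filesDir + '/' + file, function (err, text) {
            if (err) throw err;
            doSomething();
        });
    });
});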

Reading and returning multiple files in Node.js using fs.readFile

I'm writing a simple request handler to return a pair of css files. Using fs.readFileSync this was easy. However, I'm having difficulty accomplishing the same task using the async version of readFile. Below is my code. Having my response.write() method calls split among two different callbacks seems to be problematic. Can someone point out what I've done wrong? Interestingly this code works if I put response.end() inside of the first else statement. However, that creates a problem in that the second css file does not get returned (because response.end() has already been fired).
function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    fs.readFile('css/bootstrap.css', function (error, content) {
        if (error) {
            console.log(error);
        }
        else {
            response.write(content);
        }
    });
    fs.readFile('css/bootstrap-responsive.css', function (error, content) {
        if (error) {
            console.log(error);
        }
        else {
            response.write(content);
        }
    });
    response.end();
}
The primary issue with what you have is that response.end() gets called right away. You need to only call it after the files have done their response.write calls.
The easiest way would be to use a control flow library. Managing multiple asynchronous callbacks is generally complicated.
https://github.com/joyent/node/wiki/modules#wiki-async-flow
I'm going to use the async library because it's the one I know best.
var fs = require('fs');
var async = require('async');

function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    async.eachSeries(
        // Pass items to iterate over
        ['css/bootstrap.css', 'css/bootstrap-responsive.css'],
        // Pass iterator function that is called for each item
        function (filename, cb) {
            fs.readFile(filename, function (err, content) {
                if (!err) {
                    response.write(content);
                }
                // Calling cb makes it go to the next item.
                cb(err);
            });
        },
        // Final callback after each item has been iterated over.
        function (err) {
            response.end();
        }
    );
}
If you want to accomplish this without a library, or just want another way, this is how I would do it more directly. Basically you keep a count and call end once both file reads have finished.
function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    var count = 0;
    var handler = function (error, content) {
        count++;
        if (error) {
            console.log(error);
        }
        else {
            response.write(content);
        }
        if (count == 2) {
            response.end();
        }
    };
    fs.readFile('css/bootstrap.css', handler);
    fs.readFile('css/bootstrap-responsive.css', handler);
}
You can simply rely on native (ES2015) Promises. The code can be as simple as follows:
var promises = ['file1.css', 'file2.css'].map(function (_path) {
    return new Promise(function (_path, resolve, reject) {
        fs.readFile(_path, 'utf8', function (err, data) {
            if (err) {
                console.log(err);
                resolve(""); // following the same code flow
            } else {
                resolve(data);
            }
        });
    }.bind(this, _path));
});

Promise.all(promises).then(function (results) {
    // Put your callback logic here
    response.writeHead(200, {"Content-Type": "text/css"});
    results.forEach(function (content) { response.write(content); });
    response.end();
});
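Note that the .bind(this, _path) is arguably redundant here: the function passed to map already receives _path in its own scope, so a plain new Promise(function (resolve, reject) { ... }) closing over _path would behave the same way.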
There's a simple, common solution to get them all with one callback.
You can place it anywhere in your project and reuse it in many different cases.
var FS = require('fs');

/**
 * Abstract helper to asyncly read a bulk of files
 * Note that `cb` will receive an array of errors for each file and an array of files' data
 * Keys in the resulting arrays will be the same as in `paths`
 *
 * @param {Array} paths - file paths array
 * @param {Function} cb
 *   @param {Array} errors - a list of file reading errors
 *   @param {Array} data - a list of file content data
 */
function FS_readFiles(paths, cb) {
    var result = [], errors = [], l = paths.length;
    paths.forEach(function (path, k) {
        FS.readFile(path, function (err, data) {
            // decrease the count of files still being waited on
            --l;
            // record the error or the file data at the same index as in `paths`
            err && (errors[k] = err);
            !err && (result[k] = data);
            // invoke cb once all files have been read
            !l && cb(errors.length ? errors : undefined, result);
        });
    });
}
Just pass it a bulk of files and it will return each of them to you as a buffer.
Simple example:
var cssFiles = [
    'css/bootstrap.css',
    'css/bootstrap-responsive.css'
];

function css(response) {
    FS_readFiles(cssFiles, function (errors, data) {
        response.writeHead(200, {"Content-Type": "text/css"});
        data.forEach(function (v) {
            response.write(v);
        });
        response.end();
    });
}
Off-topic: by the way, you'd be better off caching requests like this on a front-end proxy server like nginx or Varnish, since the content never changes.
const fs = require('fs');

function readFilePromise(fileName) {
    return new Promise(function (resolve, reject) {
        fs.readFile(fileName, 'utf-8', function (err, data) {
            if (err) { reject(err); } else {
                resolve(data);
            }
        });
    });
}

Promise.all([readFilePromise("abc.txt"), readFilePromise("dec.txt")]).then(function (out) {
    console.log(out);
});
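One caveat with this snippet: Promise.all rejects as soon as any single read fails, so in practice you would likely want a .catch at the end:

Promise.all([readFilePromise("abc.txt"), readFilePromise("dec.txt")])
    .then(function (out) {
        console.log(out);
    })
    .catch(function (err) {
        console.error(err); // any single failed read lands here
    });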
Async is an awesome lib. However, the standard for these things is moving in the direction of promises for handling multiple asynchronous operations; in fact, in ECMAScript 6 they will be a standard part of the language. There are several libraries that implement promises, including jQuery. However, for Node, I like to use 'q'.
Here is the same code using promises. One note: you might want to move the first writeHead call to coincide with the first successful read.
var Q = require('q');

function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    var defer = Q.defer();
    fs.readFile('css/bootstrap.css', function (error, content) {
        if (error) {
            defer.reject(error);
        }
        else {
            response.write(content);
            defer.resolve();
        }
    });
    defer.promise.then(function () { // this gets executed when the first read succeeds and is written
        var secondDefer = Q.defer();
        fs.readFile('css/bootstrap-responsive.css', function (error, content) {
            if (error) {
                secondDefer.reject(error);
            }
            else {
                response.write(content);
                secondDefer.resolve();
            }
        });
        return secondDefer.promise;
    }, function (error) { // this gets called when the first read fails
        console.log(error);
        // other error handling
    }).done(function () {
        response.end();
    }, function (error) { // this is the error handler when the second read fails
        console.log(error);
        response.end(); // gotta call end anyway
    });
}
