I am new to Node.js and trying to concatenate multiple CSS files on the fly while coding. The package chokidar allows me to call a function when a file is modified, but I have a problem with the execution order.
var goconcat =
  fs.readdir(paths, function (err, files) {
    if (err) {console.log(err);}
    fs.unlink(paths + 'concat.css', function (err) {
      if (err) throw err;
      var list = files.map(function (files) {
        return path.join(paths, files);
      });
      concat(list, paths + 'concat.css', function(err) {
        if (err) throw err;
      });
    });
  });
I want to first delete the previous file, then read the directory, and then write a new "concat.css". However, I get an error:
Error: ENOENT: no such file or directory, open 'public/css/concat.css'
at error (native)
It appears that the function concat() is executed before the directory is updated, not after, and therefore it is trying to concatenate a file that has just been deleted. Why?
I know that Node.js executes functions asynchronously, but I can't find a way to solve this problem. I tried async but I can't declare a variable between two functions, and I couldn't manage to make it work.
If it cannot happen inside a callback, the setTimeout(fn, 0) trick may help make sure it's executed after the variable assignment.
var goconcat =
  fs.readdir(paths, function (err, files) {
    if (err) {console.log(err);}
    fs.unlink(paths + 'concat.css', function (err) {
      if (err) throw err;
      var list = files.map(function (files) {
        return path.join(paths, files);
      });
      setTimeout(function() {
        concat(list, paths + 'concat.css', function(err) {
          if (err) throw err;
        });
      }, 0);
    });
  });
The problem you're having is that your concat() function is being invoked before the file is deleted by unlink. You can prevent this with nested callbacks; however, you can probably get better control flow if you use a module like async, and save yourself from dealing with callback hell.
Below is an example of how you can use the async module.
var fs = require('fs');
var async = require('async');
var myDir = __dirname + '/data';

async.waterfall([function(callback) {
    fs.readdir(myDir, 'utf-8', function(error, files) {
        if (error) {
            return callback(error);
        }
        return callback(null, files);
    });
}, function(files, callback) {
    fs.open(myDir + '/myFile', 'wx', function(error, f) {
        if (error && error.code === 'EEXIST') {
            return callback(null, 'EEXIST');
        }
        return callback(null, 'CREATE');
    });
}, function(fileStatus, callback) {
    if (fileStatus === 'EEXIST') {
        console.log('File exists. Deleting file...');
        fs.unlink(myDir + '/myFile', function(error) {
            if (error) {
                return callback(error);
            } else {
                return callback(null);
            }
        });
    } else {
        console.log('File does not exist...');
        return callback(null);
    }
}, function(callback) {
    fs.writeFile(myDir + '/myFile', "Hello World", function(err) {
        if (err) {
            return callback(err);
        }
        return callback(null, 'File Created');
    });
}], function(error, results) {
    if (error) {
        return console.error(error);
    }
    console.log(results);
});
The waterfall function runs the tasks array of functions in series,
each passing their results to the next in the array. However, if any
of the tasks pass an error to their own callback, the next function is
not executed, and the main callback is immediately called with the
error.
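For instance, a minimal sketch of that short-circuit behaviour: the second task below never runs, and the final callback receives the error directly.
var async = require('async');

async.waterfall([
    function(callback) {
        // passing an error here skips every remaining task
        return callback(new Error('something went wrong'));
    },
    function(callback) {
        // never reached
        return callback(null, 'done');
    }
], function(error, result) {
    console.error(error.message); // "something went wrong"; result is undefined
});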
Related
Let's say, for example, I want to write a Node.js program where I have two or three independent parts like fs.readdir, fs.copy, etc. operating on different locations, but the result of all three actions is to be sent to a JSON file like this:
var fs = require('fs-extra');
var jsd = {
    "act1" : false,
    "act2" : false,
    "act3" : false
};
fs.readdir(path1, function (err, files) {
    if (err) jsd.act1 = err;
    for (x in files) console.log(files[x]);
    jsd.act1 = true;
});
fs.copy(path2, path3, function (err) {
    if (err) jsd.act2 = err;
    jsd.act2 = true;
});
fs.remove(path4, function (err) {
    if (err) jsd.act3 = err;
    jsd.act3 = true;
});
// all three of the above actions are independent, so it makes sense that all of them are executed asynchronously.
// Now we write jsd object to a json file; jsd's contents are dependent on the above actions though
fs.writeJson("./data.json", jsd, function (err, files) {
    if (err) return console.error(err);
});
How do I make sure that the correct data is written to data.json, i.e. that fs.writeJson executes only after all the previous actions have completed?
I know one way is to nest all of them, i.e.,
readdir() {
    copy() {
        remove() {
            writeJson();
        }
    }
}
But this may result in callback hell, so is there a better way to do this?
You can use Promises or the async module.
If you use Promises, you must first convert each callback-based function into a Promise, like this:
const reddir = function(path) {
    return new Promise((resolve, reject) => {
        fs.readdir(path, (err, files) => {
            if (err) return reject(err);
            for (x in files) console.log(files[x]);
            resolve(true);
        });
    });
};
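The other two operations from the question (fs-extra's copy and remove) would presumably need the same treatment; a minimal sketch of those wrappers:
const fs = require('fs-extra'); // as in the question

const copy = function(src, dest) {
    return new Promise((resolve, reject) => {
        fs.copy(src, dest, (err) => {
            if (err) return reject(err);
            resolve(true);
        });
    });
};

const remove = function(path) {
    return new Promise((resolve, reject) => {
        fs.remove(path, (err) => {
            if (err) return reject(err);
            resolve(true);
        });
    });
};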
Then you can use:
Promise.all([reddir(path1), copy(path2, path3), remove(path4)])
    .spread((act1, act2, act3) => { // .spread is a bluebird feature
        // writeJson is assumed to be promisified the same way as the wrappers above
        return writeJson('./data.json', { act1: act1, act2: act2, act3: act3 });
    })
    .catch(e => {
        // all errors can be handled here
    });
If you use the async module, you can write it like this:
async.parallel({
    act1: function(cb){
        fs.readdir(path1, (err, files) => {
            if (err) return cb(err);
            for (x in files) console.log(files[x]);
            cb(null, true);
        });
    },
    act2: ...
}, (err, jsd) => { // jsd will be {act1: true, act2: ...}
    if (err) return console.error(err); // handle all the errors from above here
    fs.writeJson("./data.json", jsd, function (err, files) {
        if (err) return console.error(err);
    });
});
I'm trying to read some files from a directory with async.waterfall. It seems to me that I'm doing things right, but I get an error and the readData function is never called. What's wrong?
var fs = require("fs");
var async = require("async");
var folder = "./files/";
try {
async.waterfall([
function readDir(cb) {
fs.readdir(folder, function(err, files) {
cb(err, files);
});
},
function loopFiles(files, cb) {
files.forEach(function(fn) {
console.log("loop " + fn);
cb(null, fn);
});
},
function check(fn, cb) {
console.log("check "+fn);
fs.stat(folder + fn, function(err, stats) {
console.log(stats.isFile());
cb(err, stats, fn);
});
},
function readData(stats, fn, cb) {
console.log("read "+fn);
if (stats.isFile()) {
fs.readFile(folder + fn, "utf-8", function(err, data) {
cb(err, data);
});
}
}
], function(err, result) {
if (err) {
throw err;
}
console.log(result);
});
} catch (err) {
console.log(err);
}
The problem is that you're calling cb(null, fn) in loopFiles() multiple times if files.length > 1. You will probably need to perform a separate async.waterfall() or use some other async.* method for each file.
One other problem is in readData() where you aren't calling cb() in the case that stats.isFile() evaluates to false.
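For example, a rough sketch that handles each file independently with async.each, making sure cb is called exactly once per file (using the same folder path as in your code):
var fs = require("fs");
var async = require("async");
var folder = "./files/";

fs.readdir(folder, function(err, files) {
    if (err) throw err;
    async.each(files, function(fn, cb) {
        fs.stat(folder + fn, function(err, stats) {
            if (err) return cb(err);
            if (!stats.isFile()) return cb(null); // skip non-files, but still call cb
            fs.readFile(folder + fn, "utf-8", function(err, data) {
                if (err) return cb(err);
                console.log("read " + fn);
                cb(null);
            });
        });
    }, function(err) {
        if (err) throw err;
        console.log("all files processed");
    });
});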
I have this async.parallel functionality inside an async.eachSeries call.
I hardcoded an error so I could pass it, to see if it was behaving the way I thought. For some reason, when I pass an error, it doesn't get thrown in the final callback named "doneWithSeries".
async.eachSeries(jsonDataArr, function iterator(item, callback) {
    async.parallel([
        function (cb) {
            if (item.hasOwnProperty('event.type')) {
                var event_type = item['event.type'];
                delete item['event.type'];
                try {
                    var json = JSON.stringify(item);
                }
                catch (err) {
                    throw err;
                }
                fs.writeFile('./enriched_data/' + event_type + '.json', json, function (err) {
                    if (err) {
                        cb(err);
                    }
                    else {
                        cb(null);
                    }
                });
            }
        },
        function (cb) {
            if (item.hasOwnProperty('status_desc')) {
                var status_desc = item['status_desc'];
                delete item['status_desc'];
                try {
                    var json = JSON.stringify(item);
                }
                catch (err) {
                    throw err;
                }
                fs.writeFile('./enriched_data/' + status_desc + '.json', json, function (err) {
                    if (err) {
                        cb(err);
                    }
                    else {
                        cb(null);
                    }
                });
            }
        }
    ],
    function doneWithParallel(err) {
        callback(new Error('throw this baby')); // shouldn't the first incident of error pass the error straight to the doneWithSeries callback below?
    })
},
function doneWithSeries(err) {
    if (err) {
        throw err;
    }
    else {
        console.log('success');
    }
});
Here is a distilled version of the code without anything unnecessary:
var async = require('async');

async.eachSeries(['1', '2'], function (item, callback) {
    async.parallel([
        function (cb) {
            setTimeout(function () {
                cb(null, 'one');
            }, 200);
        },
        function (cb) {
            setTimeout(function () {
                cb(null, 'two');
            }, 100);
        }
    ],
    function doneWithParallel(err, results) {
        console.log('results', results);
        callback(new Error('duh'));
    })
},
function doneWithSeries(err) {
    if (err)
        throw err;
});
Indeed, that works. I can't figure out why my code above doesn't, except perhaps that the array could be empty, even though when I run my code the success message gets logged... weird.
I think that's expected behavior if your list is empty. async will always call the final callback, with no error, even if the input list is empty.
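For instance, a quick way to confirm that:
var async = require('async');

async.eachSeries([], function (item, callback) {
    // never invoked for an empty array
    callback(new Error('duh'));
}, function doneWithSeries(err) {
    // the final callback still runs, and err is not set
    if (err) throw err;
    console.log('success');
});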
I'm trying to design a webpage. I have a function that I call to get all info needed for an individual's home page. A snippet of the code is:
exports.getHomePageData = function(userId, cb) {
    var pageData = {};
    pageData.userFullName = dbUtil.findNameByUserId(userId, function(err){
        if (err) cb(err);
    });
    pageData.classes = dbUtil.findUserClassesByUserId(userId, function(err){
        if (err) cb(err);
    });
    cb(pageData);
}
The problem I'm having is that the cb(pageData) is being called before I even finish setting the elements.
I've seen that people use the async library to solve this, but I was wondering if there was any other way for me to do it without needing more modules.
One possible approach:
exports.getHomePageData = function(userId, cb) {
    var pageData = {},
        filler = function() {
            if ('userFullName' in pageData
                && 'classes' in pageData)
                cb(null, pageData);
        };

    dbUtil.findNameByUserId(userId, function(err, name) {
        if (err) {
            cb(err);
            return;
        }
        pageData.userFullName = name;
        filler();
    });

    dbUtil.findUserClassesByUserId(userId, function(err, classes) {
        if (err) {
            cb(err);
            return;
        }
        pageData.classes = classes;
        filler();
    });
}
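Assuming the module is saved as, say, homePageData.js (a hypothetical name), a caller would then look something like this:
var home = require('./homePageData'); // hypothetical module name

home.getHomePageData('some-user-id', function(err, pageData) { // placeholder userId
    if (err) return console.error(err);
    console.log(pageData.userFullName, pageData.classes);
});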
It looks like dbUtil.findUserClassesByUserId and dbUtil.findNameByUserId are asynchronous methods; that usually indicates that they do not return a value, and instead use the callback to give you the data.
Both functions most likely expect a callback with a signature like the following:
function(err, data) {
// if err is set, an error occurred, otherwise data is set with the result
}
Thus, your function should look like this instead:
exports.getHomePageData = function(userId, cb) {
    dbUtil.findNameByUserId(userId, function(err, userFullName){
        if (err) {
            cb(err);
            return;
        }
        dbUtil.findUserClassesByUserId(userId, function(err, classes){
            if (err) {
                cb(err);
                return;
            }
            var pageData = {
                userFullName: userFullName,
                classes: classes
            };
            cb(pageData);
        });
    });
}
I'm making a function that will check whether two files contain a given string. If both files DON'T contain the string it should return 1, but instead I get undefined :(
Here is my code:
var fs = require("fs");
function get_uniq(string, file1, file2, callback){
fs.readFile(file1, 'utf8', function(err, data1) {
if (err) throw err;
i = data1.search(string);
console.log(i);
if(i == -1){
fs.readFile(file2, 'utf8', function(err, data2) {
if (err) throw err;
j = data2.search(string);
if(j == -1){
return 1;
}
});
}
});
callback();
}
var i = get_uniq("stringThatFilesDoesntContainin", "somefile.txt", "anotherfile.txt", function(){
console.log(i);
});
Any idea what the problem is?
You should not rely on returning a computed value. In Node, functions can execute asynchronously, so a call can return before the work has finished. To run code when the work completes, a callback is given. For example:
fs.readFile(file1, 'utf8', function(err, data1) {...});
The function passed as the last argument is the callback. It is executed when the file has been read. Trying to return the data instead will result in an undefined value.
In your case the returned value will be undefined in all cases, and callback will be executed in parallel with readFile.
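To illustrate with a stripped-down, hypothetical helper: the return value inside the readFile callback goes nowhere, while a callback parameter delivers the result once it exists.
var fs = require('fs');

// WRONG: the outer function returns (undefined) before readFile finishes
function containsWrong(file, string) {
    fs.readFile(file, 'utf8', function(err, data) {
        if (err) throw err;
        return data.search(string) !== -1; // this value is discarded
    });
    // execution reaches here immediately; the function returns undefined
}

// RIGHT: hand the result to a callback when it is available
function containsRight(file, string, callback) {
    fs.readFile(file, 'utf8', function(err, data) {
        if (err) return callback(err);
        callback(null, data.search(string) !== -1);
    });
}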
callback must be called from inside the readFile callbacks for file1 or file2, wherever the work can logically finish. Here are all the places where callback can be added:
function get_uniq(string, file1, file2, callback){
    fs.readFile(file1, 'utf8', function(err, data1) {
        if (err)
        {
            return callback(err);
        }
        else
        {
            i = data1.search(string);
            console.log(i);
            if (i == -1) {
                fs.readFile(file2, 'utf8', function(err, data2) {
                    if (err)
                    {
                        return callback(err);
                    }
                    else
                    {
                        j = data2.search(string);
                        if (j == -1) {
                            callback(false);
                        }
                        else
                            callback(true);
                    }
                });
            }
            else
                callback(false);
        }
    });
}
You can put your return value (true/false) as the argument to callback, or catch the error from inside it. You would then execute the above function like this:
get_uniq("stringThatFilesDoesntContainin", "somefile.txt", "anotherfile.txt", function(value){
console.log(value);
});