I am using the recursive-readdir module to list all file types in a directory. The code below works, but I get "undefined" displayed before my array.
The code below is in a js file "test.js" and is run from the command line with "node test.js".
Any suggestions on how best to return an array of all the required files without the undefined string being returned?
const recursive = require('recursive-readdir');

exports.file = function() {
    recursive(__dirname, ['!*.md'], function(error, files) {
        var arr = [];
        for (var i = 0; i < files.length; i++) {
            arr.push(files[i]);
        }
        console.log(arr);
    });
};

console.log( exports.file() );
As mentioned in the comments, exports.file does not have a return value, which is why you get undefined when you try to log it.
If you want the caller of exports.file to have access to the files, you need to update exports.file to take a callback that can be invoked with the files as a parameter.
const recursive = require('recursive-readdir');

exports.file = function(callback) {
    recursive(__dirname, ['!*.md'], function(error, files) {
        var arr = [];
        for (var i = 0; i < files.length; i++) {
            arr.push(files[i]);
        }
        console.log(arr);
        callback(error, arr);
    });
};

exports.file(function(error, files) {
    console.log(files);
});
That will log the files with minimal changes to your original code.
Depending on how you need this code to evolve, you can probably simplify this quite a bit. First, do you really need arr in addition to files? If not, you can simply pass files to the callback:
const recursive = require('recursive-readdir');

exports.file = function(callback) {
    recursive(__dirname, ['!*.md'], function(error, files) {
        callback(error, files);
    });
};

exports.file(function(error, files) {
    console.log(files);
});
And now, since your callback to recursive just invokes the callback to exports.file, you can simply pass the exports.file callback straight to recursive:
const recursive = require('recursive-readdir');

exports.file = function(callback) {
    recursive(__dirname, ['!*.md'], callback);
};

exports.file(function(error, files) {
    console.log(files);
});
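If you would rather expose a promise than take a callback, the same callback API can be wrapped by hand. Here is a minimal sketch, assuming only the callback form of recursive-readdir used above:

const recursive = require('recursive-readdir');

// Promise-based variant: wraps the same callback call shown above.
exports.file = function() {
    return new Promise(function(resolve, reject) {
        recursive(__dirname, ['!*.md'], function(error, files) {
            if (error) return reject(error);
            resolve(files);
        });
    });
};

exports.file().then(function(files) {
    console.log(files);
}).catch(console.error);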
Related
I'm having problems handling a file list with async.map. When I pass a list larger than 2045 items, the code throws an error, and there is no problem with the files themselves, because manual runs and debugging with a few files work fine. Maybe it's not the best code example, but I'm still in the learning process.
var insertInDb = function (err, book_data) {
    count = function(book, cb){
        book_db.findOne({identifier:book['identifier']}, function (err, docs) {
            if (docs !== null) {
                cb('Book exists already',null)
            } else {
                book_db.insert(book, function(err){
                    cb(err,book)
                })
            }
        })
    }
    async.map(book_data, count)
};
var epubData = function (epub, nextEpub) {
    var book_data = {};
    epubParser.open(epub, function (err, epub_data) {
        console.log(epub);
        for (var i of epub_data.easy['simpleMeta']) { // <-- ERROR! TypeError: Cannot read property 'easy' of undefined
            for (var attrname in i) {
                if (attrname.indexOf('dc:') !== -1) {
                    book_data[attrname.split(':')[1]] = i[attrname];
                    book_data['file'] = epub;
                }
            }
        }
        nextEpub(err, book_data)
    })
};

async.map(full_files_path.slice(0, 2045), epubData, insertInDb);
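One guess on my part (the error message isn't shown above): async.map starts the iterator on every item at once, so a very large list can exhaust open file handles. If that is what is happening here, async.mapLimit caps how many epubs are open at a time; a minimal sketch with the same epubData and insertInDb:

// Assumption: the failure with large lists comes from unbounded concurrency.
// mapLimit processes at most 10 epubs at a time (the limit value is arbitrary).
async.mapLimit(full_files_path, 10, epubData, insertInDb);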
I have an images model and a users model.
Every image has a user_id field referencing a user, and I want to get the user's picture and name, add them to the image object, and return an array of images.
When I add the author_image field to ONE image I don't get any errors,
but when I loop over all the images the app crashes: imagesData is undefined, and so is userData.
I tried using promises but again I get an error.
What is the best way to do this without the undefined error?
router.route('/images/all')
    .get(function(req,res){
        var response = {};
        var imagesData = {};
        images.find({}).lean().exec(function(err,data){
            // console.log(data);
            imagesData = data;
            if (!err) {
                for (var i = 0; i < imagesData.length; i++) {
                    users.find(({'_id': imagesData[i].user_id}),function(err,userData){
                        console.log(userData);
                        imagesData[i].author_pic = userData[0].profile_image;
                    });
                }
            }
            res.json(imagesData);
        });
    });
What you missed is that find is not a synchronous operation, so execution moves on to the next line immediately after each find call.
Although there are multiple ways to handle such a situation, I tend to use promises (the Q library).
The code would look like this:
var Q = require('q');

images.find({}).lean().exec(function (err, data) {
    // console.log(data);
    imagesData = data;
    var promiseArr = [];
    if (!err) {
        for (var i = 0; i < imagesData.length; i++) {
            // Give each lookup its own deferred, wrapped in a function so the
            // deferred is captured per iteration and not shared across callbacks.
            (function (image) {
                var innerDefer = Q.defer();
                users.find(({'_id': image.user_id}), function (err, userData) {
                    console.log(userData);
                    innerDefer.resolve(userData[0].profile_image);
                });
                promiseArr.push(innerDefer.promise);
            })(imagesData[i]);
        }
    }
    Q.all(promiseArr).then(function (results) {
        for (var i = 0; i < imagesData.length; i++) {
            imagesData[i].author_pic = results[i];
        }
        res.json(imagesData);
    });
});
In this case I am using the Q.all method, which waits for all the find calls to finish and only then runs the callback with the collected results.
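For comparison, here is a minimal sketch of the same idea using native Promises instead of Q (an assumption on my part that native Promises are available; it reuses the same images and users models inside the same route handler so that res is in scope):

images.find({}).lean().exec(function (err, imagesData) {
    // One promise per image; each resolves once the author lookup completes.
    var lookups = imagesData.map(function (image) {
        return new Promise(function (resolve) {
            users.find({'_id': image.user_id}, function (err, userData) {
                image.author_pic = userData[0].profile_image;
                resolve(image);
            });
        });
    });
    Promise.all(lookups).then(function (result) {
        res.json(result);
    });
});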
I am trying to write code with NodeJS where I grab data from an external API and then populate it in MongoDB using Mongoose. In between, I check whether that particular record already exists in Mongo or not. Below is my code.
router.route('/report') // the REST api address
    .post(function(req, res) // calling a POST
    {
        console.log('calling report API');
        var object = "report/" + reportID; // related to the API
        var parameters = '&limit=100'; // related to the API
        var url = link + object + apiKey + parameters; // related to the API
        var data = "";
        https.get(url, function callback(response)
        {
            response.setEncoding("utf8");
            response.on("data", function(chunk)
            {
                data += chunk.toString() + "";
            });
            response.on("end", function()
            {
                var jsonData = JSON.parse(data);
                var array = jsonData['results']; // the data is returned as an array of objects; accessing only the 'results' array
                var length = array.length;
                console.log(length);
                for (var i = 0; i < length; i++)
                {
                    var report = new Report(array.pop()); // Report is the schema model defined.
                    console.log('^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^');
                    console.log(i);
                    console.log('*****************************');
                    console.log(report);
                    console.log('*****************************');
                    // console.log(report['id']);
                    /*report.save(function(err)
                    {
                        if(err)
                            res.send(err);
                    });*/
                    Report.find({id:report['id']}).count(function(err, count) // checks if the id of that specific record already exists in Mongo
                    {
                        console.log(count);
                        console.log('*****************************');
                        if (count == 0) // if count == 0, meaning it does not exist yet, only then save
                        {
                            report.save(function(err)
                            {
                                console.log('saved');
                                if(err)
                                    res.send(err);
                            });
                        }
                    });
                };
                res.json({
                    message: 'Grabbed Report'
                });
            });
            response.on("error", console.error);
        });
    })
My problem is that since the NodeJS callbacks run asynchronously, they are not getting called sequentially. My end result is something like this:
1. Calling report API
2. console.log(length) = 100
3. ^^^^^^^^^^^^^^^^^^^^^^^^
4. console.log(i) = starts with 0
5. *******************************
6. console.log(report) = the data which will be stored inside Mongo
7. *******************************
8. Numbers 3 - 7 repeat 100 times, as the length equals 100
9. console.log(count) = either 0 or 1
10. Number 9 repeats 100 times
11. console.log('saved')
12. Number 11 repeats 100 times

Lastly, only the last of the 100 records is stored into Mongo.
What I need is some technique or method to handle these callbacks so that they execute one after the other, sequentially following the loop. I am pretty sure this is the problem, as my other REST APIs are all working.
I have looked into async methods, promises, recursive functions and a couple of others, none of which I could really understand well enough to solve this problem. I really hope someone can shed some light on this matter.
Feel free to correct me if I made any mistakes in the way I'm asking the question. This is my first question posted on StackOverflow.
This problem is termed "callback hell".
There are lots of other approaches you'll find, like using Promise and async libraries.
I'm more excited about the native async/await that ES7 will bring,
which you can actually start using today with the transpiler library Babel.
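Just to give a feel for that native async route, here is a rough sketch of the same flow with async/await. This is only my sketch under a few assumptions: Babel or a runtime with async support, Mongoose queries and documents being thenable, and the same names (https, link, object, apiKey, parameters, Report, router) as in the question; the fetchBody helper is hypothetical.

// Hypothetical helper: collects the https.get response body into a Promise.
function fetchBody(url) {
    return new Promise(function (resolve, reject) {
        https.get(url, function (response) {
            var data = '';
            response.setEncoding('utf8');
            response.on('data', function (chunk) { data += chunk; });
            response.on('end', function () { resolve(data); });
        }).on('error', reject);
    });
}

router.route('/report').post(async function (req, res) {
    var url = link + object + apiKey + parameters;
    var results = JSON.parse(await fetchBody(url)).results;
    for (var item of results) {
        var report = new Report(item);
        // await keeps each find/save pair strictly sequential, one item at a time
        var count = await Report.find({ id: report.id }).count();
        if (count === 0) await report.save();
    }
    res.json({ message: 'Grabbed Report' });
});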
But by far the simplest approach I've found is the following:
You take out the long callback functions and define them outside.
router.route('/report') // the REST api address
    .post(calling_a_POST)

function calling_a_POST(req, res) {
    ...
    var data = "";
    https.get(url, function callback(response) {
        ...
        response.on("end", response_on_end_callback); // --> take out
        response.on("error", console.error);
    });
}

function response_on_end_callback() { // <-- define here
    ...
    for (var i = 0; i < length; i++) {
        var report = new Report(array.pop());
        ...
        Report.find({ id: report['id'] })
            .count(Report_find_count_callback); // --> take out
    };
    res.json({
        message: 'Grabbed Report'
    });
}

function Report_find_count_callback(err, count) { // <-- define here
    ...
    if (count == 0) {
        report.save(function(err) { // !! report is undefined here
            console.log('saved');
            if (err)
                res.send(err); // !! res is undefined here
        });
    }
}
A caveat is that you won't be able to access all the variables inside what used to be the callback,
because you've taken them out of the scope.
This could be solved with a "dependency injection" wrapper of sorts to pass the required variables.
router.route('/report') // the REST api address
    .post(calling_a_POST)

function calling_a_POST(req, res) {
    ...
    var data = "";
    https.get(url, function callback(response) {
        ...
        response.on("end", function(err, data){ // take these arguments
            response_on_end(err, data, res); // plus the needed variables
        });
        response.on("error", console.error);
    });
}

function response_on_end(err, data, res) { // and pass them to function defined outside
    ...
    for (var i = 0; i < length; i++) {
        var report = new Report(array.pop());
        ...
        Report.find({ id: report['id'] })
            .count(function(err, count){
                Report_find_count(err, count, report, res); // same here
            });
    };
    res.json({ // res is now available
        message: 'Grabbed Report'
    });
}

function Report_find_count(err, count, report, res) { // same here
    ...
    if (count == 0) {
        report.save(function(err) { // report is now available
            console.log('saved');
            if (err)
                res.send(err); // res is now available
        });
    }
}
When I execute the response_on_end function, I am getting the undefined:1 unexpected token u error.
I am pretty sure it has something to do with this line: var jsonData = JSON.parse(data)
My response_on_end starts as below: var jsonData = JSON.parse(data); // problem here
I realize I made an error here:
function calling_a_POST(req, res) {
    ...
    var data = "";
    https.get(url, function callback(response) {
        ...
        // response.on("end", function(err, data){
        response.on("end", function(err){ // data shouldn't be here
            response_on_end(err, data, res);
        });
        response.on("error", console.error);
    });
}
Another problem I can foresee, which may not actually arise here, is still worth talking about anyway.
The data variable is a string, and since strings are a primitive type, unlike objects, data is "passed by value".
More info
It's better to wrap the variable in an object and pass the object, because objects in javascript are always "passed by reference".
function calling_a_POST(req, res) {
    ...
    // var data = ""; //
    var data_wrapper = {};
    data_wrapper.data = ""; // wrap the string in an object (it starts as an empty string, not {})
    https.get(url, function callback(response) {
        ...
        response.on("data", function(chunk){
            data_wrapper.data += chunk.toString() + ""; // use the dot notation to reference
        });
        response.on("end", function(err){
            response_on_end(err, data_wrapper, res); // and pass that object
        });
        response.on("error", console.error);
    });
}

function response_on_end(err, data_wrapper, res) {
    var data = data_wrapper.data; // later redefine the variable
    ...
    for (var i = 0; i < length; i++) {
        var report = new Report(array.pop());
        ...
You can use the async library for controlling your execution flow, and it also has iterators for working with arrays.
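For instance, the saving loop above could be driven with async.eachSeries so that each report is checked and saved before the next one starts. A minimal sketch, assuming the same Report model, the array of results, and the res object from the question:

var async = require('async');

async.eachSeries(array, function (item, next) {
    var report = new Report(item);
    Report.find({ id: report['id'] }).count(function (err, count) {
        if (err || count > 0) return next(err); // skip reports that already exist
        report.save(next);                      // save, then move on to the next item
    });
}, function (err) {
    if (err) return res.send(err);
    res.json({ message: 'Grabbed Report' });
});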
I don't know how node implements its amazing idea, and I have a question about using it.
I have to read four files, file1.js, file2.js, file3.js and file4.js, and concatenate them into one big javascript file result.js. It's important to keep their order.
So it seems natural for me to use readFileSync instead of readFile.
I know it's a bad solution. Does anyone have a better idea?
Q: Is it possible for node.js to read four files at the same time?
I hope someone can explain the principle of node.js and when process.nextTick will be fired.
A: Yes, it is possible for node to read 4 files at the same time.
My answer would be that it depends on your situation whether to read the files synchronously or asynchronously. If it's configuration data, or the files can be cached, I would suggest just doing it synchronously: it's easy, and it's only done once, so you won't be waiting around very much. Long operations on initialization are typical and can make things more efficient in the long run. That being said, reading four files in order asynchronously, so that your program can do other things in the background, isn't that hard. I will work up sync and async examples of each and add an edit.
/* jshint node:true*/

var fs = require('fs');

function readFilesSync(fileNames) {
    'use strict';
    var results = '';
    for (var i = 0; i < fileNames.length; i++) {
        results += fs.readFileSync(fileNames[i]);
    }
    return results;
}

function readFiles(fileNames, callback) {
    'use strict';
    var results = '';
    function readFile(index) {
        if (index < fileNames.length) {
            fs.readFile(fileNames[index], function (err, data) {
                results += data;
                readFile(index + 1);
            });
        } else {
            callback(results);
        }
    }
    readFile(0);
}

function readAllFilesAtOnce(fileNames, callback) {
    'use strict';
    var results = {};
    var numFiles = fileNames.length;
    function callBackWrapper() {
        var resultsOrdered = '';
        for (var i = 0; i < fileNames.length; i++) {
            resultsOrdered += results[fileNames[i]];
        }
        callback(resultsOrdered);
    }
    function readFileAsync(fileName) {
        fs.readFile(fileName, function (err, data) {
            results[fileName] = data;
            numFiles--;
            if (numFiles === 0) {
                callBackWrapper();
            }
        });
    }
    for (var i = 0; i < fileNames.length; i++) {
        readFileAsync(fileNames[i]);
    }
}

function doSomethingWithTheData(data) {
    'use strict';
    console.log('Results async: ' + data);
}

function doSomethingWithTheData2(data) {
    'use strict';
    console.log('Results async all at once: ' + data);
}

var fileNamesArray = ['blah.js', 'file.js', 'hello.txt'];

console.log('The results sync: ' + readFilesSync(fileNamesArray));
readFiles(fileNamesArray, doSomethingWithTheData);
readAllFilesAtOnce(fileNamesArray, doSomethingWithTheData2);
EDIT: I added a method above to read all of the files at once.
process.nextTick does no more than run the given function on the next pass around the event loop. For example:
process.nextTick(function() {
    console.log('never printed out');
});
while(true);
Example 2:
process.nextTick(function() {
    console.log('printed last');
});
console.log('printed first');
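One more illustration (my addition, not part of the original answer): callbacks queued with process.nextTick also run before I/O callbacks that are scheduled at the same time.

var fs = require('fs');

fs.readFile(__filename, function () {
    console.log('printed last (the I/O callback has to wait for the read to finish)');
});
process.nextTick(function() {
    console.log('printed first (nextTick runs before any I/O callback)');
});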
I have an array which holds the URLs of several files. For example:
var files = ['1.html', '2.html', '3.html'];
I need to read them asynchronously and save them in an object named cache (cache = {}).
To do this I used the code:
for(var i = 0; i < files.length; i++){
    require('fs').readFile(files[i], 'utf8', function (error,data) {
        cache[files[i]]=data;
    });
}
In the end I have the result:
cache = { undefined : 'File 3 content' }
I do understand that "readFile" acts after the loop has ended and it loses its scope. Is there a way to fix this, or another method to read files from an array and cache them?
When your callback to readFile executes, the for loop will already have finished. So i will be files.length and files[i] will be undefined. To mitigate this, you need to wrap the variables in a closure. The simplest way to do this is to create a function which does your readFile call, and call that in the loop:
function read(file) {
    require('fs').readFile(file, 'utf8', function (error,data) {
        cache[file]=data;
    });
}

for(var i = 0; i < files.length; i++){
    read(files[i]);
}
For even better execution control, you might want to look into async:
var fs = require('fs');
var async = require('async');

function readAsync(file, callback) {
    fs.readFile(file, 'utf8', callback);
}

async.map(files, readAsync, function(err, results) {
    // results = ['file 1 content', 'file 2 content', ...]
});
Edit: Made use of helper function for async example.
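A small follow-up on my part (not in the original answer): async.map returns the results in the same order as files, so the cache object from the question can be rebuilt from the results array:

async.map(files, readAsync, function(err, results) {
    var cache = {};
    files.forEach(function(file, i) {
        cache[file] = results[i]; // results come back in the same order as files
    });
    console.log(cache); // { '1.html': '...', '2.html': '...', '3.html': '...' }
});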
The existing answer didn't work for me. I did find an NPM package which did the job: https://www.npmjs.com/package/read-multiple-files. After running npm install read-multiple-files at the command line, here's the code I used:
var readMultipleFiles = require('read-multiple-files');

var files = ['1.html', '2.html', '3.html'];
console.log("\n");

readMultipleFiles(files, 'utf8', function(err, inputFiles) {
    if(err) {
        console.log("Read Error: " + err);
    }

    var fileOne = inputFiles[0];
    var fileTwo = inputFiles[1];
    ...

    console.log(fileOne);
    console.log(fileTwo);
});
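For what it's worth, on newer Node versions (an assumption: Node 10+ where fs.promises is available) the same kind of cache can be built with only core modules:

const fs = require('fs').promises;

const files = ['1.html', '2.html', '3.html'];
const cache = {};

// Read all files in parallel and key the cache by filename.
Promise.all(files.map(function (file) {
    return fs.readFile(file, 'utf8').then(function (data) {
        cache[file] = data;
    });
})).then(function () {
    console.log(cache); // { '1.html': '...', '2.html': '...', '3.html': '...' }
});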