(Please note this is not a duplicate of two similarly titled questions; those questions use Mongoose, and their answers apply only to Mongoose queries.)
I have a list of directories, each of these directories contains a file. I want to return a JSON list with the contents of each of these files. I can load the files no problem, but because I'm looping over the array with forEach, my empty response is sent before I've actually loaded the contents of the files:
function getInputDirectories() {
    return fs.readdirSync(src_path).filter(function(file) {
        return fs.statSync(path.join(src_path, file)).isDirectory();
    });
}
router.get('/list', function(req, res, next) {
    var modules = [];
    var input_dirs = getInputDirectories();
    input_dirs.forEach(function(dir) {
        path = __dirname + '/../../modules/input/' + dir + '/module.json';
        fs.readFile(path, 'utf8', function(err, data) {
            modules.push(data);
        });
    });
    res.status(200).json(modules);
});
How can I make sure that I only send down the modules array once it's fully loaded, i.e. once all the reads started inside the forEach have finished?
Since fs.readFile is asynchronous, the behaviour you are seeing is the expected one.
What you need to do is return your modules once all of them have been read. You can do this inside the fs.readFile callback.
As far as I understand, you can obtain the total number of directories through input_dirs.length (since I guess getInputDirectories() returns an array). Now you need some kind of counter that tells you whether you have read the last directory or not, and if so, you return your modules. Something like this should work:
router.get('/list', function(req, res, next) {
    var modules = [];
    var input_dirs = getInputDirectories();
    var c = 0;
    input_dirs.forEach(function(dir) {
        var filePath = __dirname + '/../../modules/input/' + dir + '/module.json';
        fs.readFile(filePath, 'utf8', function(err, data) {
            c++;
            modules.push(data);
            if (c === input_dirs.length) {
                return res.status(200).json(modules);
            }
        });
    });
});
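One caveat with the counter approach: the fs.readFile callbacks can fire in any order, so push may not preserve the directory order. A small sketch of a variant that stores each result at its original index instead:

router.get('/list', function(req, res, next) {
    var modules = [];
    var input_dirs = getInputDirectories();
    var c = 0;
    input_dirs.forEach(function(dir, i) {
        var filePath = __dirname + '/../../modules/input/' + dir + '/module.json';
        fs.readFile(filePath, 'utf8', function(err, data) {
            c++;
            modules[i] = data; // index assignment keeps results aligned with input_dirs
            if (c === input_dirs.length) {
                return res.status(200).json(modules);
            }
        });
    });
});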
I suggest you use promises. For example, with Bluebird:
var Promise = require('bluebird');

router.get('/list', function(req, res, next) {
    var input_dirs = getInputDirectories();
    // 'map' will try to fulfill all promises and collect their values;
    // if one fails, it returns a failed promise.
    return Promise.map(input_dirs, function(dir) {
        var filePath = __dirname + '/../../modules/input/' + dir + '/module.json';
        return new Promise(function(resolve, reject) {
            fs.readFile(filePath, 'utf8', function(err, data) {
                if (err) return reject(err);
                return resolve(data);
            });
        });
    }).then(function(modules) {
        return res.status(200).json(modules);
    })
    .catch(function(err) {
        // handle error
        next(err);
    });
});
This way you move on only once all your promises have been fulfilled.
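For reference, on Node.js 10+ you can get the same result without Bluebird, using the built-in fs.promises API together with Promise.all. A minimal sketch under that assumption:

const fsp = require('fs').promises;

router.get('/list', function(req, res, next) {
    var input_dirs = getInputDirectories();
    var reads = input_dirs.map(function(dir) {
        // each readFile call returns a promise for that file's contents
        return fsp.readFile(__dirname + '/../../modules/input/' + dir + '/module.json', 'utf8');
    });
    Promise.all(reads)
        .then(function(modules) {
            res.status(200).json(modules);
        })
        .catch(next); // hand any read error to the Express error handler
});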
Instead of fs.readFile, use fs.readFileSync.
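A minimal sketch of what that would look like for this route; note that synchronous reads block the event loop for the duration of the request, which is usually only acceptable for small files or low traffic:

router.get('/list', function(req, res, next) {
    var modules = getInputDirectories().map(function(dir) {
        // readFileSync blocks until the file is read, so no callback is needed
        return fs.readFileSync(__dirname + '/../../modules/input/' + dir + '/module.json', 'utf8');
    });
    res.status(200).json(modules);
});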
I am in the process of learning Node, Express and Mongoose and creating a web application. Sometimes, in one page, I need to display data from two or more of my collections. Although it works just fine, right now I use a bunch of nested if statements and have realized that the code has become very messy looking.
Example:
app.get("/jobs/:id/edit", function(req, res){
Job.findById(req.params.id, function(err, foundJob){
if (err){
console.log(err)
} else {
User.find({}, function(err, users){
if(err){
console.log(err);
} else {
Client.find({}, function(err, clients){
if(err) {
console.log(err);
} else {
let start_date = foundJob.start_date;
let end_date = foundJob.end_date;
start_date = moment(start_date).format("MM-DD-YYYY");
end_date = moment(end_date).format("MM-DD-YYYY");
// Redirect
res.render("edit_job", {job: foundJob, users: users, clients: clients, start_date, end_date});
}
});
}
});
}
});
});
This example is for a page that displays information from just three collections. Is there a better way to write this kind of code? I feel like using a table of collection names and using a for loop might work, but I am unsure how I would write that.
As an update, I tried the following logic, but it did not work:
app.get("/", function(req, res){
let collections = [Client, User, Ticket, Job];
let endCollections = [];
for (let i = 0; i < collections.length; i++){
collections[i].find({}, function(err, foundCollection){
if (err) {
console.log(err);
} else {
endCollections[i] = foundCollection;
}
})
}
res.render("dashboard", {clients: endCollections[0]});
No matter what I do, endCollections[i] remains undefined even though I have it set to be foundCollection, which is not undefined.
Thanks.
In the for loop you're executing an asynchronous block of code (collection.find()), so JavaScript will not wait until that asynchronous code has executed before running the next block of code, which is the render. That's why you get an empty array.
You need to use async/await to make JavaScript wait until the asynchronous block of code has executed before doing the rest.
Just add async to the main function to be able to use await inside it.
Something like this:
app.get("/", async function(req, res){ // <== note the async keyword here
let collections = [Client, User, Ticket, Job];
let endCollections = [];
for (let i = 0; i < collections.length; i++){
await collections[i].find({}, function(err, foundCollection){ // <== note the await keyword here
if (err) {
console.log(err);
} else {
endCollections[i] = foundCollection;
}
})
}
res.render("dashboard", {clients: endCollections[0]});
Hope it helps.
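As a side note, since Mongoose queries return promises when no callback is passed, the four lookups can also run concurrently rather than one after another. A minimal sketch using Promise.all, assuming the same models as above:

app.get("/", async function(req, res){
    let collections = [Client, User, Ticket, Job];
    try {
        // start all four queries at once; results come back in the same order
        let endCollections = await Promise.all(
            collections.map(function(collection) { return collection.find({}); })
        );
        res.render("dashboard", {clients: endCollections[0]});
    } catch (err) {
        console.log(err);
    }
});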
I think the rendering takes place before the search for the string in the files. I have tried different methods but don't seem to get this working; any help will be appreciated. I'm a noob at Node.js. I'm trying to get the id of the user, query to get all the data, thereafter see if he is in any of the given lists, and finally render the page.
const j = [];
let name = '';
const filename = [];
var ext = '';

module.exports = function(app, express) {
    app.use(bodyParser.urlencoded({ extended: false }));
    app.use(bodyParser.json());

    app.post('/cusdetails', isLoggedIn, function (req, res) {
        var cusid = req.body.cusid;
        var insertQuerys = "SELECT * FROM customer WHERE cusid=? ORDER BY rowid DESC LIMIT 1";
        connection.query(insertQuerys, [cusid],
            function(err, rows) {
                rows.forEach( (row) => {
                    name = row.fncus;
                });
                fs.readdir('./views/iplist', function(err, files) {
                    if (err)
                        throw err;
                    for (var index in files) {
                        j.push(files[index])
                    }
                    j.forEach(function(value) {
                        var k = require('path').resolve(__dirname, '../views/iplist/', value);
                        fs.exists(k, function(fileok) {
                            if (fileok) {
                                fs.readFile(k, function(err, content) {
                                    if (err) throw err;
                                    if (content.indexOf(name) > -1) {
                                        ext = path.extname(k);
                                        filename.push(path.basename(k, ext));
                                    }
                                });
                            }
                            else {
                                console.log(" FileNotExist ");
                            }
                        });
                    });
                });
                console.log(filename);
                res.render('cusdetails.ejs', {rows: rows, user: req.user, aml: filename});
            });
    });
};
You can create a simple Promise wrapper and then use it inside an async/await function to pause execution until it resolves.
// use the mysql2 package as it provides promises; less work than writing promise wrappers
const mysql = require('mysql2/promise');

// create the connection to the database
const connection = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    database: 'test'
});

// sample wrapper
function some(k) {
    // more advisable to use local variables; why do you need this to be an array?
    var filename = [];
    return new Promise((resolve, reject) => {
        // doing this is also not recommended; check the Node.js documentation on fs.exists for more info
        fs.exists(k, function(fileok) {
            if (fileok) {
                fs.readFile(k, function(err, content) {
                    if (err) return reject(err);
                    if (content.indexOf(name) > -1) {
                        ext = path.extname(k);
                        filename.push(path.basename(k, ext));
                    }
                    // resolve unconditionally so the promise cannot hang on a non-match
                    resolve(filename);
                });
            }
            else {
                // reject(new Error("FileNotExist"))
                console.log(" FileNotExist ");
            }
        });
    });
}
// note the use of async
app.post('/cusdetails', isLoggedIn, async function (req, res) {
    var cusid = req.body.cusid;
    var insertQuerys = "SELECT * FROM customer WHERE cusid=? ORDER BY rowid DESC LIMIT 1";
    // using await to pause execution until the query is finished
    const [rows] = await connection.query(insertQuerys, [cusid])
    rows.forEach( (row) => {
        name = row.fncus;
    });
    // then you can
    var result = await some(k)
    ...
Note, however, that this way you lose the advantage of concurrent execution, as it's kind of blocking. If the result of one call is not used in the other, you can start both in parallel and await the results afterwards to achieve sequencing, like this:
const rowsPromise = connection.query(insertQuerys, [cusid]); // starts immediately, not awaited yet
const resultPromise = some(k); // runs concurrently with the query
const [rows] = await rowsPromise;   // now wait for the query
const result = await resultPromise; // and for the file check
console.log(rows, result) // do something
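As an aside, Node's built-in util.promisify (Node 8+) can replace hand-written wrappers around callback-style APIs like fs.readFile. A sketch of a hypothetical helper doing the same job as some(k) above, under that assumption:

const util = require('util');
const fs = require('fs');
const path = require('path');

const readFile = util.promisify(fs.readFile);

// hypothetical replacement for some(k); the promise rejects on a missing file, so no fs.exists is needed
async function matchingBasename(k, name) {
    const content = await readFile(k, 'utf8');
    if (content.indexOf(name) > -1) {
        return path.basename(k, path.extname(k));
    }
    return null; // resolves either way, so callers never hang
}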
JavaScript is asynchronous. This means that if you have a function with a callback (i.e. your query), the callback will be called asynchronously, at an unknown time, while the other code executes.
You need to look up some tutorials on how to deal with callbacks to get a proper understanding of them. Another approach is using async/await and/or promises.
Basically, if you take the following code:
console.log("this will print first");
setTimeout(function () {
    console.log("this will print last");
}, 1000);
console.log("this will print second");
If you run the code above, the top level executes synchronously: it first calls console.log, then executes setTimeout, which itself is synchronous. It sets a timer, then says "I'm ready", and the code continues to the other console.log. After one second (1000 milliseconds), the callback passed to setTimeout is executed, and only then is that console.log called. You cannot make the rest of the code wait this way; you need to restructure your code or read up on promises.
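For example, a common way to make code actually wait for a timeout is to wrap setTimeout in a Promise and await it. A minimal sketch:

function delay(ms) {
    // resolves after ms milliseconds, without blocking other code
    return new Promise(function (resolve) {
        setTimeout(resolve, ms);
    });
}

async function run() {
    console.log("this will print first");
    await delay(1000); // run() pauses here; the rest of the program keeps going
    console.log("this will print second");
}

run();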
I have the following express server set up (server is just express() from another file). I am sure there is a way to simplify this to only one server.get() but I haven't been able to figure out how. Any help or points in the right direction would be appreciated.
module.exports.api = function (server, fs) {
    server.get('/api/getData/:uuid', function (req, res) {
        fs.readFile(__dirname + '/data.json', function (err, data) {
            if (err) throw err;
            data = JSON.parse(data);
            data.forEach(function (match) {
                match['uuid'] = match['x'] + '-' + match['y'];
            });
            var match = data.filter(function (e) {
                return e.uuid == req.params.uuid
            })[0];
            res.send(200, match);
        });
    });

    server.get('/api/getData', function (req, res) {
        fs.readFile(__dirname + '/data.json', function (err, data) {
            if (err) throw err;
            data = JSON.parse(data);
            data.forEach(function (match) {
                match['uuid'] = match['x'] + '-' + match['y'];
            });
            res.send(200, data);
        });
    });
};
Here's a solution that just moves the common code into a shared function, yet still uses the two routes for routing clarity:
function getData(res, uuid) {
    fs.readFile(path.join(__dirname, 'data.json'), function (err, fileData) {
        if (err) {
            return res.send(500);
        }
        let data = JSON.parse(fileData);
        data.forEach(function(match) {
            match['uuid'] = match['x'] + '-' + match['y'];
        });
        if (uuid) {
            // single-item route: send only the matching record
            var match = data.filter(function (e) {
                return e.uuid == uuid;
            })[0];
            return res.send(200, match);
        }
        // list route: no uuid given, send everything
        res.send(200, data);
    });
}
module.exports.api = function (server, fs) {
    server.get('/api/getData/:uuid', function (req, res) {
        getData(res, req.params.uuid);
    });

    server.get('/api/getData', function (req, res) {
        getData(res);
    });
};
This changes the following things:
Puts the shared code into a getData() function that is called from both routes.
Sends an error response if fs.readFile() reports an error.
Sends the full data set when no uuid is given, and only the matching record when one is.
Creates a new local variable so it doesn't assign back to a function argument, a less desirable practice because it can prevent some interpreter optimizations.
Uses path.join() to join parts of a path in a more cross-platform way.
FYI, unless the data in data.json actually changes from time to time, you could just read this data into a variable once and then cache it rather than rereading it on every one of these requests.
Note: You could use routing wildcards and reduce your code to a single route, but this is mostly considered an anti-pattern because wildcards often match much more than you want, creating situations where you have to manually trigger 404 errors for things you didn't intend to match but that ended up matching your routing wildcard. So, it is considered good practice to explicitly declare the routes you intend to match and share the appropriate implementation code, rather than trying to collapse things down to a single route that matches more than one form of URL.
There are, of course, always exceptions, but remember that the goal is clear, correct, maintainable, reliable code, not necessarily the fewest number of routes.
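For completeness, the single-route version that the note above argues against would use an optional route parameter. A sketch, assuming Express-style routing:

// ':uuid?' makes the parameter optional, so this one route matches both
// /api/getData and /api/getData/<some-uuid>
server.get('/api/getData/:uuid?', function (req, res) {
    getData(res, req.params.uuid); // undefined when the parameter is absent
});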
If you just want to cache the data.json data at server start-up time, you can use require() to load and parse it for you like this, and then there's really no reason for the shared function:
const cacheData = require('./data.json');

cacheData.forEach(function(match) {
    match['uuid'] = match['x'] + '-' + match['y'];
});

module.exports.api = function (server, fs) {
    server.get('/api/getData/:uuid', function (req, res) {
        let match = cacheData.filter(function (e) {
            return e.uuid == req.params.uuid;
        })[0];
        res.send(match);
    });

    server.get('/api/getData', function (req, res) {
        res.send(cacheData);
    });
};
I have this simple piece of code:
var http = require('http'), fs = require("fs");

function get(p) {
    fs.readFile('.' + p, 'utf8', function (err, cont) {
        if (err) return "EERRRORRRRR";
        else return cont;
    })
}

http.createServer(function (request, response) {
    var path = ((request.url === "/") ? "/index.html" : request.url);
    console.log(get(path));
}).listen(80);
When I run and connect to the server, it logs undefined...
When I add a "console.log(cont)" like:
fs.readFile('.' + p, 'utf8', function (err, cont) {
    console.log(cont)
    if (err) return "EERRRORRRRR";
    else return cont;
})
it logs the correct contents, so why is the function returning undefined? The contents exist...
How would I fix this issue?
The original context of the code was a simple web server, if you couldn't tell.
Read about callbacks and asynchronous functions; you can find docs on Google.
var http = require('http'),
    fs = require("fs");

// notice the new callback parameter
function get(p, callback) {
    fs.readFile('.' + p, 'utf8', callback);
}

http.createServer(function (request, response) {
    var path = ((request.url === "/") ? "/index.html" : request.url);
    // get accepts a callback; note that a plain http response has no .send(),
    // so we use writeHead() and end() instead
    get(path, function(err, data) {
        if (err) {
            response.writeHead(404);
            response.end('not found');
        } else {
            response.end(data);
        }
    });
}).listen(80); // notice: port 80 requires sudo to run; port 3000 is a better choice
readFile in Node.js is async (as are almost all other functions). You can't return values from an async callback; instead, you need to pass a callback function that will be called once the operation ends:
fs.readFile('.' + p, 'utf8', function (err, cont) {
    if (err) return console.log("EERRRORRRRR"); // a return value from this callback goes nowhere
    handleResponse(cont);
})

function handleResponse(data) { /* Do something here */ }
Use readFileSync if you want to return something without having to use a callback.
function get(p) {
    try {
        return fs.readFileSync('.' + p, 'utf8');
    } catch (err) {
        // readFileSync throws on error instead of returning a falsy value
        return "EERRRORRRRR";
    }
}
This assumes you don't mind using synchronous/blocking code.
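If blocking is a concern, a non-blocking variant that still reads like a return is possible with the fs.promises API (Node 10+). A minimal sketch, not from the original answer:

const fsp = require('fs').promises;

async function get(p) {
    try {
        return await fsp.readFile('.' + p, 'utf8');
    } catch (err) {
        return "EERRRORRRRR";
    }
}

// callers must await it, e.g. inside an async request handler:
// const body = await get(path);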
I am creating a CRUD api with express and mongodb. I have a specific route which queries one collection in my mongo db and retrieves whatever documents match the query criteria. My program then loops through these documents and tries to find the latest cross-referenced entry in another collection in my db:
exports.findLatestCommitforAllRepos = function(req, res, next) {
    var githubCommitDataCollection = index.db.collection(githubCommitDataCollectionName);
    var enabledRepoCollection = index.db.collection(enabledRepoCollectionName);
    var latestCommits = [];
    enabledRepoCollection.find({enabled: true}).toArray(function(err, repos) {
        if (err) { next(err); }
        if (repos.length === 0 || repos === 'undefined') {
            res.status(404);
            res.send("There are no repos being tracked")
        }
        else {
            repos.forEach(function(enabledRepo) {
                var repo = enabledRepo.repo;
                var userOrOrg = enabledRepo.userOrOrg;
                githubCommitDataCollection.find({repo: repo, userOrOrg: userOrOrg}).sort({commitDate: -1}).limit(1).toArray(function(err, commit) {
                    if (commit.length === 0 || repos === 'undefined') {
                        res.send("No commit found for repo " + repo);
                    }
                    // console.log(commit[0]);
                    latestCommits.push(commit[0]);
                    console.log(latestCommits);
                });
            });
            res.setHeader('Content-Type', 'application/json');
            res.status(200);
            res.json(latestCommits);
            res.end();
        }
    });
}
This results in an empty array being returned.
You can use the async library, especially the async.waterfall() method, when you need to run an array of task functions in series, each passing its results to the next in the array.
Consider the following example:
// Include the async package
// Make sure you add "async" to your package.json
const async = require("async");

exports.findLatestCommitforAllRepos = function(req, res, next) {
    var latestCommits = [];
    async.waterfall([
        // Load all enabled repo documents
        function(callback) {
            index.db.collection(enabledRepoCollectionName).find({"enabled": true}).toArray(function(err, repos) {
                if (err) return callback(err);
                callback(null, repos);
            });
        },
        // For each repo, fetch its latest commit
        function(reposData, callback) {
            async.each(reposData, function(enabledRepo, callback) {
                index.db.collection(githubCommitDataCollectionName)
                    .find({repo: enabledRepo.repo, userOrOrg: enabledRepo.userOrOrg})
                    .sort({commitDate: -1}).limit(1)
                    .toArray(function(err, commits) {
                        if (err) return callback(err);
                        latestCommits.push(commits[0]);
                        callback();
                    });
            }, callback);
        }
    ], function(err, result) { // This function gets called after all the tasks have called their callbacks
        if (err) return next(err);
        res.setHeader('Content-Type', 'application/json');
        res.status(200);
        res.json(latestCommits);
        res.end();
    });
};
One minor suggestion for the code:
use .findOne instead of .find
That is, instead of
githubCommitDataCollection.find({repo: repo, userOrOrg: userOrOrg}).sort({commitDate: -1}).limit(1).toArray(function(err, commit) {
use
githubCommitDataCollection.findOne({repo: repo, userOrOrg: userOrOrg}, {sort: {commitDate: -1}}, function(err, commit) {
With the native MongoDB driver, findOne takes the sort as an option and passes a single document (not an array) to the callback; check console.log(commit) to see what you are getting as a result.
Otherwise, please share some existing documents from githubCommitDataCollection.