Get mongodb collection by name in node.js - node.js

I'm learning node.js currently and this is my first ever project in it. It's (supposedly) a simple to-do list app where there are multiple lists I can load/edit/save/remove.
In the todo_list.ejs file I have a div where I list all the collection names:
<div id="list_container" class="lists">
  <ul id="col_list" class="collection-list">
    <% lists.forEach(list => { %>
      <li class="collection-list-item">
        <div class="list-name-container">
          <a href="/<%=list.name %>" class="list-link">
            <span class="list-name" name="list_name"><%=list.name %></span>
          </a>
        </div>
      </li>
    <% }) %>
  </ul>
</div>
When I click on a list's link, I try to use the following code to load in a new list (which is a MongoDB collection):
app.route("/:list_name").get((req, res) => {
  MongoClient.connect(process.env.DB_CONNECT, (err, db) => {
    if (err) throw err;
    var database = db.db("myFirstDatabase");
    const cursor = database.collection(req.params.list_name).find({}); /* stuck here */
    database.listCollections().toArray((err, collections) => {
      if (err) throw err;
      db.close();
      collections.forEach(element => {
        if (element.name == req.params.list_name) {
          current_list = element;
          current_list_name = element.name;
        }
      });
      task.find({}, (err, todo_tasks) => { /* currently using the model.find() method to list all the documents, which always looks at the "tasks" collection */
        res.render("todo_list.ejs", { tasks: todo_tasks, lists: collections, curr_list: current_list_name });
      });
    });
  });
});
I commented where I'm stuck in the code above. I'm trying to get a MongoDB collection by name and then load all its contents onto a list, but I don't know how to find a collection by name. Reading through the Node.js documentation led me to the cursor object, which has a ton of info and properties I have no clue what to do with...
Is there a simple way to find a collection by name and get a list of its documents?
EDIT 1:
This is where I add tasks:
//ADD TASK TO LIST
app.post('/', async (req, res) => {
  const tsk = new task({ /* the mongodb model for tasks */
    content: req.body.content,
    deadline: req.body.deadline
  });
  try {
    await tsk.save();
    res.redirect("/");
  } catch (e) {
    console.log(e);
    res.redirect("/");
  }
});

I will not address the EJS part in this answer as I'm not qualified and the code you provided seems all good. However, I'll review the back-end part.
Also, since I don't know what kind of coding background you have (if any), this answer will contain a lot of explanation of perhaps simple concepts.
Summary
From your second code snippet, there are a few things to discuss:
Asynchronous code
The database connection and generalities
Actual implementation
Code conception
[EDIT]: Save/Edit implementations
There is also a lot more to cover depending on the OP's knowledge, such as try/catch clauses, MongoDB model validation, the usage of Express's Router and more, but I will only edit my answer if needed.
Asynchronous code
For the rest of the answer, most of the code will be surrounded by async/await keywords. These are necessary for the code to work properly.
Basically, JS being a language that is made for the web, you sometimes need to wait for network or database requests to be done before you do any other action. That's where the callbacks, the promises or the async/await syntax (which is syntactic sugar for promises) come in handy.
Let's say you need, like your example, to retrieve a list of tasks:
app.route("/:list_name").get((req, res) => {
  MongoClient.connect(process.env.DB_CONNECT, (err, db) => {
    if (err) throw err;
    var database = db.db("myFirstDatabase");
    const cursor = database.collection(req.params.list_name).find({}); /* stuck here */
    console.log(cursor);
    // ..........
  });
});
JS being asynchronous by default, if you run this code, chances are high that cursor will be undefined. The reason for that is that the code doesn't wait for the database.collection(............. to finish in order to continue the execution. But with the help of the aforementioned callback/promises/async-await, our code can now wait until this instruction is done.
You can read about async/await here and here, and see here that the MongoDB examples use async/await as well, but you will see more "practical" usages of it in the following sections.
Keep in mind that what you are using (whether it is callbacks, promises or async/await syntax) is completely up to you and your preferences.
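For illustration only (the collection variable below is a placeholder, not code from your project), here is the same hypothetical query written once with a callback and once with async/await:

// Callback style: the documents are only available inside the callback.
collection.find({}).toArray((err, docs) => {
  if (err) throw err;
  console.log(docs);
});

// async/await style: must be called from inside an async function.
async function listDocs() {
  const docs = await collection.find({}).toArray();
  console.log(docs);
}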
Database connection
As the code is currently written, every time a user clicks on any item in your list, a new connection to MongoDB is established, and that connection logic doesn't belong in the route handler. Your back-end app should connect to the database once (at least for this case; it can prove useful to open multiple connections in some advanced cases) and close the connection when your back-end app stops (which generally never happens for an API).
Atlas cloud databases, for example, have a limit of 500 connections. That means if, say, 501 users click an item simultaneously on your front-end list, in the best case one of them doesn't get what they asked for, and it could be worse.
For this matter, you have several options. One would be to go with a framework that abstracts away some of the code and boilerplate, such as Mongoose, or to work with the native MongoDB driver, which we will do, since you already seem to be working with it and I strongly believe working with the lowest layer first will make you learn higher-level frameworks much faster.
Now, let's tackle the problem. We want to put the database connection somewhere else, where it'll be called once. Again, there are several options you can go with, but I like to create a class for it and export a new instance to use anywhere in my code. Here is a (really) simple example of what my minimal go-to looks like:
mongo-client.js:
const { MongoClient } = require('mongodb');

class MongoCli {
  constructor() {
    let url = `mongodb://testuser:my_sup3r_passwOrd@127.0.0.1:27017/?authSource=my_database_name`;
    this.client = new MongoClient(url, { useUnifiedTopology: true });
  }

  async init() {
    if (this.client) {
      await this.client.connect();
      this.db = this.client.db('test');
    } else
      console.warn("Client is not initialized properly");
  }
}

module.exports = new MongoCli();
Actual implementation
Of course, this code on its own won't work; we need to call it and wait for it before defining routes. So, right before app.route("/:list_name")............, call this: await MongoCli.init();.
Here is what my (again, really) simple server.js looks like (I have separated the mongo-client code from the server):
const express = require('express');
const MongoCli = require('./mongo-cli.js');

const server = async () => {
  const app = express();
  await MongoCli.init();

  app.route("/:list_name").get(async (req, res) => {
  });

  return app;
};

module.exports = server;
Now, let's start implementing what you really wanted from the beginning, i.e. once a user clicks on a topic of tasks, display all the tasks of the topic they clicked:
const express = require('express');
const MongoCli = require('./mongo-cli.js');

const server = async () => {
  const app = express();
  await MongoCli.init();

  app.route("/:list_name").get(async (req, res) => {
    // we will query the collection specified by req.params.list_name
    // then, .find({}) indicates we want all the results (empty filter)
    // finally, we call .toArray() to transform a Cursor to a human-readable array
    const tasks = await MongoCli.db.collection(req.params.list_name).find({}).toArray();
    // making sure we got what we needed, you can remove the line below
    console.log(tasks);
    // return a HTTP 200 status code, along with the results we just queried
    res.status(200).json(tasks);
  });

  return app;
};

module.exports = server;
Quite simple, right?
Keep in mind my server.js might not look quite like yours, since there are many ways to handle this and it is up to the developer to find their preferred method, but you get the idea.
Code conception
We got our GET route going, we get the results when we call the route, everything's great! ... not quite.
What happens now if we have, say, 1500 topics of tasks? Should we really create 1500 different collections, knowing that a task consists of a description, a status, a deadline and possibly a name? Sure, we can do it, but that doesn't mean we have to.
Instead, what about creating one and only one collection, tasks, and adding a topic key to it?
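For instance, here is a sketch of what a document in that single tasks collection could look like (content and deadline come from your model; the other fields are illustrative):

{
  topic: "groceries",        // which list/topic the task belongs to
  content: "Buy milk",
  deadline: "2021-06-30",
  status: "pending"
}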
Considering the above sentences, here's what the route would now look like:
const express = require('express');
const MongoCli = require('./mongo-cli.js');

const server = async () => {
  const app = express();
  await MongoCli.init();

  app.route("/:topic_wanted").get(async (req, res) => {
    // we now know the collection is named 'tasks'
    // then, .find({topic: req.params.topic_wanted}) indicates we want all the results where the key 'topic' corresponds to req.params.topic_wanted
    // finally, we call .toArray() to transform a Cursor to a human-readable array
    const tasks = await MongoCli.db.collection('tasks').find({topic: req.params.topic_wanted}).toArray();
    // making sure we got what we needed
    console.log(tasks);
    // return a HTTP 200 OK, along with the results we just queried
    res.status(200).json(tasks);
  });

  return app;
};

module.exports = server;
Last words
I hope I'm not too off-topic and that my answer helps you.
Also, I saw while writing the answer that you now need to figure out how to post tasks. Please let me know in the comments if you need further information/explanation or even help with posting tasks.
EDIT (added):
Save/Edit implementations
Seeing your implementation of creating a new task, I assume you already use Mongoose. Unfortunately, when declaring a model in Mongoose, it will automatically look for (or create, if it doesn't exist) the collection with the same name as your declared model, except lowercased and pluralized (see here for more info). Meaning you can't declare a new task and assign it to a collection named "users", for example.
That's where part 4 of this answer, "Code conception", comes into play. Otherwise, the code you edited in has no "major" flaw.
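For what it's worth, here is a minimal, untested sketch of what posting a task could look like with the single tasks collection and the MongoCli helper from above (the route path is an assumption; content and deadline come from your model; topic is the key discussed in "Code conception"):

app.post("/:topic_wanted", async (req, res) => {
  // build the task from the request body and tag it with the topic from the URL
  const newTask = {
    topic: req.params.topic_wanted,
    content: req.body.content,
    deadline: req.body.deadline
  };
  try {
    await MongoCli.db.collection('tasks').insertOne(newTask);
    res.redirect("/" + req.params.topic_wanted);
  } catch (e) {
    console.log(e);
    res.status(500).json({ error: "Could not save the task" });
  }
});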

Try this, this should work.
Changes that I made :-
MongoDB connect callback function changed to async.
Added the toArray() function at the end of database.collection(req.params.list_name).find({});
And awaited the call above.
You can choose .then or async/await, it is up to you!
app.route("/:list_name").get((req, res) => {
  MongoClient.connect(process.env.DB_CONNECT, async (err, db) => {
    if (err) throw err;
    var database = db.db("myFirstDatabase");
    const todo_tasks = await database.collection(req.params.list_name).find({}).toArray(); /* add '.toArray()' */
    database.listCollections().toArray((err, collections) => {
      if (err) throw err;
      db.close();
      collections.forEach(element => {
        if (element.name == req.params.list_name) {
          current_list = element;
          current_list_name = element.name;
        }
      });
      res.render("todo_list.ejs", { tasks: todo_tasks, lists: collections, curr_list: current_list_name });
    });
  });
});
After some improvements :-
app.route("/:list_name").get((req, res) => {
  // Connecting to the MongoDB database
  MongoClient.connect(process.env.DB_CONNECT, async (err, db) => {
    if (err) throw err;
    // Choosing the 'myFirstDatabase' database
    const database = db.db("myFirstDatabase");
    let todo_tasks = [];
    let collections = [];
    let current_list_name = "";
    // Getting the selected list's items (todo tasks) as an array
    try {
      todo_tasks = await database.collection(req.params.list_name).find({}).toArray(); // Change :- Add '.toArray()'
    } catch (err) {
      if (err) throw err;
    }
    // Getting collection names
    try {
      collections = await database.listCollections().toArray();
      db.close();
    } catch (err) {
      if (err) throw err;
    }
    // Getting the selected list's details
    collections.forEach(element => {
      if (element.name === req.params.list_name) {
        current_list = element; // I don't understand what this line does here
        current_list_name = element.name;
      }
    });
    // Rendering the front end
    res.render("todo_list.ejs", {
      tasks: todo_tasks,
      lists: collections,
      curr_list: current_list_name,
    });
  });
});

Related

Why is my response empty from the second mongoose query? MERN-Stack

Goal: Get the students based on the given library name. The student model has the library name linked in the database.
What's happening:
1: Retrieving the name that is linked to the given ID (library ID acquired with useParams().id).
2: Looking for all students based on that found library name.
Result: Empty response. I feel like the problem has to do with the line library_name = library.name;. Is it simply that the value hasn't been set yet when the second query starts to execute? Because when I log the result right after this line with res.send(library_name);, the name shows correctly.
app.get("/students/:id", (req, res) => {
  const id = req.params.id;
  let library_name = "";
  LibraryModel.findById(id, (err, library) => {
    library_name = library.name;
  });
  StudentModel.find({library: library_name}, (err, students) => {
    if (err) {
      res.send(err);
    } else {
      res.send(students);
    }
  });
});
You are right. library_name only gets set in the callback function you passed, which happens after StudentModel.find(...) gets called. Basically, you are currently performing these two calls in parallel.
There are three ways to resolve this issue.
Moving second call to callback function
app.get("/students/:id", (req, res) => {
  const id = req.params.id;
  let library_name = "";
  LibraryModel.findById(id, (err, library) => {
    library_name = library.name;
    StudentModel.find({library: library_name}, (err, students) => {
      if (err) {
        res.send(err);
      } else {
        res.send(students);
      }
    });
  });
});
Use promises
To avoid what's known as "callback hell", you can also use promises instead of callback functions and await them:
app.get("/students/:id", async (req, res) => {
  const id = req.params.id;
  try {
    const library = await LibraryModel.findById(id);
    const students = await StudentModel.find({library: library.name});
    res.send(students);
  } catch (err) {
    res.send(err);
  }
});
Use a single aggregation pipeline
You can also merge these two separate database queries into a single aggregation pipeline. You would need to first use $lookup and afterwards use $match (or filter by the library first) to narrow down to the specific entries. Nevertheless, additional information on the schemas would be needed to build this query.
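Purely as an illustration (the 'students' collection name and the field names are assumptions based on the snippets above, not on your actual schemas), such a pipeline could look roughly like this:

const result = await LibraryModel.aggregate([
  // match the requested library by its _id
  { $match: { _id: new mongoose.Types.ObjectId(id) } },
  // join the students whose 'library' field holds this library's name
  {
    $lookup: {
      from: "students",
      localField: "name",
      foreignField: "library",
      as: "students"
    }
  }
]);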
Another Hint
I assume you are trying to create a RESTful API. You might want to review your path structure, since a RESTful approach would expect the ':id' to be the ID of a student, not the ID of a library.
It looks like GET '/libraries/:id/students' makes more sense in your case.
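As a hedged sketch of that nested route, reusing the promise-based handler from above (only the path and parameter naming change):

app.get("/libraries/:id/students", async (req, res) => {
  try {
    // :id now unambiguously refers to the library
    const library = await LibraryModel.findById(req.params.id);
    const students = await StudentModel.find({ library: library.name });
    res.send(students);
  } catch (err) {
    res.send(err);
  }
});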

Querying multiple MongoDB collections in Node.js asynchronously

I need to fetch two different MongoDB collections (db.stats and db.tables) for the same request req.
Now, in the code below, I am nesting the queries within the callback function.
router.post('/', (req, res) => {
  let season = String(req.body.year);
  let resultData, resultTable;
  db.stats.findOne({Year: season}, function (err, data) {
    if (data) {
      resultData = getResult(data);
      db.tables.findOne({Year: season}, function (err, data) {
        if (data) {
          resultTable = getTable(data);
          res.render('index.html', {
            data: {
              result: resultData,
              message: "Working"
            }
          });
        } else {
          console.log("Error in Tables");
        }
      });
    } else {
      console.log("Error in Stats");
    }
  });
});
This code works, but there are a few things that don't seem right. So my question is:
How do I avoid this nested structure? Because it not only looks ugly, but also, while I am processing these requests, the client side is unresponsive, and that is bad.
What you have right now is known as callback hell in JavaScript. This is where Promises come in handy.
Here's what you can do:
router.post('/', (req, res) => {
  let season = String(req.body.year);
  var queries = [
    db.stats.findOne({ Year: season }),
    db.tables.findOne({ Year: season })
  ];
  Promise.all(queries)
    .then(results => {
      if (!results[0]) {
        console.log("Error in Stats");
        return; // bad response. a better way is to return status 500 here
      } else if (!results[1]) {
        console.log("Error in Tables");
        return; // bad response. a better way is to return status 500 here
      }
      let resultData = getResult(results[0]);
      let resultTable = getTable(results[1]);
      res.render('index.html', {
        data: {
          result: resultData,
          message: "Working"
        }
      });
    })
    .catch(err => {
      console.log("Error in getting queries", err);
      // bad response. a better way is to return status 500 here
    });
});
It looks like you are using Mongoose as your ODM to access your mongo database. When you don't pass in a function as the second parameter, the value returned by the function call (e.g. db.stats.findOne({ Year: season })) will be a Promise. We will put all of these unresolved Promises in an array and call Promise.all to resolve them. By using Promise.all, you are waiting until all of your database queries get executed before moving on to render your index.html view. In this case, the results of your database function calls will be stored in the results array in the order of your queries array.
Also, I would recommend doing something like res.status(500).send("A descriptive error message here") whenever there is an error on the server side in addition to the console.log calls.
The above will solve your nested structure problem, but the latter problem will still be there (i.e. the client side is unresponsive when processing these requests). In order to solve this, you need to first identify your bottleneck. What function calls are taking up most of the time? Since you are using findOne, I do not think that will be the bottleneck unless the connection between your server and the database has latency issues.
I am going to assume that the POST request is not done through AJAX since you have res.render in it, so this problem shouldn't be caused by any client-sided code. I suspect that either one of getResult or getTable (or both) is taking up quite a significant amount of time, considering the fact that it causes the client side to be unresponsive. What's the size of the data when you query your database? If the size of it is so huge that it takes a significant amount of time to process, I would recommend changing the way how the request is made. You can use AJAX on the front-end to make a POST request to the back-end, which will then return the response as a JSON object. That way, the page on the browser would not need to reload, and you'll get a better user experience.
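As a sketch only (assuming the server parses JSON bodies, e.g. with express.json() or body-parser, and that the route answers with res.json(...) instead of res.render(...)), the front-end call could look like this:

// Front-end: request the data without reloading the page
fetch('/', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ year: 2018 })
})
  .then(response => response.json())
  .then(data => {
    // update the DOM with the received results here
    console.log(data);
  });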
The MongoDB driver returns a promise if you don't pass a callback, so you can use async/await:
router.post('/', async (req, res) => {
  let season = String(req.body.year);
  let resultData, resultTable;
  try {
    const [data1, data2] = await Promise.all([
      db.stats.findOne({Year: season}),
      db.tables.findOne({Year: season})
    ]);
    if (data1 && data2) {
      resultData = getResult(data1);
      resultTable = getTable(data2);
      return res.render('index.html', {
        data: {
          result: resultData,
          message: "Working"
        }
      });
    }
    res.send('error');
    console.log("Error");
  } catch (err) {
    res.send('error');
    console.log("Error");
  }
});

How can I direct calls to different instance MongoDB based on logged in user?

I'm working on a project that will be a multi-tenant SaaS application, and am having difficulty implementing a way to log into various databases depending on the user's login info. Right now, I just want to split traffic between a Sandbox database (for demo purposes, which will be wiped on a regular basis) and an Alpha database (for current client testing and development). I have written the middleware below, config.js, that detects the user ID on login and assigns a database object using mongoose.createConnection(). This key-value pair is then added to a store using memory-cache. Here is the config.js code:
var mcache = require('memory-cache'),
    Promise = require("bluebird"),
    mongoose = require('mongoose');
Promise.promisifyAll(require("mongoose"));

(function () {
  'use strict';

  var dbSand = mongoose.createConnection(process.env.DB_SAND);
  var dbAlpha = mongoose.createConnection(process.env.DB_ALPHA);

  function dbPathConfigMiddlewareWrapper() {
    return function setDbPath(req, res, next) {
      if (req) {
        if (!mcache.get(req.session.id) && req.body.email) {
          var login = req.body.email;
          if (login === 'demo@mysite.com') {
            mcache.put(req.session.id, dbSand);
          } else {
            mcache.put(req.session.id, dbAlpha);
          }
        }
        req.dbPath = mcache.get(req.session.id);
        next();
      }
    };
  }

  module.exports = dbPathConfigMiddlewareWrapper;
}());
So far so good. But I have been unsuccessful in calling the correct database in my routes. When I was just using a single database, I could easily use this:
var connStr = process.env.DBPATH;
if (mongoose.connection.readyState === 0) {
  mongoose.connect(connStr, function(err) {
    if (err) throw err;
    console.log('Successfully connected to MongoDB');
  });
}
Now, I'm trying this to no avail:
var connStr = req.dbPath; // where req.dbPath is assigned in the config middleware above
if (connStr.connection.readyState === 0) {
  mongoose.connect(req.dbPath, function(err) {
    if (err) throw err;
    console.log('Successfully connected to MongoDB');
  });
}
Any guidance here would be greatly appreciated. This seems like it should be much more straightforward, and the documentation alludes to it but does not elaborate.
Here, I think, the problem is that you are saving a database object to your key-value storage with mcache.put(req.session.id, dbSand);, which causes the error in if(connStr.connection.readyState === 0).
You could stringify your object with mcache.put(req.session.id, JSON.stringify(dbSand));, then get the string back and parse it with var connStr = JSON.parse(req.dbPath);.
You don't call mongoose.connect() if you're manually creating connections.
Instead, you have to register your models for each connection, which is a bit of a PITA but as far as I know there's no way around that. It may require some restructuring of your code.
Here's some untested code on how you could set something like that up.
Your middleware file:
// Create connections
const registerModels = require('./register-models');
let dbSand = mongoose.createConnection(process.env.DB_SAND);
let dbAlpha = mongoose.createConnection(process.env.DB_ALPHA);
// Register your models for each connection.
registerModels(dbSand);
registerModels(dbAlpha);
function dbPathConfigMiddlewareWrapper() { ... }
register-models.js:
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
let UserSchema = new mongoose.Schema(...);
module.exports = function(conn) {
  conn.model('User', UserSchema);
};
This does mean that you can't use User.find(...) in your routes, because that only works when you're using a single connection (the default one that gets created with mongoose.connect(), which you're not using).
Instead, you should use something like this in your routes:
req.dbPath.model('User').find(...);
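As an untested sketch (the route path and the use of findById are illustrative), a route could then look like this:

app.get('/users/:id', async (req, res) => {
  try {
    // req.dbPath is the per-tenant connection set by the middleware above
    const user = await req.dbPath.model('User').findById(req.params.id);
    res.json(user);
  } catch (err) {
    res.status(500).send(err.message);
  }
});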

How to access the value of variable outside the function in node js (node js with postgresql)

var pg = require('pg');
var conString = "postgres://postgres:abk@localhost/bot";
var res = [];

pg.connect(conString, function(err, client, done) {
  if (err) {
    return console.error('error fetching client', err);
  }
  client.query('SELECT * FROM bot.questions', function(err, result) {
    done();
    if (err) {
      return console.error('error running query', err);
    }
    res.push(result.rows[0]);
    console.log(res); // inside function scope
  });
});

console.log(res); // outside function scope
"console.log()" which has called inside the function gives proper resulting array, but outside function the same array variable shows an empty array. I have tried lot of things, such as callbacks, promises functionalities in node js but was not able to see the resulting "res" outside the function.
Please suggest me how do I make that "res" variable accessible from outside of function.
NOTE: Specifically, I need "console.log(res)" to print the "res" outside of function as mentioned in above code.
The res variable is indeed accessible outside the function, as it is defined outside the function.
The res variable is indeed populated outside of the function, since it is seen as populated inside the function.
The root cause of your problem is that the outside print happens chronologically earlier than the inside activity: as the function is asynchronous, the call returns immediately and execution proceeds to the last line of the code.
If you are just particular about getting the value outside, that is indeed the case.
If you only want to print the value outside, use a delayed callback in which to print the content.
If you want to post-process the value outside, you will need to link the connect function with a next function, through synchronization primitives.
Hope this helps.
As explained before, the query's result will exist only when the callback is fired, so it won't make sense accessing it in a part of the code that would execute before that.
It seems like you need to read a bit about node's asynchronous programming.
I suggest you read about promises and generators.
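As a hedged illustration of the promise idea, keeping the same pg API as in the question (the function name is a placeholder), the query could be wrapped in a Promise and consumed once it resolves:

function fetchQuestions() {
  return new Promise((resolve, reject) => {
    pg.connect(conString, (err, client, done) => {
      if (err) return reject(err);
      client.query('SELECT * FROM bot.questions', (err, result) => {
        done();
        if (err) return reject(err);
        resolve(result.rows);
      });
    });
  });
}

// usage: this log only runs once the query has finished
fetchQuestions()
  .then(rows => console.log(rows))
  .catch(err => console.error('error running query', err));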
I finally found a solution: I created a separate file for the db config, which exports a function that returns the resulting array of values through a callback. Here is the "db.js" file I created:
var pg = require('pg');
var conString = "postgres://postgres:abk@localhost/chatbot";
var resp = [];

function executeQuery(callback) {
  pg.connect(conString, function(err, client, done) {
    if (err) {
      return console.error('error fetching client', err);
    }
    client.query('SELECT * FROM bot.questions', function(err, result) {
      done();
      if (err) {
        return console.error('error running query', err);
      }
      resp.push(result.rows[0]);
      // console.log(res);
      callback(resp);
    });
  });
};

exports.executeQuery = executeQuery;
and I have imported this into my "app.js" like below:
var express = require('express')
var bodyParser = require('body-parser')
var request = require('request')
var app = express()
var db = require('./db')

app.post('/webhook/', function (req, res) {
  db.executeQuery(function (resp) {
    console.log(resp);
  });
});
This is how I can now access "resp" as the resulting array and pass it to any function as an argument wherever required.
There is a specific need, as per my requirements, to keep the db config file separate from the app.js file; otherwise I know it could also be handled in the same app.js file.
So finally, this works for me perfectly.
This is the kind of example I was expecting, but I found it by myself.
Anyway, thanks for your involvement and suggestions.

How does Node JS manage global vars as it is single threaded and asynchronous?

I am trying to develop an API with Node.js which accepts an object containing multiple queries to MongoDB and answers with an object containing the different results (in fact JSON).
I use Express and my code is:
var nb_query = 0;
var results;

//api
app.get("/api/:p", api);

function api(req, res) {
  var jsonq = decodeURIComponent(req.params.p);
  //console.log(jsonq);
  var queries = JSON.parse(jsonq);
  nb_query = Object.keys(queries).length;
  results = {};
  for (var nq in queries) { // for each query
    do_find_query(nq, queries[nq], function() {
      //todo : managing head
      res.end(JSON.stringify(results));
    });
  }
} // end of api function

function do_find_query(name_query, query, callback) {
  var collection = fdb.collection(query.collection);
  collection.find(query.find, query.fields, query.options).toArray(function(err, docs) {
    if (err) throw err;
    results[name_query] = docs;
    nb_query--;
    if (nb_query == 0)
      callback();
  });
}
As you see, I use global vars to store the results and the counter nb_query, and I ask myself whether this is a problem or not (right now no, because I am alone on the server, but what about when there are thousands of users? :-) ).
As I understand Node, there is only one thread, and I think Node will finish a started job unless it encounters an I/O access. In that case, it stacks the I/O with the callback and begins to answer a new request.
If this is correct, I think Node could answer 2 or more different calls to my API (which need Mongo calls) at the same time and thus store different values in global vars, which are shared (there's only one thread).
If this is right, I would also like to know the best way to change it.
I had the idea of declaring results and nb_query in the api function and passing them to do_find_query, but nb_query isn't an object and so isn't changed correctly.
I know I can put nb_query in an object to pass it 'by reference', but I want to know first if it is necessary, and whether it is a good way or there is a better one.
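As a small standalone illustration of that "wrap it in an object" idea (unrelated to any specific library): numbers are copied when passed to a function, while an object's properties stay shared:

var n = 3;
function decrementNumber(x) { x--; } // only changes the local copy
decrementNumber(n);
console.log(n); // still 3

var state = { left: 3 };
function decrementCounter(counter) {
  counter.left--; // mutates the caller's object
}
decrementCounter(state);
console.log(state.left); // 2 -- the change is visible outside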
Thanks for your help !
Doom.
------------------------------------------------------------------------------
EDIT :
I have changed my code and it seems to work without global vars and without the async library (which to me is using a hammer to swat a fly):
//api
app.get("/api/:p", api);

function api(req, res) {
  var jsonq = decodeURIComponent(req.params.p);
  //console.log(jsonq);
  var queries = JSON.parse(jsonq);
  var query_names = Object.keys(queries);
  var results = {};
  var query_left = query_names.length;
  query_names.map(function(query_name) {
    var query = queries[query_name];
    var collection = fdb.collection(query.collection);
    collection.find(query.find, query.fields, query.options).toArray(function(err, docs) {
      if (err) throw err; //todo : handle errors in a better way
      results[query_name] = docs;
      if (--query_left == 0)
        res.json(results);
    });
  });
}
But I still do not know whether this is necessary to do or not. (I think so, but I am new to Node, so ...)
Thanks to mscdex, as his answer made me aware of res.json() and helped me understand variable scope.
Instead of using globals, try this (uses the async module):
var async = require('async');
// ...
app.get('/api/:p', api);

function api(req, res) {
  var jsonq = decodeURIComponent(req.params.p),
      queries = JSON.parse(jsonq),
      keys = Object.keys(queries),
      queriesLeft = keys.length,
      results = {};
  async.each(keys, function(name, cb) {
    var query = queries[name],
        collection = fdb.collection(query.collection);
    collection.find(query.find, query.fields, query.options)
      .toArray(function(err, docs) {
        if (err) return cb(err);
        results[name] = docs;
        cb();
      });
  }, function(err) {
    if (err) throw err; // TODO: handle better
    res.json(results);
  });
} // end of api function
