I am new to Node, Express and Mongo, so I need some guidance. I set up a Mongo database with 3 objects. I created an application with a route so that http://localhost:3000/Employeeid displays all 3 objects in the database, which it does. However, on each subsequent refresh the same data is displayed again, repeated once more per refresh (i.e. after the first refresh the data appears twice, after the second refresh three times, and so on). Does anyone know what may be wrong?
var express = require('express')
var app = express()
var MongoClient = require('mongodb').MongoClient
var url = 'mongodb://localhost:27017'
var str = ''
app.route('/Employeeid').get(function(req, res) {
  MongoClient.connect(url, { useNewUrlParser: true }, function(err, client) {
    var db = client.db('EmployeeDB')
    var cursor = db.collection('Employee').find()
    cursor.forEach(function(item) {
      if (item != null) {
        str = str + ' Employee id  ' + item.Employeeid + '</br>'
      }
    })
    res.send(str)
    client.close()
  })
})
var server = app.listen(3000, function() {})
You are appending to str, a module-level variable, every time the /Employeeid route is called. To fix this, declare str inside the callback and send the response only after the cursor has been fully iterated (forEach's second argument runs when iteration ends):
app.route('/Employeeid').get(function(req, res) {
  MongoClient.connect(url, { useNewUrlParser: true }, function(err, client) {
    var str = ''
    var db = client.db('EmployeeDB')
    var cursor = db.collection('Employee').find()
    cursor.forEach(function(item) {
      if (item != null) {
        str = str + ' Employee id  ' + item.Employeeid + '</br>'
      }
    }, function(err) {
      // runs once the cursor has been fully iterated
      res.send(str)
      client.close()
    })
  })
})
Side note - you should reuse the Mongo connection instead of calling connect and close on every request.
The reason you're getting duplicate results is that str is a global variable and you never reset it after sending the result, so each request keeps concatenating onto it. Your code can also be modified for better performance, like this:
var express = require('express')
var app = express()
var MongoClient = require('mongodb').MongoClient
var url = 'mongodb://localhost:27017'
var str = ''
var db = null
MongoClient.connect(url, { useNewUrlParser: true }, function(err, client) {
  if (err) {
    throw new Error(err);
  }
  db = client.db('EmployeeDB') // store the db reference in the global variable db
});
app.route('/Employeeid').get(function(req, res) {
  db.collection('Employee').find().toArray(function(err, items) {
    if (err) {
      res.send('');
      throw new Error(err);
    }
    items.forEach(function(item) {
      if (item != null) {
        str = str + ' Employee id  ' + item.Employeeid + '</br>'
      }
    })
    res.send(str)
    str = '';
  })
})
var server = app.listen(3000, function() {})
If you don't move res.send() into the callback function, you may send the result before the data has been fetched from the db, due to the async nature of Node.
So I am making a kind of API middleware for my company that will grab information from the NOAA API and then store it in my database. (It does more than that, but that's a separate part.) I have set it up so that it works: it gets the information and stores it in my SQL database perfectly. The issue is that the information I get is based on zip code, and one request returns the information for one zip code. I need to be able to 'loop' through a list of zip codes one at a time and store the information in the database. I am not sure how to get this working properly. I have tested a couple of approaches but have not been able to get it to work, so if someone can point me in the right direction it would be appreciated.
Sorry in advance, my code is not cleaned up.
Everything below apiRequest.end() has little bearing on the question; I keep it for context.
let mysql = require('mysql');
let config = require('./config.js');
var https = require("https");
var express = require("express");
var app = express();
const port = 3000;
var fs= require('fs');
var csv = require('fast-csv');
//last test
//array will replace this zip variable
let zip = '90012';
api(zip);
function api(zips){
// All of the parts for building the GET request's URL
app.get("/", function(req, response) {
var apiKey = "gPaEVizejLlbRVbXexyWtXYkfkWkoBhd";
let webapi = 'https://www.ncdc.noaa.gov/cdo-web/api/v2/data?';
let datasetid="datasetid=GHCND";
let datatypeid="&datatypeid=TMAX";
let location="&locationid=ZIP:";
const zipcode = zips;
let startdate="&startdate=2019-01-01";
let enddate="&enddate=2020-01-01";
let units = "&units=standard";
let limit="&limit=1000";
let url = webapi + datasetid + datatypeid + location + zipcode + startdate + enddate + units + limit;
var options = {
port: 443,
method: "GET",
headers: {
"token": apiKey
}
};
let data = "";
// request to grab data from the NOAA API
let apiRequest = https.request(url, options, function(res) {
console.log("Connected");
// grabbing all the data
res.on("data", chunk => {
data += chunk;
});
res.on("end", () => {
console.log("data collected");
//Format JSON data
response.send(JSON.parse(data));
var getData = JSON.parse(data);
if(isEmpty(getData)){
emptyCorrect();
}
dataFormat(getData);
});
});
apiRequest.end();
});
// fix the date format; more formatting can be added here if needed
function dataFormat(formData){
for(x in formData.results){
let date = formData.results[x].date;
formData.results[x].date = date.slice(0,10);
}
jsonToSQL(formData.results);
}
//test function is going to be used for inserting the zip
function test(){
var content = "";
console.log("your test worked see ***************");
return "92507";
}
// function to add the grabbed JSON data into the SQL database
function jsonToSQL(datafin){
var zipcode = zips;
let connection = mysql.createConnection(config);
// insert statement
let stmt = `INSERT INTO test1(ZIPCODE,DATE, TEMP) VALUES ? `;
let values = [];
for(let x in datafin){
values.push([zipcode,datafin[x].date,datafin[x].value]);
}
// execute the insert statement
connection.query(stmt, [values], (err, results, fields) => {
if (err) {
return console.error("error");
}
// get inserted rows
console.log('Row inserted:' + results.affectedRows);
});
// close the database connection
connection.end();
}
function emptyCorrect(){
console.log("Eror correction");
var zipcode = zips;
let connection = mysql.createConnection(config);
// insert statement
let stmt = `INSERT INTO test1(ZIPCODE,DATE, TEMP) VALUES ? `;
let valueE = [];
valueE.push([zipcode,"0","No Data"]);
// execute the insert statement
connection.query(stmt, [valueE], (err, results, fields) => {
if (err) {
return console.error("error");
}
// get inserted rows
console.log('Row inserted:' + results.affectedRows);
});
// close the database connection
connection.end();
}
function isEmpty(obj) {
for(var key in obj) {
if(obj.hasOwnProperty(key))
return false;
}
return true;
}
app.listen(port, () => console.log(`Example app listening on port ${port}!`))
}
As I understand it, your problem can roughly be summarized as "How to loop through asynchronous evaluations in Node.js".
There are a few options. I would recommend wrapping the call to the NOAA API in a promise and then chaining those promises. This can be done as follows:
app.get('/', async function(req, response) {
var apiKey = 'some value';
let webapi = 'https://www.ncdc.noaa.gov/cdo-web/api/v2/data?';
let datasetid = 'datasetid=GHCND';
let datatypeid = '&datatypeid=TMAX';
let location = '&locationid=ZIP:';
let startdate = '&startdate=2019-01-01';
let enddate = '&enddate=2020-01-01';
let units = '&units=standard';
let limit = '&limit=1000';
var options = {
port: 443,
method: 'GET',
headers: {
token: apiKey
}
};
const zipCodes = ['90012', '90013']; // Place a call to your function for fetching zip codes here
let datas = [];
let prom = Promise.resolve();
zipCodes.forEach(zipcode => {
prom = prom.then(() =>
new Promise((resolve, reject) => {
let url =
webapi +
datasetid +
datatypeid +
location +
zipcode +
startdate +
enddate +
units +
limit;
let apiRequest = https.request(url, options, function(res) {
console.log('Connected');
let data = '';
res.on('data', chunk => {
data += chunk;
});
res.on('end', () => {
console.log('data collected for zip ' + zipcode);
datas.push(data);
resolve();
});
});
apiRequest.end();
})
);
});
prom.then(() => {
// All requests have now been handled sequentially
response.send(/* You'll need to figure out what to do here */);
});
});
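Since the route handler above is already declared async, the same sequential behaviour can also be sketched with await and a for...of loop. This is only a sketch; fetchZip and fetchAllZips are illustrative names, and the URL-building variables are assumed to be the ones from the handler above:
// Sketch only: an illustrative helper that wraps one NOAA request in a promise.
const https = require('https'); // already required in the question's code

function fetchZip(url, options) {
  return new Promise((resolve, reject) => {
    const apiRequest = https.request(url, options, res => {
      let data = '';
      res.on('data', chunk => (data += chunk));
      res.on('end', () => resolve(data));
      res.on('error', reject);
    });
    apiRequest.on('error', reject);
    apiRequest.end();
  });
}

// Sequential loop; assumes webapi, datasetid, datatypeid, location, startdate,
// enddate, units and limit from the handler above are in scope.
async function fetchAllZips(zipCodes, options) {
  const datas = [];
  for (const zipcode of zipCodes) {
    const url = webapi + datasetid + datatypeid + location + zipcode +
      startdate + enddate + units + limit;
    datas.push(await fetchZip(url, options));
  }
  return datas;
}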
An alternative is to use something like the async library for dealing with sequential callbacks. The async library (https://github.com/caolan/async) describes itself as:
Async is a utility module which provides straight-forward, powerful functions for working with asynchronous JavaScript.
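For illustration, here is a rough sketch of the same sequential loop using async.eachSeries; fetchOneZip is an illustrative name standing in for the single-zipcode request logic shown above:
var async = require('async');

// fetchOneZip(zipcode, done) is assumed to perform one NOAA request for the
// given zipcode and call done(err) when it has finished (illustrative name).
async.eachSeries(zipCodes, function(zipcode, done) {
  fetchOneZip(zipcode, done);
}, function(err) {
  if (err) {
    return console.error(err);
  }
  console.log('All zip codes processed');
});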
See e.g. Node.js: How do you handle callbacks in a loop? for a similar problem (not about calling an API, but about dealing with asynchronous functions in a loop).
I'm currently working on an automation project (Arduino <-> Serial <-> Raspberry Pi) and I seem to be stuck on the serial interface.
Basically, I'm using Express to read/write some parameters on the Arduino by URL, with a syntax like http://localhost:3000/parameter/6 or http://localhost:3000/parameter/6/set?value=10.
Since I want a simple/short result (as from an API), I decided to render a simple JSON object on each request; there is also no way for me to use post-rendering scripts/frameworks like Socket.io/jQuery/... for this purpose (I'll probably often call those URLs from curl/wget/[other HTML parsers]).
Now, the thing is that there is no problem writing to the port, but for reading I'd like to wait for the buffer returned by serialport.on('data', function(buffer) { ... }) before rendering the page.
As serialport.on('data', ...) only seems to fire once per request, the only way I've found so far is to redirect to the same page until the buffer has been read, which seems like a nasty way of getting things done, and it is also incomplete...
Here's a bit of code to get an idea of the actual logic:
Library:
// -> lib/serial.js - LIBRARY
var SerialPort = require("serialport");
var data = {'state': 0};
var serialPort = new SerialPort(port, {
baudrate: 115200,
parser: SerialPort.parsers.readline('\r\n'),
});
serialPort.on('open', function() {
console.log('Serial port Open');
serialPort.on('data', function(buffer) { data.buffer = buffer, data.state = 1 });
});
function readConfig(cmd, paramNb) {
var cmd = String.fromCharCode(cmd);
var param = String.fromCharCode(paramNb);
if (serialPort.isOpen() == true) {
serialPort.write(cmd + param + '\n', function(err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log('message written');
});
}
return data;
};
function writeConfig(cmd, paramNb, value) {
var cmd = String.fromCharCode(cmd);
var param = String.fromCharCode(paramNb);
var value = String.fromCharCode(value);
if (serialPort.isOpen() == true) {
serialPort.write(cmd + param + value + '\n', function(err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log('message written');
});
}
};
exports.readConfig = readConfig;
exports.writeConfig = writeConfig;
Route:
// -> parameter.js - ROUTE
var express = require('express');
var router = express.Router();
var serial = require('../../lib/serial');
var sleep = require('sleep');
/* SET Parameter. */
router.get('/:id/set', function(req, res) {
var id = req.params.id;
var value = req.query.value;
serial.writeConfig('b', id, value);
res.json({'result': value, 'id': id});
});
/* GET Parameter. */
router.get('/:id', function(req, res) {
var id = req.params.id;
var buffer = serial.readConfig('a', id);
if (buffer.state != 0) {
res.json({'result': buffer});
}
else {
sleep.usleep(100000);
res.redirect('/api/parameter/' + id); // <- Nasty Bulls**t
}
});
module.exports = router;
The first idea I came up with was to find a synchronous way to read the port, so I could call something like this (pseudo-code):
while (buffer == '') { var buffer = serial.read; }
res.json({buffer});
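A sketch of how that idea could look without blocking is to wrap a single write/read round trip in a promise and wait for it in the route. This assumes the serialPort instance from lib/serial.js above (with the global 'data' listener removed); readConfigAsync is an illustrative name:
// lib/serial.js (sketch)
function readConfigAsync(cmd, paramNb) {
  return new Promise(function(resolve, reject) {
    // Resolve with the next line that arrives on the port.
    serialPort.once('data', function(buffer) {
      resolve(buffer);
    });
    serialPort.write(String.fromCharCode(cmd) + String.fromCharCode(paramNb) + '\n', function(err) {
      if (err) {
        reject(err);
      }
    });
  });
}

// parameter.js (sketch)
router.get('/:id', function(req, res) {
  readConfigAsync('a', req.params.id)
    .then(function(buffer) {
      res.json({'result': buffer});
    })
    .catch(function(err) {
      res.status(500).json({'error': err.message});
    });
});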
Anyway thanks everyone.
//module.js
exports.doA = function(callback) {
db.asyncConnect(/* connect options */, function(err, database) {
if(err == null) {
exports.db = database;
}
});
}
exports.db = null;
// test1.js
var mydb = require('module');
console.log(mydb);
// test2.js
var db = require('module').db;
console.log(db);
How is var mydb = require('module'); different from var mydb = require('module').db;?
Update: adding the code and behavior I am observing.
// file: db.js
exports.init = function (start_server){
MongoClient.connect(url, {
db: {
raw: true
},
server: {
poolSize: 5
}
},
function(err, database) {
exports.db = database;
if(err == null)
start_server();
}
);
}
exports.db = null;
// file: test.js
var mongodb = require('./db.js');
var db = require('./db.js').db;
console.log("MongoDb " + mongodb.db);
console.log("DB " + db);
Output:
MongoDb [object Object]
DB null
Q. Why does the variable db come out as null while mongodb.db has a value?
Even if I assign the value of mongodb.db to a variable, the value comes out as null.
var mydb = require('module');
This gets the entire module.
var mydb = require('module').db;
This gets the db property of the object that the module exports.
The downside of the latter is that you can't access the "parent" object anymore: whatever .db holds at the moment of the require is what you get and nothing else, so later reassignments of exports.db are not visible through your copy.
Quick example:
//some_module.js
var SomeModule = {
db: function () {
console.log("hello");
}
};
module.exports = SomeModule;
//some_other_module.js
var SomeModule = require('./some_module.js');
console.log(SomeModule); // [Object object]
var SomeDB = require('./some_module.js').db;
console.log(SomeDB); // function () {}
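To connect this to the output in the question, here is a minimal sketch (file names are illustrative) of why the copied property stays null while the property read through the module object updates:
// lazy.js (illustrative)
exports.db = null;
// stands in for the async MongoClient.connect callback
setTimeout(function() {
  exports.db = { name: 'connected' };
}, 100);

// consumer.js (illustrative)
var lazy = require('./lazy.js');
var db = require('./lazy.js').db; // copies the current value: null
setTimeout(function() {
  console.log(lazy.db); // { name: 'connected' } - read through the live exports object
  console.log(db);      // null - the copy taken at require time never updates
}, 200);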
They are basically different.
var mydb = require('module');
This imports the whole exports object, so both mydb.doA and mydb.db are available.
var db = require('module').db;
Here only the db object is imported into your code. Unlike the first example, db by itself is the same as mydb.db in the first example.
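If you need the value that only becomes available after the async connect finishes, one common workaround (a sketch, not taken from either answer) is to export a function that reads the property at call time instead of copying it at require time:
// db.js (sketch; the connection string is an assumption)
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017';
var db = null;

exports.init = function(done) {
  MongoClient.connect(url, function(err, database) {
    db = database;
    done(err);
  });
};

exports.getDb = function() {
  return db; // reads the current value at call time
};

// test.js (sketch)
var dbModule = require('./db.js');
dbModule.init(function(err) {
  if (err) throw err;
  console.log(dbModule.getDb()); // the connected handle, not a stale copy
});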
While implementing promises, I ended up with this code:
var MongoClient = require('mongodb').MongoClient
MongoClient.connect(db_uri, function(err, db) {
if(err) throw err;
var ccoll = db.collection('cdata');
app.locals.dbstore = db;
});
var json= {}
//Auth is a wrapped mongo collection
var Auth = app.locals.Auth;
var coll = app.locals.dbstore.collection('data');
var ucoll = app.locals.dbstore.collection('udata');
var ccoll = app.locals.dbstore.collection('cdata');
var Q = require('q');
//testing with certain _id in database
var _id = require('mongodb').ObjectID('530ede30ae797394160a6856');
//Auth.getUserById = collection.findOne()
var getUser = Q.nbind(Auth.getUserById, Auth);
//getUserInfo gives a detailed information about each user
var getUserInfo = Q.nbind(ucoll.findOne, ucoll);
var getUserData = Q.nbind(ccoll.findOne, ccoll);
//"upr" is a group of users
//getUsers gives me a list of users, belonging to this group
var getUsers = Q.nbind(ucoll.find, ucoll);
//Auth.getUserById = collection.find()
var listUsers = Q.nbind(Auth.listUsers, Auth);
var uupr = {}
var cupr = {}
getUserInfo({_id:_id})
.then(function(entry){
console.log('entry:', entry);
uupr = entry;
var queue = [getUsers({upr:entry.name}), getUserData({_id:entry._id})]
return Q.all(queue);
}
)
.then(function(array2){
console.log('array2:', array2);
cupr = array2[1]
var cursor = array2[0]
var cfill = Q.nbind(cursor.toArray, cursor);
return cfill();
}
)
.then(function(data){
json = {data:data, uupr:uupr, cupr:cupr}
console.log('json:', json)
res.render('test', {json : JSON.stringify(json)})
}
)
Its work can be described by a diagram:
getUserInfo()==>(entry)--+-->getUsers()=====>array2[0]--+-->populate user list===>data--->render
                         |                              |
                         +-->getUserData()==>array2[1]--+
I've used the external variables uupr and cupr to store data from the earlier .then calls.
So I have two problems:
1) Avoid using external variables.
2) Rearrange the code to get this alternative flow diagram:
getUserInfo()==>(entry)--+-->getUsers()==>usersList-->populate user list==>usersData-+->render
                         |                                                           |
                         +-->getUserData()====>uprData------------------------------+
Any advice is appreciated.
Try something along the lines of this pseudo-code:
getUserInfo().then(function(userInfo) {
return Q.all([
userInfo,
getUsers(... userInfo ...).then(convert to array),
getUserData(... userInfo ...)
])
}).spread(function(userInfo, usersArray, userData) {
res.render(...)
}, function(err) {
handle the error
}).done()
You can simply nest them:
getUserInfo({_id:_id})
.then(function(entry){
console.log('entry:', entry);
return Q.all([
getUsers({upr:entry.name}),
getUserData({_id:entry._id})
])
.spread(function(cursor, cupr) {
console.log('array2:', [cursor, cupr]);
return Q.ninvoke(cursor, "toArray")
.then(function(data){
return {data:data, uupr:entry, cupr:cupr};
});
});
}).then(function(json) {
console.log('json:', json)
res.render('test', {json: JSON.stringify(json)})
});
Now, so that toArray does not have to wait for the getUserData result, just do those in parallel:
getUserInfo({_id:_id})
.then(function(entry){
console.log('entry:', entry);
return Q.all([
getUsers({upr:entry.name}).invoke("toArray"),
getUserData({_id:entry._id})
])
.spread(function(data, cupr) {
return {data:data, uupr:entry, cupr:cupr};
});
}).then(function(json) {
console.log('json:', json)
res.render('test', {json: JSON.stringify(json)})
});
(Using invoke instead of an explicit then.)
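For comparison, the same flow could be sketched with async/await (Q promises are thenables, so await works on them). This assumes it runs inside an Express route handler like the original code, with the Q.nbind-wrapped functions and _id from the question in scope:
// Sketch only; getUserInfo, getUsers, getUserData and _id come from the question.
async function renderUser(req, res) {
  var entry = await getUserInfo({_id: _id});
  var results = await Q.all([
    getUsers({upr: entry.name}).invoke('toArray'),
    getUserData({_id: entry._id})
  ]);
  var json = {data: results[0], uupr: entry, cupr: results[1]};
  console.log('json:', json);
  res.render('test', {json: JSON.stringify(json)});
}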
I am getting the error "Empty reply from server" in cURL when I try to request http://localhost:3000/light.
var express = require('express'),
values = require('./routes/values');
var app = express();
app.get('/light', values.findAll);
app.listen(3000);
console.log('Listening on port 3000...');
and my values.js is
var mongo = require('mongodb');
var Server = mongo.Server,
Db = mongo.Db,
BSON = mongo.BSONPure;
var server = new Server('localhost', 27017, {auto_reconnect: true});
db = new Db('sensordb', server);
db.open(function(err, db) {
if(!err) {
console.log("Connected to 'sensordb' database");
db.collection('values', {safe:true}, function(err, collection) {
if (err) {
console.log("The 'values' collection doesn't exist. Creating it with sample data...");
populateDB();
}
});
}
});
exports.findAll = function(req, res) {
db.collection('values', function(err, collection) {
collection.find().toArray(function(err, items) {
res.send(items);
});
});
};
var populateDB = function() {
var values = [
{
value: "10",
date: "121212",
time: "1214"
},
{
value: "12",
date: "121212",
time: "1224"
}];
db.collection('values', function(err, collection) {
collection.insert(values, {safe:true}, function(err, result) {});
});
};
Basically, in the above code I create a database, and if the database is empty I try to populate it. When I run the server code in my terminal, I also get something like:
= Please ensure that you set the default write concern for the database by setting =
= one of the options =
= =
= w: (value of > -1 or the string 'majority'), where < 1 means =
= no write acknowlegement =
= journal: true/false, wait for flush to journal before acknowlegement =
= fsync: true/false, wait for flush to file system before acknowlegement =
= =
= For backward compatibility safe is still supported and =
= allows values of [true | false | {j:true} | {w:n, wtimeout:n} | {fsync:true}] =
= the default value is false which means the driver receives does not =
= return the information of the success/error of the insert/update/remove =
= =
= ex: new Db(new Server('localhost', 27017), {safe:false}) =
= =
= http://www.mongodb.org/display/DOCS/getLastError+Command =
= =
= The default of no acknowlegement will change in the very near future =
= =
= This message will disappear when the default safe is set on the driver Db =
========================================================================================
Is the above error causing some problem? If so, what? If not, what is responsible?
Any help would be really appreciated.
It seems that your populateDB function uses the global db variable, while the opened db is a different variable, passed as a parameter to the callback you provide when opening the database.
This happens because you have two different variables called db, each visible in a different scope and referring to a different object. To make the picture clearer, I added a parameter to your populateDB function and used db1 and db2 as the callback/parameter names. The modifications below should help:
db.open(function(err, db1) {
...
populateDB(db1);
...
var populateDB = function(db2) {
...
db2.collection('values', function(err, collection) {
collection.insert(values, {safe:true}, function(err, result) {});
});
};
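For completeness, a hedged sketch of how values.js could look with that parameter in place, reusing the collection name and sample data from the question:
db.open(function(err, db1) {
  if (!err) {
    console.log("Connected to 'sensordb' database");
    db1.collection('values', {safe: true}, function(err, collection) {
      if (err) {
        console.log("The 'values' collection doesn't exist. Creating it with sample data...");
        populateDB(db1); // pass the opened handle down instead of relying on the outer db
      }
    });
  }
});

var populateDB = function(db2) {
  var values = [
    { value: "10", date: "121212", time: "1214" },
    { value: "12", date: "121212", time: "1224" }
  ];
  db2.collection('values', function(err, collection) {
    collection.insert(values, {safe: true}, function(err, result) {});
  });
};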