Populating MongoDB in a loop in Node.js

I would like to populate a MongoDB database using Node.js (mongoose). I'm trying to do it asynchronously (using async.js) after the DB connection has been made, but it's not working as I hoped. Instead of filling the database with a small number of documents, the database connection is opened and closed before the writes can be made. Sounds like a very typical async problem, but I am at a loss.
My code is currently as follows.
var runStart = '2019-01-01 00:00:00';
var runEnd = '2019-01-17 00:00:00';
var periods = [];
var d = moment(runStart);
while (d < moment(runEnd)) {
  periods.push([d.format(), d.add(1, 'day').format()]);
}
mongoose.connect(config.db);
const db = mongoose.connection;
db.on('error', () => {
  throw new Error('unable to connect to database');
});
db.on('open', () => {
  console.log('DB open');
  async.each(
    periods,
    function(period, next) {
      Period({
        start: moment(period[0]),
        end: moment(period[1]),
        type: 'delivery',
        notes: ''
      }).save((err) => {
        if (err) callback(err);
      });
      next();
    },
    function(err) {
      db.close();
      console.log('FINISHED!');
    });
});
What am I doing wrong?

Two things go wrong in your iterator:
function(period, next) {
  Period({
    start: moment(period[0]),
    end: moment(period[1]),
    type: 'delivery',
    notes: ''
  }).save((err) => {
    if (err) callback(err); // `callback` should be `next`
  });
  next(); // `next` is called before the document has been saved
},
Change that code to this:
function(period, next) {
  Period({
    start: moment(period[0]),
    end: moment(period[1]),
    type: 'delivery',
    notes: ''
  }).save((err) => {
    next(err);
  });
},
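If the goal is simply to bulk-insert a fixed set of documents, you could also drop async.js and let Mongoose batch the writes. A rough sketch, assuming a Mongoose version where Model.insertMany() returns a promise:
var docs = periods.map(function(period) {
  return {
    start: moment(period[0]),
    end: moment(period[1]),
    type: 'delivery',
    notes: ''
  };
});

Period.insertMany(docs)
  .then(function() {
    console.log('FINISHED!');
    db.close();
  })
  .catch(function(err) {
    console.error(err);
    db.close();
  });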

Related

NodeJs - Rendering same page after multiple SELECT queries

I have the code below in my index.js file. I can print data from the profile table, but I also need to print data from the resume table on the same page (index.njk), and I couldn't manage it. I found a similar question, but I'm new to this and couldn't adapt that code to my project. Can you please help?
var express = require('express'),
path = require('path'),
bodyParser = require('body-parser'),
router = express.Router(),
app = express();
var pg =require('pg');
// DB Connect string
var connect = {
user: 'arslan',
database: 'resumedb',
password: '1984',
host: 'localhost',
port: 5432,
max: 10,
idleTimeoutMillis: 30000,
};
router.get('/', function(req, res){
pg.connect(connect, function(err, client, done, skills){
if(err){
return console.error('errrr', err)
}
//Get Profile Informations
client.query('select id,fname,lname,title,description,profileimage from profile', function(err, result){
if(err){
return console.error('error running query', err);
}
if(result.rows.length > 0) {
res.render('index.njk', {
profileName: result.rows[0].fname,
profileLName: result.rows[0].lname , profileTitle: result.rows[0].title
, profileDesc: result.rows[0].description
, profileImage: result.rows[0].profileimage
});
console.log(result.rows[0].profileimage);
}else {
console.log('No rows found in DB');
}
done()
});
});
});
The cleanest solution for this kind of async work is Promises.
Your config object is set up for a connection pool, but you never actually create one; using a pool is usually a good idea.
Start by creating a new module, db.js, to run your queries:
const pg = require('pg')
const connect = { // Normally you would store this in a config file
  user: 'arslan',
  database: 'resumedb',
  password: '1984',
  host: 'localhost',
  port: 5432,
  max: 10,
  idleTimeoutMillis: 30000
}
let pool = new pg.Pool(connect)
exports.query = (query, values) => {
  return new Promise((resolve, reject) => {
    pool.connect(function(err, client, done) {
      if (err)
        return reject(err)
      client.query(query, values, (err, result) => {
        done()
        if (err)
          return reject(err)
        resolve(result.rows) // resolve with the rows so queryOne can inspect them
      })
    })
  })
}
exports.queryOne = (query, values) => {
  return new Promise((resolve, reject) => {
    this.query(query, values).then(rows => {
      if (rows.length === 1) {
        resolve(rows[0])
      } else if (rows.length === 0) {
        resolve()
      } else {
        reject(new Error('More than one row in queryOne'))
      }
    }).catch(err => {
      reject(err)
    })
  })
}
pool.on('error', function (err, client) {
  console.error('idle client error', err.message, err.stack)
})
and then in your route
// ...
const db = require('./db')
router.get('/', function(req, res, next) {
  let profileQuery = db.queryOne('select id,fname,lname,title,description,profileimage from profile')
  let resumeQuery = db.query('???')
  Promise.all([profileQuery, resumeQuery]).then(([profile, resume]) => {
    if (!profile) {
      return res.status(404).send('Profile not found') // error page
    }
    res.render('index.njk', {
      profileName: profile.fname,
      profileLName: profile.lname,
      profileTitle: profile.title,
      profileDesc: profile.description,
      profileImage: profile.profileimage
    })
  }).catch(err => {
    next(err)
  })
})
For a single query you can use db.query('select 1 + 1').then(rows => { /* your code */ }).catch(err => { next(err) }).
Because you often only want one row, there is queryOne: it resolves with undefined when no rows match, with the row itself when exactly one matches, and rejects with an error when several rows match.
Calling next() with an error as its argument hands control to the Express error handlers, where you can log the error and respond with a 500. You should define your own handler for that.
Please ask if anything is unclear; this can be a lot to take in the first time :)
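For reference, a minimal sketch of such an error handler; the four-argument signature is what tells Express it handles errors, and it should be registered after all your routes:
// somewhere after all routes in your main file
app.use(function(err, req, res, next) {
  console.error(err.stack)
  res.status(500).send('Internal server error')
})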

nodejs with mongodb: insert/find callbacks not running

Here is my model code for inserting some records. On my work PC it works perfectly, but when I run it on my home PC with the same OS, collection.insert never runs its callback, so the request just hangs until it times out. There are no errors; the MongoDB logs say "Connection accepted" 5 times and then nothing. The same thing happens when I try to fetch objects from the database using find(). Inserting records through the mongo shell works fine, but from Node.js I can't get it to work.
/*
* POST populate locations.
*/
var MongoClient = require('mongodb').MongoClient,
_ = require('underscore'),
env = process.env.NODE_ENV || 'development',
config = require('../config/config')[env]
exports.connect = function(cb) {
MongoClient.connect(config.db, function(err, db) {
if (err) throw err;
cb(db)
});
}
exports.populate = function(data, cb) {
var self = this;
self.connect(function(db) {
var collection = db.collection('locations');
collection.insert(data, function(err, docs) {
collection.ensureIndex({
"loc": "2dsphere"
}, function() {
db.close();
cb();
});
});
});
}
Try connecting directly inside populate instead of going through the separate connect helper:
exports.populate = function(data, cb) {
  MongoClient.connect(config.db, function(err, db) {
    if (err) throw err;
    var collection = db.collection('locations');
    collection.insert(data, function(err, docs) {
      collection.ensureIndex({
        "loc": "2dsphere"
      }, function() {
        db.close();
        cb();
      });
    });
  });
}
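If you happen to be on a newer native driver (3.x or later), the connect callback hands back a client instead of a db, and ensureIndex has been replaced by createIndex, so the equivalent would look roughly like this (a sketch, not tested against your setup):
exports.populate = function(data, cb) {
  MongoClient.connect(config.db, function(err, client) {
    if (err) return cb(err);
    var collection = client.db().collection('locations');
    collection.insertMany(data, function(err) {
      if (err) return cb(err);
      collection.createIndex({ "loc": "2dsphere" }, function(err) {
        client.close();
        cb(err);
      });
    });
  });
}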

I can't seem to retrieve data from mongodb using Node.js

I'm having trouble retrieving data from a MongoDB collection, even though I believe the data was inserted correctly.
So here is my example code...
var db = require('./database');
module.exports = function (app) {
app.get('/db', function (req, res) {
db.collection('myCollection', function (err, myCollection) {
if (err) {
return console.error(err);
}
var docrow = {
// no id specified, we'll let mongodb handle that
name: 'Mark',
date: '2013/09/11',
description: 'Some text here'
};
console.log('I GET HERE OK');
myCollection.insert(docrow, { safe: true }, function (err, insertedDocument) {
console.log('BUT I DONT GET HERE?');
if (err && err.name === 'MongoError' && err.code === 11000) {
return console.log('This document already exists');
} else if (err) {
return console.log('Something bad happened');
}
myCollection.find({ name: 'Mark' }, function (err, docs) {
docs.each(function (err, doc) {
console.log(doc);
});
});
});
res.end('OK we made it');
});
});
};
...and the database.js file is...
var Db = require('mongodb').Db,
Connection = require('mongodb').Connection,
Server = require('mongodb').Server;
var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;
/*
w:1 tells mongo to wait until at least one confirmed write has succeeded before calling any callbacks
*/
var flags = { w: 1 };
var server = new Server(host, port, { auto_reconnect: true, poolSize: 20 });
var db = new Db('TestDBName', server, flags);
module.exports = db;
It looks like I'm able to create a collection (myCollection) without error, and calling insert on it doesn't error either, but execution never seems to get inside the insert callback, so neither the error branch nor the success branch ever runs.
What am I doing wrong here?
Thanks for any help you can give me.
Connecting to MongoDB is asynchronous: the client handle is passed to your callback, and it's that handle you have to use from then on, not the Db object you constructed. So change this:
var db = new Db('TestDBName', server, flags);
To this:
new Db('TestDBName', server, flags).open(function(err, client) {
  if (err) throw err;
  // `client` is the handle to use from here on, instead of the `db` you had
});
Also change:
myCollection.find({ name: 'Mark' }, function (err, docs) {
To:
myCollection.find({ name: 'Mark' }).toArray(function (err, docs) {
With the native driver, find() returns a cursor rather than the documents themselves, so use .toArray() (or iterate the cursor) to get the actual array of results.
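Putting both changes together, the read path would look roughly like this (a sketch using the same Db instance exported from database.js, with minimal error handling):
db.open(function (err, client) {
  if (err) return console.error(err);
  client.collection('myCollection', function (err, myCollection) {
    if (err) return console.error(err);
    myCollection.find({ name: 'Mark' }).toArray(function (err, docs) {
      if (err) return console.error(err);
      docs.forEach(function (doc) {
        console.log(doc);
      });
    });
  });
});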

How to pass changes from middleware to socket.io?

I am using node.js with socket.io to push real-time notifications to users. Currently, I just run a query in my socket.io code and send the result back to the client. What I need is for socket.io to be told when something changes, and then either push the change or re-query the database for the new count and send that to the client.
For example if a user gets a friend request then the notification count will change and I want socket.io to push the new notification count number to the user.
here is my socket.io code in my app.js file:
io.on('connection', function(socket) {
var sessionID = socket.handshake.sessionID,
session = new connect.middleware.session.Session({ sessionStore: sessionStore }, socket.handshake.session)
console.log('socket: new ' + sessionID)
socket.broadcast.emit('arpNewConn', session.passport.user)
var intervalID = setInterval(function() {
socket.handshake.session.reload(function() {
socket.handshake.session.touch().save()
})
socket.emit('pulse', { heartbeat: new Date().toString(), timestamp: new Date().getTime() })
}, 300 * 1000)
socket.on('disconnect', function() {
console.log('socket: dump ' + sessionID)
socket.broadcast.emit('arpLostConn', session.passport.user)
clearInterval(intervalID)
})
socket.emit('entrance', {message: 'Message works'});
dbnotif.findOne(userID, function (err, user) {
if(err) throw err;
notify = user.notifications;
socket.emit('notify', {notific: notify});
});
});
Here is the client side:
div#CheckSocket
script(src='http://localhost:3000/socket.io/socket.io.js')
script.
$(document).ready(function () {
console.log('socket');
var socket = io.connect('http://localhost:3000/');
console.log('entered1');
socket.on('entrance', function (data) {
console.log('entered');
console.log(data.message);
});
socket.on('notify', function (data) {
console.log('noting');
console.log(data.notific);
if(data.notific !== 0)
$('.notifications').html(data.notific);
});
socket.on('reconnecting', function(data) {
setStatus('reconnecting');
console.log('entered2');
});
function setStatus(msg) {
console.log('connection status: ' + msg);
console.log('entered5');
}
});
Here is the example of adding a friend in the route file:
exports.addContactPost = function(req, res, err) {
async.waterfall([
function(callback) {
var success;
var newFriend = new Friend ({
userId: req.signedCookies.userid,
friend_id: mongoose.Types.ObjectId(req.body.otherUser),
friend_status: 1
});
newFriend.save(function(err){
if(err) {
console.log(err);
} else {
console.log("saved it");
success = true;
}
});
callback(null, success)
},
function(success, callback) {
//if(success === true) {
var success2;
var newFriend2 = new Friend ({
userId: mongoose.Types.ObjectId(req.body.otherUser),
friend_id: req.signedCookies.userid,
friend_status: 2
});
newFriend2.save(function(err){
if(err) {
res.send("request not received");
} else {
success2 = true;
}
});
callback(null, success2);
//} else {
// res.send("error with request sent");
//}
},
function(success2, callback) {
console.log('callback3');
//if(success2 === true) {
var success3;
Notification.findOneAndUpdate({userId: mongoose.Types.ObjectId(req.body.otherUser)}, {
$inc: {notifications: 1}
}, function(err, notify) {
if(err) {
res.send(err);
} else {
console.log(notify);
if(notify.added_notifications === true) {
// enable mail and include general u have got a new request... do not include name because not storing it
}
}
success3 = true;
callback(null, success3);
});
}],
function(err, results) {
res.json({response: true});
console.log("Add successful");
});
};
Notes: dbnotif is a Mongoose model,
userID is a global variable available in the file
I helped him solve this question offline, but we ended up using an EventEmitter as a proxy.
// main.js
var EventEmitter = require('events').EventEmitter;
var emitter = new EventEmitter();
Then add it to each request as middleware:
// elsewhere in main.js
app.use(function(req, res, next) {
req.emitter = emitter;
next();
});
Then in the external routes file:
// routes.js
exports.addContactPost = function(req, res, err) {
req.emitter.emit( 'some-key', whatever, data, you, want );
};
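The remaining piece is a listener on the socket.io side that reacts to those events and pushes fresh data to the client. A rough sketch, placed inside the existing io.on('connection', ...) block so socket and userID are in scope ('some-key' is the event name used above; re-querying with dbnotif mirrors the existing 'notify' emit):
// app.js, inside io.on('connection', function(socket) { ... })
function pushNotifications() {
  dbnotif.findOne(userID, function(err, user) {
    if (err) throw err;
    socket.emit('notify', { notific: user.notifications });
  });
}
emitter.on('some-key', pushNotifications);

// clean up in the existing 'disconnect' handler to avoid leaking listeners
socket.on('disconnect', function() {
  emitter.removeListener('some-key', pushNotifications);
});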

Node.js reuse MongoDB reference

I am having trouble understanding node.js.
For example, MongoDB access; here's what I've got (mydb.js):
var mongodb = require('mongodb'),
server = new mongodb.Server('staff.mongohq.com', 10030, {
auto_reconnect: true
}),
db = new mongodb.Db('mydb', server);
function authenticateAndGo(db, handle) {
db.authenticate('username', 'password', function(err) {
if (err) {
console.log(err);
return;
}
console.log('Database user authenticated');
var collection = new mongodb.Collection(db, 'test');
handle(collection);
});
}
function query(handle) {
db.open(function(err, db) {
if( err ) {
console.log(err);
return;
}
console.log('Database connected');
authenticateAndGo(db, handle);
});
};
exports.query = query;
So, if I want to use it later, I would
var mydb = require('./mydb');
mydb.query(function(collection) {
collection.find({}, {
limit: 10
}).toArray(function(err, docs) {
console.log(docs);
});
});
But, If I do multiple calls, like so:
var mydb = require('./mydb');
mydb.query(function(collection) {
collection.find({}, {
limit: 10
}).toArray(function(err, docs) {
console.log(docs);
});
});
mydb.query(function(collection) {
collection.find({}, {
limit: 10
}).toArray(function(err, docs) {
console.log(docs);
});
});
I get an exception:
Error: db object already connecting, open cannot be called multiple times
I think that there is really something fundamental that I do not understand about all this and it is probable that this question is stupid ...
Anyway, all help is welcome.
Thanks in advance.
mydb.js:
var mongodb= require('mongodb'),
server = new mongodb.Server('staff.mongohq.com', 10030, {
auto_reconnect: true
}),
db1 = new mongodb.Db('mydb', server);
// callback: (err, db)
function openDatabase(callback) {
db1.open(function(err, db) {
if (err)
return callback(err);
console.log('Database connected');
return callback(null, db);
});
}
// callback: (err, collection)
function authenticate(db, username, password, callback) {
db.authenticate(username, password, function(err, result) {
if (err) {
return callback (err);
}
if (result) {
var collection = new mongodb.Collection(db, 'test');
// always, ALWAYS return the error object as the first argument of a callback
return callback(null, collection);
} else {
return callback (new Error('authentication failed'));
}
});
}
exports.openDatabase = openDatabase;
exports.authenticate = authenticate;
use.js:
var mydb = require('./mydb');
// open the database once
mydb.openDatabase(function(err, db) {
if (err) {
console.log('ERROR CONNECTING TO DATABASE');
console.log(err);
process.exit(1);
}
// authenticate once after you opened the database. What's the point of
// authenticating on-demand (for each query)?
mydb.authenticate(db, 'usernsame', 'password', function(err, collection) {
if (err) {
console.log('ERROR AUTHENTICATING');
console.log(err);
process.exit(1);
}
// use the returned collection as many times as you like INSIDE THE CALLBACK
collection.find({}, {limit: 10})
.toArray(function(err, docs) {
console.log('\n------ 1 ------');
console.log(docs);
});
collection.find({}, {limit: 10})
.toArray(function(err, docs) {
console.log('\n------ 2 ------');
console.log(docs);
});
});
});
Result:
on success:
Database connected
Database user authenticated
------ 1 ------
[ { _id: 4f86889079a120bf04e48550, asd: 'asd' } ]
------ 2 ------
[ { _id: 4f86889079a120bf04e48550, asd: 'asd' } ]
on failure:
Database connected
{ [MongoError: auth fails] name: 'MongoError', errmsg: 'auth fails', ok: 0 }
[Original Answer]:
You're opening the db multiple times (once in each query). You should open the database just once, and use the db object in the callback for later use.
You're using the same variable name multiple times, and that might've caused some confusion.
var mongodb = require('mongodb'),
server = new mongodb.Server('staff.mongohq.com', 10030, {
auto_reconnect: true
}),
db1 = new mongodb.Db('mydb', server);
function authenticateAndGo(db, handle) {
db.authenticate('username', 'password', function(err) {
if (err) {
console.log(err);
return;
}
console.log('Database user authenticated');
var collection = new mongodb.Collection(db, 'test');
handle(collection);
});
}
function query(handle) {
db1.open(function(err, db2) {
if( err ) {
console.log(err);
return;
}
console.log('Database connected');
authenticateAndGo(db2, handle);
});
};
exports.query = query;
I've changed the above code a little (db1 for the original Db object, db2 for the opened db). As you can see, you're opening db1 multiple times, which is not good. Extract the opening code into its own method, call it ONCE, and use the db2 instance for all your queries/updates/removes/...
You can only call "open" once. When the open callback fires, you can then do your queries on the DB object it returns. So one way to handle this is to queue up the requests until the open completes.
e.g. MyMongo.js
var mongodb = require('mongodb');
function MyMongo(host, port, dbname) {
this.host = host;
this.port = port;
this.dbname = dbname;
this.server = new mongodb.Server(
'localhost',
9000,
{auto_reconnect: true});
this.db_connector = new mongodb.Db(this.dbname, this.server);
var self = this;
this.db = undefined;
this.queue = [];
this.db_connector.open(function(err, db) {
if( err ) {
console.log(err);
return;
}
self.db = db;
for (var i = 0; i < self.queue.length; i++) {
var collection = new mongodb.Collection(
self.db, self.queue[i].cn);
self.queue[i].cb(collection);
}
self.queue = [];
});
}
exports.MyMongo = MyMongo;
MyMongo.prototype.query = function(collectionName, callback) {
if (this.db != undefined) {
var collection = new mongodb.Collection(this.db, collectionName);
callback(collection);
return;
}
this.queue.push({ "cn" : collectionName, "cb" : callback});
}
and then a sample use:
var MyMongo = require('./MyMongo.js').MyMongo;
var db = new MyMongo('localhost', 9000, 'db1');
var COL = 'col';
db.query(COL, function(collection) {
collection.find({}, {
limit: 10
}).toArray(function(err, docs) {
console.log("First:\n", docs);
});
});
db.query(COL, function(collection) {
collection.find({}, {
limit: 10
}).toArray(function(err, docs) {
console.log("\nSecond:\n", docs);
});
});
I simply call the open function once directly after the db init:
var mongodb = require('mongodb');
var server = new mongodb.Server('foo', 3000, {auto_reconnect: true});
var db = new mongodb.Db('mydb', server);
db.open(function(){});
After that I don't have to worry about it anymore because auto_reconnect is true.
db.collection('bar', function(err, collection) { [...] });
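A quick usage sketch under that setup; keep in mind that, as noted above, a query fired before the initial open has actually completed can still fail:
db.collection('bar', function(err, collection) {
  if (err) return console.error(err);
  collection.find({}, { limit: 10 }).toArray(function(err, docs) {
    console.log(docs);
  });
});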
