I have 2 schemas set up in mongoose:
var Job = new mongoose.Schema({
    title: String,
    budget: Number
});
var JobModel = mongoose.model('Job', Job);

var Customer = new mongoose.Schema({
    name: String,
    jobs: [Job]
});
var CustomerModel = mongoose.model('Customer', Customer);
The Customer model has an array of job models.
I am adding a new job as follows:
app.post('/api/jobs', function(req, res){
    var job = new JobModel({
        title: req.body.title,
        budget: req.body.budget
    });
    job.save(function(err){
        if(!err){
            CustomerModel.findById(req.body.customerId, function(err, customer){
                if(!err){
                    customer.jobs.push(job);
                    customer.save(function(err){
                        if(!err){
                            return console.log('saved job to customer');
                        }
                    });
                }
            });
            return console.log('created job');
        } else {
            return console.log(err);
        }
    });
    return res.send(job);
});
When I add a new job and GET all the customers, I get the following, which I think is correct:
[{
"__v": 1,
"_id": "50f85695771aeeda08000001",
"name": "Customer1",
"jobs": [
{
"_id": "50fad6985edd968840000002",
"budget": 100,
"title": "job1"
}
]
}, ...]
Now if I update job1 and GET all the jobs, job1 has been updated (the budget is now 500):
[{
"title": "job1",
"budget": 500,
"_id": "50fad6985edd968840000002",
"__v": 0
}, ...]
but job1 in the customer's jobs array remains unchanged:
[{
"__v": 1,
"_id": "50f85695771aeeda08000001",
"name": "Customer1",
"jobs": [
{
"_id": "50fad6985edd968840000002",
"budget": 100,
"title": "job1"
}
]
}, ...]
Do I therefore need to search Customer1's jobs array, find the job1 entry, and update (or delete) that as well every time I update or delete a job, or am I doing the whole nesting thing the wrong way?
I get that the jobs array in Customer is an array of objects, but I thought they might somehow be just a reference to the job rather than a duplicate?
If you use an embedded array of Job documents as you are here, they're completely independent copies and it's up to you to keep them in sync with the separate jobs collection.
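For example, a minimal sketch of keeping the embedded copies in sync when a job changes, using the positional $ operator (jobId here is just a placeholder for the job's _id; adapt to your own routes):

JobModel.findByIdAndUpdate(jobId, { budget: 500 }, function(err, job){
    if (err) return console.log(err);
    // also update the embedded copy on every customer that contains this job,
    // matching the subdocument by its _id
    CustomerModel.update(
        { 'jobs._id': jobId },
        { $set: { 'jobs.$.budget': 500 } },
        { multi: true },
        function(err){
            if (err) return console.log(err);
            console.log('embedded copies updated');
        }
    );
});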
The alternative is to have the jobs field of Customer contain an array of ObjectId references to 'Job' and then use Mongoose's query population to populate them on-demand, as needed.
jobs: [{type: Schema.Types.ObjectId, ref: 'Job'}]
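With that change, a GET for customers can populate the jobs at query time instead of storing copies; a rough sketch (route path and error handling are just illustrative):

app.get('/api/customers', function(req, res){
    CustomerModel.find({})
        .populate('jobs')   // swaps the stored ObjectIds for the referenced Job documents
        .exec(function(err, customers){
            if (err) return res.send(err);
            res.send(customers);
        });
});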
Related
I am trying to build a system to record timesheets. For each day, an employee can record different hours against the different tasks assigned to them. Every entry in my Timesheet collection contains an array of individual timesheet entries. Sample data/schema for this collection:
[
{
"_id": "5db2c672620ed61854818ccd",
"EmployeeId": "5da9aed442e3070bbd9f7581",
"TimeSheet": [
{
"_id": "5db2c672620ed61854818ccf",
"TaskId": "5db14152e6537a05258bf573",
"Hours": "2.5",
"Remarks": "Test 2.5"
},
{
"_id": "5db2c672620ed61854818cce",
"TaskId": "5db1886ee6537a05258bf575",
"Hours": "11.5",
"Remarks": "Test 11.5"
}
],
"__v": 0
}
]
For the corresponding Task collection, the data looks like this:
[
{
"_id": "5db14152e6537a05258bf573",
"EmployeeId": "5da9aed442e3070bbd9f7581",
"ProjectId": "5db141d9e6537a05258bf574",
"TaskName": "Finish the timesheet page",
"TaskDescription": "Write the front-end and back-end code to allow employees to record their timesheets."
},
{
"_id": "5db1886ee6537a05258bf575",
"EmployeeId": "5da9aed442e3070bbd9f7581",
"ProjectId": "5db141d9e6537a05258bf574",
"TaskName": "Learn Populate",
"TaskDescription": "Learn how MongoDB/Mongoose uses the equivalent of SQL joins"
},
{
"_id": "5db27e3ca2445c05255dbad0",
"EmployeeId": "5da9aed442e3070bbd9f7581",
"ProjectId": "5db141d9e6537a05258bf574",
"TaskName": "Timesheet save API",
"TaskDescription": "Code the API to save a timesheet to the database"
}
]
I am trying to get the Task details (TaskName, TaskDescription and others) into every individual task-timesheet entry. For this, I tried using the populate() method in my controller, like so -
exports.findByEmployee = (req, res) => {
    TimeSheet.find({ EmployeeId: req.query.EmployeeId })
        .then(timesheets => {
            timesheets.forEach((ts, ts_index) => {
                ts.TimeSheet.forEach((item, index) => {
                    Task.findById(item.TaskId).populate('TaskId').exec((err, taskDetails) => {
                        item.TaskDetails = taskDetails;
                    })
                });
            })
            res.send(timesheets);
        }).catch(err => {
            res.status(500).send({
                message: err.message || "Some error occurred while retrieving timesheets."
            });
        });
}
However, the API response (for getting all timesheets) does not contain a key called TaskDetails in the individual task-timesheet section. My guess is that, because of the asynchronous nature of the function calls, res.send(timesheets) fires before the section above it has had time to finish. So my question is: how do I fix this? To summarize, I want the Task details from the Task collection attached to every individual timesheet item that is linked to the Task by id. Also, is this the correct way to use populate(), or is there a better/simpler/more correct way that will solve my problem?
EDIT:
Someone requested the models, so here's task and timesheet:
const mongoose = require('mongoose');
var ObjectId = mongoose.Schema.Types.ObjectId;

const TaskSchema = mongoose.Schema({
    EmployeeId: ObjectId,
    ProjectId: ObjectId,
    TaskName: String,
    TaskDescription: String
}, { collection: 'TASK' });

module.exports = mongoose.model('Task', TaskSchema);

const mongoose = require('mongoose');
var ObjectId = mongoose.Schema.Types.ObjectId;

const TimeSheetSchema = mongoose.Schema({
    EmployeeId: ObjectId,
    Date: Date,
    TimeSheet: [
        {
            TaskId: {
                type: ObjectId,
                ref: 'TASK'
            },
            Hours: String,
            Remarks: String
        }
    ]
}, { collection: 'EMPLOYEE_TIMESHEET' });

module.exports = mongoose.model('TimeSheet', TimeSheetSchema);
There are a few things going on here. 1) TaskId doesn't exist in the Task collection, so populate doesn't work there (and there is no need to use it on a document you have already fetched by id). 2) The item object you declare in your ts.TimeSheet.forEach() will not exist outside of that forEach() loop, so adding taskDetails to it doesn't accomplish anything because the item object is gone when the forEach() loop finishes.
I believe what you want is something like:
exports.findByEmployee = (req, res) => {
    // returns just the TimeSheet array from within the TimeSheet collection
    // (recommend renaming one of these to avoid confusion!)
    TimeSheet.find({ EmployeeId: req.query.EmployeeId }, 'TimeSheet')
        // populates TaskId (which I recommend renaming 'Task')
        // with the details from the Task collection
        .populate('TimeSheet.TaskId')
        // executes the query; exec's callback receives (err, result)
        .exec((err, timesheets) => {
            if (err) {
                return res.status(500).send({
                    message: err.message || 'Some error occurred while retrieving timesheets.',
                });
            }
            // sends the documents once the query has finished executing
            res.send(timesheets);
        });
};
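With that in place, each TaskId in the response is replaced by the referenced Task document, so (using the sample data above) an individual timesheet entry comes back roughly as:

{
    "_id": "5db2c672620ed61854818ccf",
    "TaskId": {
        "_id": "5db14152e6537a05258bf573",
        "EmployeeId": "5da9aed442e3070bbd9f7581",
        "ProjectId": "5db141d9e6537a05258bf574",
        "TaskName": "Finish the timesheet page",
        "TaskDescription": "Write the front-end and back-end code to allow employees to record their timesheets."
    },
    "Hours": "2.5",
    "Remarks": "Test 2.5"
}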
I would highly recommend the MDN "Local Library" Express tutorial for a good introduction to working with MongoDB and Mongoose.
I am trying to send a list of paid and unpaid clients, with counts, along with the data from my Node API.
In the Mongoose method, I am stuck on how to go further.
Can anyone suggest the best way to achieve this?
router.get("/", ensureAuthenticated, (req, res) => {
Loan.aggregate([
{
$match: {
ePaidunpaid: "Unpaid"
}
}
]).then(function(data) {
console.log(data);
res.render("dashboard", { admin: req.user.eUserType, user: req.user,data:data });
});
});
Loan Model:
const Loan = new Schema({
    sName: { type: String },
    sPurpose: [String],
    sBankName: String,
    sBranchName: [String],
    nTotalFees: { type: Number },
    ePaidunpaid: { type: String, default: 'Unpaid' },
    sCashOrCheque: { type: String },
});
Outcome:
Details of a user with a count of paid and unpaid clients
[
Paid:{
// Paid users
},
Unpaid:{
// Unpaid Users
},
]
Well in that case, try this -
Loan.aggregate([
    {
        $group: {
            _id: "$ePaidunpaid",
            data: { $push: "$$ROOT" },
            count: { $sum: 1 }
        }
    }
]);
Output would be something like this -
{
"_id": "Paid",
"data": [
// All the documents having ePaidunpaid = Paid
{ _id: "asdasd123 1eqdsada", sName: "Some name", // Rest of the fields },
{ _id: "asdasd123 1eqdsada", sName: "Some name", // Rest of the fields }
],
count: 2
},
{
"_id": "Unpaid",
"data": [
// All the documents of having ePaidunpaid = Unpaid
{ _id: "asdasd123 1eqdsada", sName: "Some name", // Rest of the fields },
{ _id: "asdasd123 1eqdsada", sName: "Some name", // Rest of the fields }
],
count: 2
},
Explanation
The first stage of the pipeline, $group, groups all the documents according to the ePaidunpaid field, which has only two values (Paid or Unpaid), thus producing only two result documents.
The next step is to accumulate the original documents being grouped together. This is achieved using the $push accumulator on the data field, pushing $$ROOT, which references the document currently being processed by the pipeline stage.
Since you need the count of all paid and unpaid users, a $sum accumulator counts the items in each group.
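If you then want the response keyed by payment status, as in your desired outcome, you can reshape the two grouped documents in Node; a quick sketch (variable names are just illustrative):

Loan.aggregate([
    { $group: { _id: "$ePaidunpaid", data: { $push: "$$ROOT" }, count: { $sum: 1 } } }
]).then(function(groups) {
    var result = {};
    groups.forEach(function(group) {
        // group._id is either "Paid" or "Unpaid"
        result[group._id] = { count: group.count, clients: group.data };
    });
    res.render("dashboard", { admin: req.user.eUserType, user: req.user, data: result });
});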
I'm trying to get mongoose populate to work.
I have two models one for orders and the other for the order items.
I'm doing this purely to understand populate.
I did have two files for this, but I now have everything in one file called main.js.
main.js creates the 3 items using the Item model.
I then try to populate the order model with the 3 items.
The output for the query now gives the correct populated result.
That is to say, the output from the line
console.log(JSON.stringify(orders, null, "\t"));
is now:
[
{
"_id": "55d32e4594db780b1bbb4372",
"__v": 0,
"lines": [
{
"price": 2.4,
"quantity": 5,
"_id": "55d32e4594db780b1bbb436f",
"__v": 0
},
{
"price": 3.7,
"quantity": 7,
"_id": "55d32e4594db780b1bbb4370",
"__v": 0
},
{
"price": 1.2,
"quantity": 3,
"_id": "55d32e4594db780b1bbb4371",
"__v": 0
}
]
}
]
The database however is not populating.
Below is the main.js file
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost:27017/mydb');
var Schema = mongoose.Schema;
var OrderSchema = new Schema({
    lines: [{type: mongoose.Schema.Types.ObjectId, ref: 'Item'}]
});
var ItemSchema = new Schema({
    price: Number,
    quantity: Number
});
var Order = mongoose.model('Order', OrderSchema);
var Item = mongoose.model('Item', ItemSchema);

var itemArray = [
    {price: 2.4, quantity: 5},
    {price: 3.7, quantity: 7},
    {price: 1.2, quantity: 3}
];

Item.create(itemArray, function(err) {
    if (err) {
        console.log('Error creating items: ', err);
    }
    var order = new Order();
    Item.find({}, {_id: 1}, function(err, result) {
        result.forEach(function(obj) {
            order.lines.push(obj._id);
        });
    }).exec().then(function() {
        order.save(function(err) {
            if (!err) {
                Order.find({})
                    .populate('lines')
                    .exec(function(err, orders) {
                        console.log(JSON.stringify(orders, null, "\t")); // output is populated, the database is still not populating
                    });
            }
        });
    });
});
It appears there is some confusion for what the populate method is for. Essentially populate is mongoose's version of a "join". It allows a query (or a document) to automatically retrieve and "join" any associated documents and returns the modified document(s). Under the hood for every referenced value in a document that is to be populated, the populate method will execute a separate query to fetch those associated documents (generally this is by ID but can be a custom query) and then in memory it will replace the references with the retrieved documents.
It does not alter the documents in the collection at all as this is purely a fetch operation.
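In other words, what is stored in the orders collection stays as an array of ObjectIds; only the query result in memory is expanded. Roughly, for the order shown above:

// What the orders collection actually stores:
{ "_id": "55d32e4594db780b1bbb4372",
  "lines": [ "55d32e4594db780b1bbb436f",
             "55d32e4594db780b1bbb4370",
             "55d32e4594db780b1bbb4371" ] }

// What Order.find({}).populate('lines') returns in memory:
{ "_id": "55d32e4594db780b1bbb4372",
  "lines": [ { "price": 2.4, "quantity": 5, "_id": "55d32e4594db780b1bbb436f" },
             { "price": 3.7, "quantity": 7, "_id": "55d32e4594db780b1bbb4370" },
             { "price": 1.2, "quantity": 3, "_id": "55d32e4594db780b1bbb4371" } ] }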
I am using the code below to insert data into MongoDB:
router.post('/NewStory', function (req, res) {
    var currentObject = { user: userId, story: story, _id: new ObjectID().toHexString() };
    req.db.get('clnTemple').findAndModify({
        query: { _id: req.body.postId },
        update: { $addToSet: { Stories: currentObject } },
        upsert: true
    });
});
This code works fine if I remove _id: new ObjectID().toHexString().
What I want to achieve here is that every new story gets a unique _id attached to it.
What am I doing wrong?
{
"_id": {
"$oid": "55ae24016fb73f6ac7c2d640"
},
"Name": "some name",
...... some other details
"Stories": [
{
"userId": "105304831528398207103",
"story": "some story"
},
{
"userId": "105304831528398207103",
"story": "some story"
}
]
}
This is the document model; the _id that I am trying to create is for the stories.
You should not be calling .toHexString() on this, as you would be getting a "string" and not an ObjectID. A string also takes more space than the bytes of an ObjectId.
var async = require('async'),
mongo = require('mongodb'),
db = require('monk')('localhost/test'),
ObjectID = mongo.ObjectID;
var coll = db.get('junk');
var obj = { "_id": new ObjectID(), "name": "Bill" };
coll.findAndModify(
{ "_id": new ObjectID() },
{ "$addToSet": { "stories": obj } },
{
"upsert": true,
"new": true
},
function(err,doc) {
if (err) throw err;
console.log(doc);
}
)
So that works perfectly for me. Noting the "new" option there as well so the modified document is returned, rather than the original form of the document which is the default.
{ _id: 55c04b5b52d0ec940694f819,
stories: [ { _id: 55c04b5b52d0ec940694f818, name: 'Bill' } ] }
There is, however, a catch here: if you are using $addToSet and generating a new ObjectId for every item, then that new ObjectId makes every element "unique", so you would keep adding things into the "set". This may as well be $push if that is what you want to do.
So if userId and story in combination already make this "unique", then do this way instead:
coll.findAndModify(
{
"_id": docId,
"stories": {
"$not": { "$elemMatch": { "userId": userId, "story": story } }
}
},
{ "$push": {
"stories": {
"userId": userId, "story": story, "_id": new ObjectID()
}
}},
{
"new": true
},
function(err,doc) {
if (err) throw err;
console.log(doc);
}
)
So test for the presence of the unique elements in the array, and where they do not exist, append them to the array. Note also that you cannot do an "inequality match" on the array element while mixing it with "upserts". Your test to "upsert" the document should be on the primary "_id" value only. Managing array entries and document "upserts" need to be separate update operations; do not try to mix the two, otherwise you will end up creating new documents when you did not intend to.
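A minimal sketch of keeping those two concerns separate (docId, userId and story mirror the snippets above; $setOnInsert is just one way to seed a newly upserted document):

// 1) Upsert the parent document on its primary _id only
coll.findAndModify(
    { "_id": docId },
    { "$setOnInsert": { "Name": "some name" } },
    { "upsert": true, "new": true },
    function(err, doc) {
        if (err) throw err;
        // 2) Manage the array entry in a separate update, guarding
        //    against duplicates by matching the element fields
        coll.findAndModify(
            {
                "_id": docId,
                "stories": { "$not": { "$elemMatch": { "userId": userId, "story": story } } }
            },
            { "$push": { "stories": { "userId": userId, "story": story, "_id": new ObjectID() } } },
            { "new": true },
            function(err, doc) {
                if (err) throw err;
                console.log(doc);
            }
        );
    }
);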
By the way, you can generate an ObjectID just using monk.
var db = monk(credentials.database);
var ObjectID = db.helper.id.ObjectID
console.log(ObjectID()) // generates an ObjectID
I've been building an application in Node.js using the native MongoDB driver. It includes contacts, and when a user accepts a contact, the contact should be removed from the "pending" and "sent" lists and added to "contacts".
Example code and documents:
/*
=============================
User "john"
=============================
{
username: "john",
contacts: ["jim"],
pending_contacts: ["bob"]
}
=============================
User "bob"
=============================
{
username: "bob",
contacts: ["dave"],
sent_contacts: ["john"]
}
=============================
What SHOULD happen
=============================
{
username: "bob",
contacts: ["dave", "john"],
sent_contacts: []
},
{
username: "john",
contacts: ["jim", "bob"],
pending_contacts: []
}
=============================
What ACTUALLY happens
=============================
{
username: "john",
contacts: ["jim", "bob"],
pending_contacts: ["bob"]
},
{
username: "bob",
contacts: ["dave", "john"],
sent_contacts: ["john"]
}
*/
var col = this.db.collection('users');
var contact = "bob", username = "john";
var who = [contact, username];
var finishCount = 0;
// finish will run 3 times before callback
function finish(name) {
console.log(name, ' has finished');
finishCount++;
if(finishCount<3) return;
callback(false, null);
}
// run if there's an error
function failed(err) {
callback(err, null)
}
console.log('removing %s and %s from pending and sent', username, contact)
col.update(
{username: { $in: who }},
{
$pullAll: {
sent_contacts: who,
pending_contacts: who
}
}, {multi: 1},
function(err,data) {
if(err) return failed(err);
finish('REMOVE_CONTACTS');
}
);
col.update(
{username: username}, {$addToSet: {contacts: contact}},
function(err,res) {
if(err) return failed(err);
console.log('added 1');
finish('ADD_TO_USER');
}
);
col.update(
{username: contact}, {$addToSet: {contacts: username}},
function(err,res) {
if(err) return failed(err);
console.log('added 2');
finish('ADD_TO_CONTACT');
}
);
The first update removes the contact and the owner from each-others pending/sent list, the second and third update add the owner to the contact's contact list and vice versa.
The issue is that the final result looks as if the removal never happened, even though the removal query works perfectly fine by itself. I don't know if this is a problem with MongoDB itself (or if it's intended), or if it's an issue with the driver, so I hope someone can at least clarify this for me.
NOTE: Yes, I know they run asynchronously. Running them one after the other by putting each update in the previous callback does NOT make a difference. Before anyone complains about how awful this code looks: I previously had it set up with Async.JS, but I removed it from this code sample to ensure that Async.JS was not responsible for the issues.
Using the node native driver this works for me every time:
var mongodb = require('mongodb'),
async = require('async'),
MongoClient = mongodb.MongoClient;
var user = "john",
contact = "bob";
var contactsList = [
{
"username": "john",
"contacts": [
"jim"
],
"pending_contacts": [
"bob"
]
},
{
"username": "bob",
"contacts": [
"dave"
],
"sent_contacts": [
"john"
]
}
];
MongoClient.connect('mongodb://localhost/test',function(err,db) {
var coll = db.collection("contacts");
async.series(
[
// Wipe clean
function(callback) {
coll.remove({},callback)
},
// Init collection
function(callback) {
async.each(contactsList,function(contact,callback) {
coll.insert(contact,callback);
},callback);
},
// Do updates
function(callback) {
// Init batch
var bulk = coll.initializeOrderedBulkOp();
// Add to user and pull from pending
bulk.find({
"username": user,
"contacts": { "$ne": contact },
}).updateOne({
"$push": { "contacts": contact },
"$pull": { "pending_contacts": contact }
});
// Add to contact and pull from sent
bulk.find({
"username": contact,
"contacts": { "$ne": user },
"sent_contacts": user
}).updateOne({
"$push": { "contacts": user },
"$pull": { "sent_contacts": user }
});
// Execute
bulk.execute(function(err,response) {
console.log( response.toJSON() );
callback(err);
});
},
// List collection
function(callback) {
coll.find({}).toArray(function(err,results) {
console.log(results);
callback(err);
});
}
],
function(err) {
if (err) throw err;
db.close();
}
);
});
And the output:
{ ok: 1,
writeErrors: [],
writeConcernErrors: [],
insertedIds: [],
nInserted: 0,
nUpserted: 0,
nMatched: 2,
nModified: 2,
nRemoved: 0,
upserted: [] }
[ { _id: 55b0c16934fadce812cdcf9d,
username: 'john',
contacts: [ 'jim', 'bob' ],
pending_contacts: [] },
{ _id: 55b0c16934fadce812cdcf9e,
username: 'bob',
contacts: [ 'dave', 'john' ],
sent_contacts: [] } ]
Improvements here are basically to use the Bulk Operations API and send all updates at once to the server and get a single response. Also note the use of operators in the updates and the query selection as well.
Simply put, you already know the "user" as well as the "contact" they are accepting. The contact to be accepted is "pending" and the contact themselves have the user in "sent".
These are really just simple $push and $pull operations on either array as appropriate. Rather than using $addToSet here, the query conditions make sure that the expected values are present when performing the update. This also preserves "order", which $addToSet basically cannot guarantee, because a "set" is unordered.
One send to the server and one callback response, leaving both users updated correctly. That makes more sense than sending multiple updates and waiting for the callback response from each.
Anyhow, this is a complete self contained listing with only the two named dependencies, so you can easily run it yourself and confirm the results.
When I say "complete and self-contained", I mean start a new project and simply run the code. Here are the complete instructions:
mkdir sample
cd sample
npm init
npm install mongodb --save
npm install async --save
Then create a file with the code listing in that folder, say test.js and then run:
node test.js