I have a document like the one below:
{
"id": "7d9fdc2f4846544d62da3421bf011b31",
"al": [
{ "id16": "0x1d42",
"pos": {
"x": 10.32,
"y": 11.13,
"z": 1.22
},
"resultTime": "2020-06-01T20:45:34.976Z"
},
{ "id16": "0x1342",
"pos": {
"x": 0.32,
"y": 1.13,
"z": 13.22
},
"resultTime": "2021-06-01T20:45:34.976Z"
}
.
.
.
],
"Timestamp": 272179,
"Oid": "Onion1",
}
and the design document is like below:
{
"id": "_design/GetALwithAnchorID",
"key": "_design/GetALwithAnchorID",
"value": {
"rev": "32-6db6c4e105336d47a6c8e7e8458ee345"
},
"doc": {
"_id": "_design/GetALwithAnchorID",
"_rev": "32-6db6c4e105336d47a6c8e7e8458ee345",
"views": {
"GetALwithAnchorID": {
"map": "function (doc) {\n\n for (var i=0; i<doc.al.length; i++) { \n emit(doc.al[i].id16, doc.al[i].pos);\n }\n \n}\n\n",
"reduce": "_approx_count_distinct"
}
},
"language": "javascript"
}
}
When I query the view like
http://127.0.0.1:5984/rtls/_design/GetALwithAnchorID/_view/GetALwithAnchorID?group_level=1&key=%220x1d42%22
I get the results as below
{"rows":[
{"key":"0x1d42","value":1}
]}
But I want distinct values of id16 and the pos for each id16, and to sort these distinct values by time and display the values of pos instead of "value": 1 when I query. How can I do that?
Thank you in advance.
OK, so this is not quite the same as this similar answer; anyone coming across this Q/A, I recommend reading over that answer as well.
Consider the following emit for your given doc structure:
doc.al.forEach(e => emit(
  [e.pos.x, e.pos.y, e.pos.z, e.resultTime], // key
  [e.id16, e.pos, e.resultTime]              // value
));
The emit's complex key, visualized in the index (loosely, not verbatim):
[-3,-2,-1,"2017-10-28T22:56:58.852Z"]
[-3,-2,-1,"2019-01-23T03:33:20.958Z"] **
. . .
[0,0,0,"2016-05-27T01:38:36.305Z"]
[0,0,0,"2016-12-27T05:17:02.255Z"] **
. . .
[1,2,3,"2016-11-14T17:31:59.468Z"]
[1,2,3,"2017-07-17T07:52:38.180Z"] **
Each ** marks the last item in its pos group and, significantly, the most recent resultTime. All due to CouchDB's collation.
Working with CouchDB demands understanding the B-tree, and its documentation has a great rundown of it in the Reduce/Rereduce section.
Now consider this reduce function:
function(keys,values,rereduce) {
return values[0];
}
It doesn't look terribly impressive, but further consider calling the view with these parameters:
{
reduce: true,
group_level: 1,
descending: true
}
By reversing the order of the index scan with descending, the reduce function is guaranteed to return the most recent row, with respect to resultTime, of any given pos group.
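Over HTTP, the equivalent query would look something like this (assuming the design document from the demo below, _design/SO-66231293 with view id16, were installed in the rtls database from the question):
http://127.0.0.1:5984/rtls/_design/SO-66231293/_view/id16?reduce=true&group_level=1&descending=true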
Here's a simple demo using PouchDB. It generates 6 documents with random resultTimes and randomly selects pos from a pool of 3. Have a look at the design doc.
async function showReduceDocs(view) {
let result = await db.query(view, {
reduce: true,
group_level: 1,
descending: true
});
// show
gel('view_reduce').innerText = result.rows.map(row => `${JSON.stringify(row.value)}`.split(',').join(', ')).join('\n');
return result;
}
async function showViewDocs(view) {
let result = await db.query(view, {
reduce: false,
include_docs: false
});
//show
gel('view_docs').innerText = result.rows.map(row => JSON.stringify(row.key))
.join('\n');
}
function getDocsToInstall(count) {
// design document
const ddoc = {
"_id": "_design/SO-66231293",
"views": {
"id16": {
"map": `function (doc) {
doc.al.forEach((e) => emit([e.pos.x, e.pos.y, e.pos.z, e.resultTime],[e.id16, e.pos, e.resultTime]));
}`,
"reduce": `function(keys,values,rereduce) {
return values[0];
}`
}
}
};
// create a set of random documents.
let docs = new Array(count);
let docId = 65;
const posSeed = [{
x: 0,
y: 0,
z: 0
},
{
x: 1,
y: 2,
z: 3
},
{
x: -3,
y: -2,
z: -1
}
];
const dateSeed = [new Date(2000, 0, 1), new Date(), 0, 24];
while (count--) {
let n = 6;
let doc = {
_id: String.fromCharCode(docId++),
al: new Array(n)
};
while (n-- > 0) {
doc.al[n] = {
"id16": "0x000" + n,
"pos": posSeed[Math.floor(Math.random() * 100) % 3],
"resultTime": randomDate(...dateSeed).toISOString()
};
}
docs[count] = doc;
}
docs.push(ddoc);
return docs;
}
const db = new PouchDB('SO-66231293', {
adapter: 'memory'
});
(async() => {
// install docs and show view in various forms.
await db.bulkDocs(getDocsToInstall(6));
gel('content').classList.remove('hide')
showReduceDocs('SO-66231293/id16');
showViewDocs('SO-66231293/id16');
})();
const gel = id => document.getElementById(id);
/*
https://stackoverflow.com/questions/31378526/generate-random-date-between-two-dates-and-times-in-javascript/31379050#31379050
*/
function randomDate(start, end, startHour, endHour) {
var date = new Date(+start + Math.random() * (end - start));
var hour = startHour + Math.random() * (endHour - startHour) | 0;
date.setHours(hour);
return date;
}
<script src="https://cdn.jsdelivr.net/npm/pouchdb#7.1.1/dist/pouchdb.min.js"></script>
<script src="https://github.com/pouchdb/pouchdb/releases/download/7.1.1/pouchdb.memory.min.js"></script>
<div id='content' class='hide'>
<div>View: reduce</div>
<pre id='view_reduce'></pre>
<hr/>
<div>View: complex key</div>
<pre id='view_docs'></pre>
</div>
Edit
Amended the demo snippet according to OP's comments.
I'm currently trying to get the number of events for one organizer.
This is what my organizer document looks like:
{
"doc_type": "User",
"email": "xxx#gmail.com",
"blebleble: "blebleble",
}
This is what my event document looks like:
{
"doc_type": "Event",
"email": "xxx#gmail.com",
"blablabla: "blablabla",
}
I still couldn't figure out how to do some kind of join between both docs and count the number of events that share the same email. I think I can work with the email that both docs share, but I don't know how to do that. I'm still having trouble with CouchDB. It doesn't seem like a hard thing to do in SQL, but I can't figure it out for NoSQL.
Thank you in advance.
"jointure" is not not a term I've encountered in my field so I am left to guess what is meant is join.
Joins are possible with CouchDB views, but what I read from the requirement in the OP is to get counts of events by email. See CouchDB's Joins With Views documentation. For that, I don't see documents with an ancestral relation rather a one-to-many relation, i.e. user ==> events.
Consider this design document:
{
"_id": "_design/SO-68999682",
"views": {
"user_events": {
"map": `function (doc) {
if(doc.doc_type === 'Event') {
emit(doc.email);
}
}`,
"reduce": '_count'
}
}
}
The view's map function simply adds doc.email to the 'user_events' index when appropriate. Of particular interest, the reduce function specifies the built-in reduce _count.
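To make that concrete, with the demo documents installed further below (2 events for Bobby, 3 for Jerry), the raw index (reduce=false) would look roughly like this; since the emit supplies no value, each row's value is null (the ids are the auto-generated document _ids, elided here):
{"rows":[
{"id":"…","key":"Bobby#gmail.com","value":null},
{"id":"…","key":"Bobby#gmail.com","value":null},
{"id":"…","key":"Jerry#gmail.com","value":null},
{"id":"…","key":"Jerry#gmail.com","value":null},
{"id":"…","key":"Jerry#gmail.com","value":null}
]}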
Given such a view index, one may apply the /db/_design/design-doc/_view/view-name endpoint to, for example, the queries below (an HTTP form of one of them is sketched after the list).
View all events
{
reduce: false,
include_docs: true
}
Get a count of all events
{
reduce: true
}
Get a count of events for every email (summary)
{
reduce: true,
group_level: 1
}
Get a count of events for a specific email
{
reduce: true,
group_level: 1,
key: email
}
Get all events for a specific email
{
reduce: false,
include_docs: true,
key: email
}
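Over HTTP, the "count of events for a specific email" case would be something like the following (a sketch only; the database name mydb is hypothetical, and both the quotes and the # in the key are URL-encoded):
http://127.0.0.1:5984/mydb/_design/SO-68999682/_view/user_events?reduce=true&group_level=1&key=%22xxx%23gmail.com%22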
The _count reduce built-in provides high performance. The snippet below demonstrates the above using the very handy and compatible PouchDB.
async function showAllEventDocs() {
let result = await db.query('SO-68999682/user_events', {
reduce: false,
include_docs: true
});
//show
gel('user_events_view').innerText = result.rows.map(row => [row.doc.email, row.doc.date].join('\t\t')).join('\n');
}
async function showEventCountTotal() {
let result = await db.query('SO-68999682/user_events', {
reduce: true
});
gel('event_count_total').innerText = result.rows[0].value;
}
async function showEventCountSummary() {
let result = await db.query('SO-68999682/user_events', {
reduce: true,
group_level: 1
});
//show key/value (email, count)
gel('event_count_summary').innerText = result.rows.map(row => [row.key, row.value].join('\t\t')).join('\n');
}
async function showUserEventCount(email, displayElement) {
let result = await db.query('SO-68999682/user_events', {
reduce: true,
group_level: 1,
key: email
});
//show value (count)
gel(displayElement).innerText = result.rows[0].value;
}
async function showUserEvents(email, displayElement) {
let result = await db.query('SO-68999682/user_events', {
reduce: false,
include_docs: true,
key: email
});
//show
gel(displayElement).innerText = result.rows.map(row => [row.doc.email, row.doc.date].join('\t\t')).join('\n');
}
function getDocsToInstall(count) {
const docs = [{
"doc_type": "User",
"email": "Jerry#gmail.com"
},
{
"doc_type": "User",
"email": "Bobby#gmail.com"
},
{
"doc_type": "Event",
"email": "Jerry#gmail.com",
"date": getDocDate().toISOString().slice(0, 10)
}, {
"doc_type": "Event",
"email": "Jerry#gmail.com",
"date": getDocDate().toISOString().slice(0, 10)
}, {
"doc_type": "Event",
"email": "Jerry#gmail.com",
"date": getDocDate().toISOString().slice(0, 10)
}, {
"doc_type": "Event",
"email": "Bobby#gmail.com",
"date": getDocDate().toISOString().slice(0, 10)
}, {
"doc_type": "Event",
"email": "Bobby#gmail.com",
"date": getDocDate().toISOString().slice(0, 10)
},
];
// design document
const ddoc = {
"_id": "_design/SO-68999682",
"views": {
"user_events": {
"map": `function (doc) {
if(doc.doc_type === 'Event') {
emit(doc.email);
}
}`,
"reduce": '_count'
}
}
};
docs.push(ddoc);
return docs;
}
const db = new PouchDB('SO-68999682', {
adapter: 'memory'
});
// install docs and show view in various forms.
(async() => {
await db.bulkDocs(getDocsToInstall(20));
await showAllEventDocs();
await showEventCountTotal();
await showEventCountSummary();
await showUserEventCount('Jerry#gmail.com', 'jerry_event_count');
await showUserEventCount('Bobby#gmail.com', 'bobby_event_count');
await showUserEvents('Jerry#gmail.com', 'jerry_events');
await showUserEvents('Bobby#gmail.com', 'bobby_events');
})();
const gel = id => document.getElementById(id);
function getDocDate() {
const today = new Date();
const day = Math.floor(Math.random() * 100) % today.getDate() + 1; // keep it basic
return new Date(today.getFullYear(), today.getMonth(), day)
}
.bold {
font-weight: bold
}
.plain {
font-weight: normal
}
<script src="https://cdn.jsdelivr.net/npm/pouchdb#7.1.1/dist/pouchdb.min.js"></script>
<script src="https://github.com/pouchdb/pouchdb/releases/download/7.1.1/pouchdb.memory.min.js"></script>
<pre>All user_events (entire view)</pre>
<pre id='user_events_view'></pre>
<hr/>
<pre>Total number of events: <span id='event_count_total'></span> events</pre>
<hr/>
<pre>Event count summary (user, count)</pre>
<pre id='event_count_summary'></pre>
<hr/>
<pre>Event count by email (specific to user)</pre>
<pre>Bobby#gmail.com has <span id='bobby_event_count'></span> events</pre>
<pre>Jerry#gmail.com has <span id='jerry_event_count'></span> events</pre>
<hr/>
<pre>Events by email</pre>
<pre class="bold">Bobby#gmail.com <pre class="plain" id='bobby_events'></pre></pre>
<pre class="bold">Jerry#gmail.com <pre class="plain" id='jerry_events'></pre></pre>
<hr/>
Notice the demo snippet's documents have a date field. If such a field existed in the OP's Event documents, then changing the emit to
emit(doc.email + '/' + doc.date);
would allow all the aforementioned queries plus the option to query by a date or date range, an exercise which I'll leave readers to explore.
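As a hint of the parameter shape such a query might take (a sketch only; the dates are placeholders), events for one user in a given period could then be read with:
{
  reduce: false,
  include_docs: true,
  startkey: email + '/2021-01-01',
  endkey: email + '/2021-12-31'
}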
I have to deal with objects of the following type in a Node.js app (using the mongodb driver):
data_test = {
"id": "105-20090412",
"date": new Date('2020-09-04T14:00:00.000Z'),
"station": {
"name": "AQ105",
"loc": {
"type": "Point",
"coordinates": [14.324498, 40.821930]
},
"properties": {}
},
"samples": [{
"t": new Date('2020-09-04T14:14:00.000Z'),
"data": {
//"temp_celsius": 31.81,
//"humRelPercent": 39,
"press_mBar": 1021.12,
"PM10": 200
}
}]
}
I receive data like the above every 2 minutes.
I want to:
If the data received has an id not yet present on MongoDB, do an insert
If the data received has a sample object with a Date (t property) already present, then add properties to that sample (for example readings from different sensors)
If the data received has a sample object with a Date (t property) not yet present in the samples array, then add this new one
I would like to do what is described above with the fewest possible round-trips to the MongoDB server.
I hope to have been clear enough.
Any suggestion?
Thanks in advance.
Here's my suggestion; this is not the complete answer, and you will need to fiddle with the query portion. The query below should work for 1 & 3; for 2 you will have to play around.
db.collection.updateOne(
  { "id" : "105-20090412", "samples.t": <Date> },
  {
    $push: { "samples" : <sample> },
    $setOnInsert: { station: <station> }
  },
  { upsert: true }
);
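Purely to illustrate the call shape (filter, a single update document combining $push and $setOnInsert, then options), here is a generic sketch in Node driver style with a hypothetical collection name; it is not the OP's exact matching logic:
db.collection('readings').updateOne(
  { "id": "some-id" },                                      // filter
  {
    $push: { "samples": { "t": new Date(), "data": {} } },  // applied whether matched or inserted
    $setOnInsert: { "station": {} }                         // applied only when the upsert inserts
  },
  { upsert: true }                                          // options
);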
References:
https://docs.mongodb.com/manual/reference/method/db.collection.updateOne/
https://docs.mongodb.com/manual/reference/operator/update/setOnInsert/#up._S_setOnInsert
https://docs.mongodb.com/manual/reference/operator/update/push/
I finally came to the following solution, perhaps not the most efficient one:
try {
const db = client.db(dbName);
const collection = db.collection(collectionName);
// separate the id from the rest of the data (station, date, samples)
let {
id,
...dataToInsert
} = data
//id = new ObjectID(id)
const queryBy_id = {
_id: id
}
// first check if doc exists
let res_query = await collection.findOne(queryBy_id)
// if the doc does not exist then insert a new one
if (!res_query) {
res_insert = await collection.insertOne({
_id: id,
...dataToInsert
})
return res_insert;
} else {
// retrieve samples from the initial query
let current_samples = res_query.samples
// check if the sample in dataToInsert already exists
// use getTime to correctly compare dates
let idx = current_samples.findIndex(x => x.t.getTime() == dataToInsert.samples[0].t.getTime())
if (idx >= 0) {
// find index of sample to update
let current_t = current_samples[idx].t
// merge the data already stored with the new one
const mergedData = {
...current_samples[idx].data,
...dataToInsert.samples[0].data
}
let resUpdateSample = await collection.updateOne({
_id: id,
'samples.t': current_t
}, {
$set: {
'samples.$.data': mergedData
}
})
return resUpdateSample
} else {
// add data to samples array
let resAddToSamples = await collection.updateOne({
_id: id
}, {
$push: {
samples: dataToInsert.samples[0]
}
})
return resAddToSamples
}
}
} catch (err) {
logger.error(err);
}
How can I improve it?
Thanks.
I want to fetch data from MongoDB using Mongoose and send it as a response, but I don't get the expected answer. What's my mistake?
My codes are as below:
First, my model file:
* I'm inserting data in bulk with create()
const express = require('express');
const mongoose= require('mongoose');
const Schema = mongoose.Schema;
const ourDataSchema = new Schema ({
rank : Number,
totalPoints : Number
});
const rankTotalpoint = mongoose.model("rankTotalpoint", ourDataSchema);
const ourData = [
{rank : 1, totalPoints : 2000},
{rank : 2, totalPoints : 1980},
{rank: 3, totalPoints : 1940},
{rank: 4, totalPoints : 1890},
{rank : 5, totalPoints : 1830},
{rank : 6, totalPoints : 1765},
{rank : 7, totalPoints : 1600},
{rank : 8, totalPoints : 1565},
{rank : 9, totalPoints : 1465},
{rank : 10, totalPoints : 1450}
];
rankTotalpoint.create(ourData, function (error, data) {
if (error) {
console.log(error)
}
else {
console.log('saved!');
}
});
exports.result = function (param) {
const finalresult = rankTotalpoint.aggregate([
{
$project: {
diff: {
$abs: {
$subtract: [
param, // <<<----------------------- THIS IS THE USER SUPPLIED VALUE
"$totalPoints"
]
}
},
doc: "$$ROOT"
}
},
{
$sort: {
diff: 1
}
},
{
$limit: 1
},
{
$project: {
_id: 0,
rank: "$doc.rank"
}
}
])
return finalresult;
};
And my controller file, where I imported my result function from above:
const express = require('express');
const model = require('../model/logic');
exports.index = (req, res, next) => {
res.status(200).json({message : 'INSERT INPUTS HERE'});
};
exports.getUserData = (req, res, next) => {
const literature = req.body.literature * 4;
const arabic = req.body.arabic * 2;
const religion = req.body.religion * 3;
const english = req.body.english * 2;
const math = req.body.math * 4;
const physics = req.body.physics * 3;
const chemistry = req.body.chemistry *2;
//user supplied value
const TOTALPOINT = literature + arabic + religion + english + math + physics + chemistry;
let result = model.result(TOTALPOINT);
res.status(200).json(result);
};
And finally, this is the response I get with Postman:
{
"_pipeline": [
{
"$project": {
"diff": {
"$abs": {
"$subtract": [
0,
"$totalPoints"
]
}
},
"doc": "$$ROOT"
}
},
{
"$sort": {
"diff": 1
}
},
{
"$limit": 1
},
{
"$project": {
"_id": 0,
"rank": "$doc.rank"
}
}
],
"options": {}
}
What do I want to get?
I want to get a rank based on the user input (TOTALPOINT), so instead of sending the above response, I just want to send back the rank to the user.
If the user value matches a totalPoints, send its rank as the response; if the exact value doesn't exist, find the closest totalPoints and send that rank as the response.
Like this:
[
{
"rank": 5
}
]
Thank you
Your issue is because Mongoose is promise/async based. You are not awaiting anything, so your code returns a variable that has not yet been set by your query.
I was testing using 2 files: myMongoose.js and index.js.
// myMongoose.js
// ** CODE THAT SAVES DATA TO DATABASE HAS BEEN REMOVED FOR BREVITY **
require('dotenv').config();
const mongoose = require('mongoose');
const RankTotalpointSchema = new mongoose.Schema({
rank: Number,
totalPoints: Number
});
mongoose.set('useCreateIndex', true);
const mongoConnection = mongoose.createConnection(process.env.MONGO_DB_STRING, {
useUnifiedTopology: true,
useNewUrlParser: true,
useFindAndModify: false,
});
const RankTotalpoint = mongoConnection.model("RankTotalpoint", RankTotalpointSchema, 'Testing');
/**
* ~~~~~~ **** THIS HAS TO BE AN ASYNC FUNCTION **** ~~~~~~
*/
exports.result = async function (param) {
const finalresult = await RankTotalpoint.aggregate([{
$project: {
diff: {
$abs: {
$subtract: [
param, // <<<----------------------- THIS IS THE USER SUPPLIED VALUE
"$totalPoints"
]
}
},
doc: "$$ROOT"
}
},
{
$sort: {
diff: 1
}
},
{
$limit: 1
},
{
$project: {
_id: 0,
rank: "$doc.rank"
}
}
])
return finalresult;
};
...and then in index.js:
// index.js
const { result } = require('./myMongoose');
// Use it like this:
async function init() {
try {
const d = await result(1800);
console.log(d);
} catch (err) {
console.error(err);
}
}
init(); // -> [ { rank: 5 } ]
// --------------------------------------------------------------------
// ...or like this:
(async () => {
try {
const d = await result(1800);
console.log(d); // -> [ { rank: 5 } ]
} catch (err) {
console.error(err);
}
})()
// --------------------------------------------------------------------
// ...or like this:
result(1800)
.then(d => console.log(d)) // -> [ { rank: 5 } ]
.catch(err => console.error(err))
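For completeness, here is a minimal sketch of how the OP's Express controller might then consume the async result() (this controller shape is my assumption based on the question, not part of the original answer):
const model = require('../model/logic');

exports.getUserData = async (req, res, next) => {
  try {
    // same weighting as in the question
    const TOTALPOINT =
      req.body.literature * 4 + req.body.arabic * 2 + req.body.religion * 3 +
      req.body.english * 2 + req.body.math * 4 + req.body.physics * 3 +
      req.body.chemistry * 2;
    const result = await model.result(TOTALPOINT); // result() must be the async version above
    res.status(200).json(result); // -> e.g. [ { "rank": 5 } ]
  } catch (err) {
    next(err);
  }
};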
How can I update multiple documents in MongoDB and set the value of an element in increasing order?
I have got the document as follows
{
"_id" : ObjectId("5b162a31dfaf342dc44c920d")
}
{
"_id" : ObjectId("5b162a31dfaf342dc44c920f")
}
{
"_id" : ObjectId("5b162a31dfaf342dc44c920c")
}
How can I update all of the documents with a single query so that every document gets a new field called "order" with an increasing value, as below?
{
"_id" : ObjectId("5b162a31dfaf342dc44c920d"),
"order": 1
}
{
"_id" : ObjectId("5b162a31dfaf342dc44c920f"),
"order": 2
}
{
"_id" : ObjectId("5b162a31dfaf342dc44c920c"),
"order": 3
}
Currently I am using the following way to solve the problem
for(let i = 0; i < req.body.id.length; i++) {
const queryOpts = {
_id: ObjectId(req.body.id[i])
};
const updateOpts = {
$set: {
'order': i + 1
}
};
const dataRes = await req.db.collection('GalleryImage').updateOne(queryOpts, updateOpts);
if(i === req.body.id.length-1) {
return commonHelper.sendResponseMessage(res, dataRes, {
_id: req.body.id
}, moduleConfig.message.updateGalleryOrder);
}
}
Is there any better way than this, so that it would not be an expensive operation when there are a large number of documents?
Use bulkWrite() with Array.map() to construct the statement:
try {
let response = await req.db.collection('GalleryImage').bulkWrite(
req.body.id.map((_id,order) =>
({ updateOne: {
filter: { _id: ObjectId(_id) },
update: {
$set: { order: order+1 }
}
}})
)
);
} catch(e) {
// deal with any errors
}
Array.map() provides the "index" of the array element being processed as the second argument of its callback. So simply use that to get the order and set it on all statements.
Rather than writing to and responding with the database n times, this only needs to happen "once".
There is no other way to get a "sequence" other than introducing it yourself, but at least we can do it with "one" write this way instead of several. Note also to "trap your possible errors" when using async/await syntax.
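As a trivial standalone illustration of that second callback argument:
["a", "b", "c"].map((item, idx) => idx + 1); // -> [1, 2, 3]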
Example listing
const { MongoClient, ObjectID: ObjectId } = require('mongodb');
const uri = 'mongodb://localhost:27017';
const data = [
"5b162a31dfaf342dc44c920d",
"5b162a31dfaf342dc44c920f",
"5b162a31dfaf342dc44c920c"
];
const log = data => console.log(JSON.stringify(data, undefined, 2));
(async function() {
try {
const client = await MongoClient.connect(uri);
let db = client.db('test');
// Set up
await db.collection('gallery').removeMany({});
await db.collection('gallery').insertMany(
data.map(_id => ({ _id: ObjectId(_id) }))
);
// Update with indexes
let response = await db.collection('gallery').bulkWrite(
data.map((_id,idx) =>
({
updateOne: {
filter: { _id: ObjectId(_id) },
update: { $set: { order: idx+1 } }
}
})
)
);
log({ response });
let items = await db.collection('gallery').find().toArray();
log({ items });
client.close();
} catch(e) {
console.error(e)
} finally {
process.exit()
}
})()
And the output
{
"response": {
"ok": 1,
"writeErrors": [],
"writeConcernErrors": [],
"insertedIds": [],
"nInserted": 0,
"nUpserted": 0,
"nMatched": 3,
"nModified": 3,
"nRemoved": 0,
"upserted": [],
"lastOp": {
"ts": "6563535160225038345",
"t": 18
}
}
}
{
"items": [
{
"_id": "5b162a31dfaf342dc44c920d",
"order": 1
},
{
"_id": "5b162a31dfaf342dc44c920f",
"order": 2
},
{
"_id": "5b162a31dfaf342dc44c920c",
"order": 3
}
]
}
Clearly shows nMatched: 3 and nModified: 3 just as is expected.
I have an array 'pets': [{'fido': ['abc']}] that is an embedded document. When I add a pet to the array, how can I check to see if that pet already exists? For instance, if I added fido again, how can I check whether fido already exists and not add it? I was hoping I could use $addToSet, but I only want to check part of the set (the pet's name).
User.prototype.updatePetArray = function(userId, petName) {
userId = { _id: ObjectId(userId) };
return this.collection.findOneAndUpdate(userId,
{ $addToSet: { pets: { [petName]: [] } } },
{ returnOriginal: false,
maxTimeMS: QUERY_TIME });
};
Result of adding fido twice:
{u'lastErrorObject': {u'updatedExisting': True, u'n': 1}, u'ok': 1, u'value': {u'username': u'bob123', u'_id': u'56d5fc8381c9c28b3056f794', u'location': u'AT', u'pets': [{u'fido': []}]}}
{u'lastErrorObject': {u'updatedExisting': True, u'n': 1}, u'ok': 1, u'value': {u'username': u'bob123', u'_id': u'56d5fc8381c9c28b3056f794', u'location': u'AT', u'pets': [{u'fido': [u'abc']}, {u'fido': []}]}}
If there is always going to be "variable" content within each member of the "pets" array (i.e. petName as the key) then $addToSet is not for you. At least not at the array level where you are looking to apply it.
Instead, you basically need an $exists test on the "key" of the document contained in the array, then either $addToSet to the "contained" array of that matched key with the positional $ operator, or, where the "key" was not matched, $push directly to the "pets" array with the new inner content as the sole array member.
So if you can live with not returning the modified document, then "Bulk" operations are for you. In modern drivers with bulkWrite():
User.prototype.updatePetArray = function(userId, petName, content) {
var filter1 = { "_id": ObjectId(userId) },
filter2 = { "_id": ObjectId(userId) },
update1 = { "$addToSet": {} },
update2 = { "$push": { "pets": {} } };
filter1["pets." + petName] = { "$exists": true };
filter2["pets." + petName] = { "$exists": false };
var setter1 = {};
setter1["pets.$." + petName] = content;
update1["$addToSet"] = setter1;
var setter2 = {};
setter2[petName] = [content];
update2["$push"]["pets"] = setter2;
// Return the promise that yields the BulkWriteResult of both calls
return this.collection.bulkWrite([
{ "updateOne": {
"filter": filter1,
"update": update1
}},
{ "updateOne": {
"filter": filter2,
"update": update2
}}
]);
};
If you must return the modified document, then you are going to need to resolve each call and return the one that actually matched something:
User.prototype.updatePetArray = function(userId, petName, content) {
var filter1 = { "_id": ObjectId(userId) },
filter2 = { "_id": ObjectId(userId) },
update1 = { "$addToSet": {} },
update2 = { "$push": { "pets": {} } };
filter1["pets." + petName] = { "$exists": true };
filter2["pets." + petName] = { "$exists": false };
var setter1 = {};
setter1["pets.$." + petName] = content;
update1["$addToSet"] = setter1;
var setter2 = {};
setter2[petName] = [content];
update2["$push"]["pets"] = setter2;
// Return the promise that returns the result that matched and modified
return new Promise(function(resolve,reject) {
var operations = [
this.collection.findOneAndUpdate(filter1,update1,{ "returnOriginal": false}),
this.collection.findOneAndUpdate(filter2,update2,{ "returnOriginal": false})
];
// Promise.all runs both, and discard the null document
Promise.all(operations).then(function(result) {
resolve(result.filter(function(el) { return el.value != null } )[0].value);
},reject);
});
};
In either case this requires "two" update attempts where only "one" will actually succeed and modify the document, since only one of the $exists tests is going to be true.
So as an example of that first case, the "query" and "update" are resolving after interpolation as:
{
"_id": ObjectId("56d7b759e955e2812c6c8c1b"),
"pets.fido": { "$exists": true }
},
{ "$addToSet": { "pets.$.fido": "ccc" } }
And the second update as:
{
"_id": ObjectId("56d7b759e955e2812c6c8c1b"),
"pets.fido": { "$exists": false }
},
{ "$push": { "pets": { "fido": ["ccc"] } } }
Given variables of:
userId = "56d7b759e955e2812c6c8c1b",
petName = "fido",
content = "ccc";
Personally I would not be naming keys like this, but rather change the structure to:
{
"_id": ObjectId("56d7b759e955e2812c6c8c1b"),
"pets": [{ "name": "fido", "data": ["abc"] }]
}
That makes the update statements easier, and without the need for variable interpolation into the key names. For example:
{
"_id": ObjectId(userId),
"pets.name": petName
},
{ "$addToSet": { "pets.$.data": content } }
and:
{
"_id": ObjectId(userId),
"pets.name": { "$ne": petName }
},
{ "$push": { "pets": { "name": petName, "data": [content] } } }
Which feels a whole lot cleaner and can actually use an "index" for matching, which of course $exists simply cannot.
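For instance, an index to support that match could be created like so (a sketch; the collection name users is assumed):
db.collection('users').createIndex({ "pets.name": 1 })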
There is of course more overhead if using .findOneAndUpdate(), since this is after all "two" actual calls to the server for which you need to await a response, as opposed to the Bulk method which is just "one".
But if you need the returned document (the option is the default in the driver anyway), then either do that, or similarly await the Promise resolved from .bulkWrite() and then fetch the document via .findOne() after completion. Note that doing it via .findOne() after the modification would not truly be "atomic" and could possibly return the document "after" another similar modification was made, and not only in the state of that particular change.
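A rough sketch of that latter approach (reusing the bulkWrite()-based updatePetArray() from above; not atomic, as noted):
User.prototype.updatePetArrayAndFetch = function(userId, petName, content) {
  var self = this;
  // run the two bulk update attempts, then read back the current document state
  return self.updatePetArray(userId, petName, content)
    .then(function() {
      return self.collection.findOne({ "_id": ObjectId(userId) });
    });
};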
N.B. Also assuming that, apart from treating the keys of the subdocuments in "pets" as a "set", your other intention for the contained array was adding to that "set" as well via the additional content supplied to the function. If you just wanted to overwrite a value, then just apply $set instead of $addToSet and similarly wrap the content as an array.
But it sounds reasonable that the former was what you were asking.
BTW, please clean up my horrible setup code for the query and update objects in this example in your actual code :)
As a self contained listing to demonstrate:
var async = require('async'),
mongodb = require('mongodb'),
MongoClient = mongodb.MongoClient;
MongoClient.connect('mongodb://localhost/test',function(err,db) {
var coll = db.collection('pettest');
var petName = "fido",
content = "bbb";
var filter1 = { "_id": 1 },
filter2 = { "_id": 1 },
update1 = { "$addToSet": {} },
update2 = { "$push": { "pets": {} } };
filter1["pets." + petName] = { "$exists": true };
filter2["pets." + petName] = { "$exists": false };
var setter1 = {};
setter1["pets.$." + petName] = content;
update1["$addToSet"] = setter1;
var setter2 = {};
setter2[petName] = [content];
update2["$push"]["pets"] = setter2;
console.log(JSON.stringify(update1,undefined,2));
console.log(JSON.stringify(update2,undefined,2));
function CleanInsert(callback) {
async.series(
[
// Clean data
function(callback) {
coll.deleteMany({},callback);
},
// Insert sample
function(callback) {
coll.insert({ "_id": 1, "pets": [{ "fido": ["abc"] }] },callback);
}
],
callback
);
}
async.series(
[
CleanInsert,
// Modify Bulk
function(callback) {
coll.bulkWrite([
{ "updateOne": {
"filter": filter1,
"update": update1
}},
{ "updateOne": {
"filter": filter2,
"update": update2
}}
]).then(function(res) {
console.log(JSON.stringify(res,undefined,2));
coll.findOne({ "_id": 1 }).then(function(res) {
console.log(JSON.stringify(res,undefined,2));
callback();
});
},callback);
},
CleanInsert,
// Modify Promise all
function(callback) {
var operations = [
coll.findOneAndUpdate(filter1,update1,{ "returnOriginal": false }),
coll.findOneAndUpdate(filter2,update2,{ "returnOriginal": false })
];
Promise.all(operations).then(function(res) {
//console.log(JSON.stringify(res,undefined,2));
console.log(
JSON.stringify(
res.filter(function(el) { return el.value != null })[0].value
)
);
callback();
},callback);
}
],
function(err) {
if (err) throw err;
db.close();
}
);
});
And the output:
{
"$addToSet": {
"pets.$.fido": "bbb"
}
}
{
"$push": {
"pets": {
"fido": [
"bbb"
]
}
}
}
{
"ok": 1,
"writeErrors": [],
"writeConcernErrors": [],
"insertedIds": [],
"nInserted": 0,
"nUpserted": 0,
"nMatched": 1,
"nModified": 1,
"nRemoved": 0,
"upserted": []
}
{
"_id": 1,
"pets": [
{
"fido": [
"abc",
"bbb"
]
}
]
}
{"_id":1,"pets":[{"fido":["abc","bbb"]}]}
Feel free to change to different values to see how different "sets" are applied.
Please try this one with a string template. Here is an example running under the mongo shell:
> var name = 'fido';
> var t = `pets.${name}`; // string template, interpolates the name variable
> db.pets.find()
{ "_id" : ObjectId("56d7b5019ed174b9eae2b9c5"), "pets" : [ { "fido" : [ "abc" ]} ] }
With the following update command, it will not update anything if the same pet name already exists.
> db.pets.update({[t]: {$exists: false}}, {$addToSet: {pets: {[name]: []}}})
WriteResult({ "nMatched" : 0, "nUpserted" : 0, "nModified" : 0 })
If the pets document is
> db.pets.find()
{ "_id" : ObjectId("56d7b7149ed174b9eae2b9c6"), "pets" : [ { "fi" : [ "abc" ] } ] }
After update with
> db.pets.update({[t]: {$exists: false}}, {$addToSet: {pets: {[name]: []}}})
WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 })
The result shows that the pet name is added if it does not already exist.
> db.pets.find()
{ "_id" : ObjectId("56d7b7149ed174b9eae2b9c6"), "pets" : [ { "fi" : [ "abc" ] }, { "fido" : [ ] } ] }