Nodejs - sort an array of object with another array - node.js

Is it possible to sort or re-arrange the json object with another array set
Here is the JSON object with url key
// NOTE(review): the original declared `var items-array`, which is not a
// valid JavaScript identifier (`-` cannot appear in names). Renamed to
// `itemsArray`, matching the identifier used by the answer code below.
var itemsArray = [
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "new featured parallex",
          "indexContent": " new "
        }
      },
      "url": "/demo-inline-component-capability/demo-of-featured-parallex"
    },
    "isPromoted": true
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "demo of event careers",
          "description": "careers",
          "indexContent": " careers demo urrent"
        }
      },
      "url": "/demo-inline-component-capability/demo-of-event-card"
    },
    "isPromoted": true
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "reference event cards",
          "indexContent": " reference event cards <cmp id=\"jvcbt0if\" class=\"cmp\" contenteditable=\"false\"> orange"
        }
      },
      "url": "/demo-inline-component-capability/demo-of-ref-event-card"
    },
    "isPromoted": true
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "demo of video playlists",
          "indexContent": " demo "
        }
      },
      "url": "/demo-inline-component-capability/demo-of-video-playlist"
    },
    "isPromoted": true
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "demo of data point set careers",
          "description": "careers",
          "indexContent": " careers demo of data point set red"
        }
      },
      "url": "/demo-inline-component-capability/demo-of-data-point-set"
    },
    "sort": [
      1555320208440
    ]
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "Xfinity TV’sa Lunar New Year collection is a celebration of the vast contributions the Asian American community makes to entertainment across comcast careers",
          "description": "mobile name",
          "indexContent": " mobile name "
        }
      },
      "url": "/lunar-festival"
    },
    "sort": [
      1551093922066
    ]
  }
]
To match with sorting array of url
var sortingArr = ["/demo-inline-component-capability/demo-of-video-playlist","/demo-inline-component-capability/demo-of-featured-parallel","/demo-inline-component-capability/demo-of-ref-event-card","/demo-inline-component-capability/demo-of-event-card"]
Unfortunately, I don’t have any IDs to keep track of. I would need to prioritize the items-array to match the sortingArr as closely as possible.
Any idea how this can be done.

The code below will output a new array newArr with whatever it finds in the order provided. It will not include any items that aren't in the sortingArr.
// Collect items in the order dictated by sortingArr; items whose url
// is absent from sortingArr are simply not included.
var newArr = [];
sortingArr.forEach(function (url) {
  var match = itemsArray.find(function (item) {
    return item._source.url === url;
  });
  if (match != null) {
    newArr.push(match);
  }
});
newArr.forEach(function (item) {
  console.log(item);
});
Also, the sortingArr is malformed. It should be:
var sortingArr = ["/demo-inline-component-capability/demo-of-video-playlist","/demo-inline-component-capability/demo-of-featured-parallel","/demo-inline-component-capability/demo-of-ref-event-card","/demo-inline-component-capability/demo-of-event-card"]

You can implement a custom sort for this
// Build a url -> rank lookup once, so each comparison is O(1) instead of
// scanning sortingArr twice per comparison (findIndex made this O(n*m)).
const urlRank = new Map(sortingArr.map((url, index) => [url, index]));
// rank of an item's url in the priority list; -1 when not listed
const rankOf = (item) =>
  urlRank.has(item._source.url) ? urlRank.get(item._source.url) : -1;
const sorted = items_array.sort((a, b) => {
  const ai = rankOf(a);
  const bi = rankOf(b);
  // unranked items (-1) sink below any ranked item
  if (ai === -1 && bi > ai) return 1;
  if (bi === -1 && ai > bi) return -1;
  // both ranked (ascending) or both unranked (0: keep relative order)
  return ai - bi;
});
Example
// Sample data set: six result items, two of which ("data-point-set" and
// "lunar-festival") never appear in the priority list below.
var items_array = [
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "new featured parallex",
          "indexContent": " new "
        }
      },
      "url": "/demo-inline-component-capability/demo-of-featured-parallex"
    },
    "isPromoted": true
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "demo of event careers",
          "description": "careers",
          "indexContent": " careers demo urrent"
        }
      },
      "url": "/demo-inline-component-capability/demo-of-event-card"
    },
    "isPromoted": true
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "reference event cards",
          "indexContent": " reference event cards <cmp id=\"jvcbt0if\" class=\"cmp\" contenteditable=\"false\"> orange"
        }
      },
      "url": "/demo-inline-component-capability/demo-of-ref-event-card"
    },
    "isPromoted": true
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "demo of video playlists",
          "indexContent": " demo "
        }
      },
      "url": "/demo-inline-component-capability/demo-of-video-playlist"
    },
    "isPromoted": true
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "demo of data point set careers",
          "description": "careers",
          "indexContent": " careers demo of data point set red"
        }
      },
      "url": "/demo-inline-component-capability/demo-of-data-point-set"
    },
    "sort": [
      1555320208440
    ]
  },
  {
    "_source": {
      "Edition": {
        "Values": {
          "title": "Xfinity TV’sa Lunar New Year collection is a celebration of the vast contributions the Asian American community makes to entertainment across comcast careers",
          "description": "mobile name",
          "indexContent": " mobile name "
        }
      },
      "url": "/lunar-festival"
    },
    "sort": [
      1551093922066
    ]
  }
];
// Priority list: items whose url appears earlier here sort first; urls
// not present sink to the end in their original relative order.
var sortingArr = [
  "/demo-inline-component-capability/demo-of-video-playlist",
  "/demo-inline-component-capability/demo-of-featured-parallel",
  "/demo-inline-component-capability/demo-of-ref-event-card",
  "/demo-inline-component-capability/demo-of-event-card"
];
console.log('BEFORE');
items_array.forEach(function (item) {
  console.log(item._source.url);
});
const sorted = items_array.sort(function (a, b) {
  // rank of each url in the priority list (-1 when absent)
  const ai = sortingArr.indexOf(a._source.url);
  const bi = sortingArr.indexOf(b._source.url);
  if (ai === -1 && bi !== -1) return 1;  // only b ranked -> a goes after
  if (bi === -1 && ai !== -1) return -1; // only a ranked -> a goes first
  return ai - bi;                        // both ranked, or both unranked (0)
});
console.log('AFTER');
sorted.forEach(function (item) {
  console.log(item._source.url);
});

Hi, I have come up with this O(n) solution.
you can create an object having keys as the url and value as list of objects corresponding to the URL
Then based on the sorting order URL present in the sortArray you can iterate over the sort array and get the corresponding list of objects in the object and merge them to get the final array
/**
 * Reorder items to match the url order in sortUrl, in O(n + m).
 * Items sharing a url keep their original relative order; items whose
 * url is not in sortUrl are dropped (same behavior as the original).
 *
 * FIX: the original `for (item of ...)` / `for (url of ...)` loops
 * created implicit globals, which throws a ReferenceError in strict
 * mode / ES modules — declared with `const` here.
 *
 * @param {Array<Object>} items_array - items with a `_source.url` field
 * @param {Array<string>} sortUrl - urls in the desired output order
 * @returns {Array<Object>} items grouped and ordered by sortUrl
 */
function sortItem(items_array, sortUrl) {
  // Bucket items by url (insertion order preserved within a bucket).
  const urlMap = {};
  for (const item of items_array) {
    const url = item._source.url;
    if (!urlMap.hasOwnProperty(url)) {
      urlMap[url] = [];
    }
    urlMap[url].push(item);
  }
  // Emit buckets in sortUrl order; skip urls with no matching items.
  let finalList = [];
  for (const url of sortUrl) {
    if (urlMap.hasOwnProperty(url)) {
      finalList = finalList.concat(urlMap[url]);
    }
  }
  return finalList;
}
sortUrl is the list of urls to sort the items_array
var sortingArr = ["/demo-inline-component-capability/demo-of-video-playlist","/demo-inline-component-capability/demo-of-featured-parallel","/demo-inline-component-capability/demo-of-ref-event-card","/demo-inline-component-capability/demo-of-event-card"]

Related

CouchDB display distinct values

I have a document like below,
{
"id": "7d9fdc2f4846544d62da3421bf011b31",
"al": [
{ "id16": "0x1d42",
"pos": {
"x": 10.32,
"y": 11.13,
"z": 1.22
},
"resultTime": "2020-06-01T20:45:34.976Z"
},
{ "id16": "0x1342",
"pos": {
"x": 0.32,
"y": 1.13,
"z": 13.22
},
"resultTime": "2021-06-01T20:45:34.976Z"
}
.
.
.
],
"Timestamp": 272179,
"Oid": "Onion1",
}
and Design document is like below
{
"id": "_design/GetALwithAnchorID",
"key": "_design/GetALwithAnchorID",
"value": {
"rev": "32-6db6c4e105336d47a6c8e7e8458ee345"
},
"doc": {
"_id": "_design/GetALwithAnchorID",
"_rev": "32-6db6c4e105336d47a6c8e7e8458ee345",
"views": {
"GetALwithAnchorID": {
"map": "function (doc) {\n\n for (var i=0; i<doc.al.length; i++) { \n emit(doc.al[i].id16, doc.al[i].pos);\n }\n \n}\n\n",
"reduce": "_approx_count_distinct"
}
},
"language": "javascript"
}
}
when I query the view like
http://127.0.0.1:5984/rtls/_design/GetALwithAnchorID/_view/GetALwithAnchorID?group_level=1&key=%220x1d42%22
I get the results as below
{"rows":[
{"key":"0x1d42","value":1}
]}
But I want distinct values of id16 and pos of id16, and to sort these distinct values by time and display the values of pos instead of "value":1 when I query?
thank you in advance.
OK so not quite the same as this similar answer. Anyone coming across this Q/A, I recommend reading over that answer.
Consider the following emit for your given doc structure:
// Map function body: one row per `al` entry. FIX: the original had an
// extra trailing ")" that made the snippet a syntax error.
doc.al.forEach(e => emit(
  [e.pos.x, e.pos.y, e.pos.z, e.resultTime], // key: pos first, then time
  [e.id16, e.pos, e.resultTime]              // value
));
The emit's complex key visualized in the index (loosely not verbatim):
[-3,-2,-1,"2017-10-28T22:56:58.852Z"]
[-3,-2,-1,"2019-01-23T03:33:20.958Z"] **
. . .
[0,0,0,"2016-05-27T01:38:36.305Z"]
[0,0,0,"2016-12-27T05:17:02.255Z"] **
. . .
[1,2,3,"2016-11-14T17:31:59.468Z"]
[1,2,3,"2017-07-17T07:52:38.180Z"] **
Where each ** the last item in the pos group and significantly the most recent resultTime. All due to CouchDB's collation.
Working with CouchDB demands understanding the B-tree, and its documentation has a great rundown of it in its Reduce/Rereduce documentation.
Now consider this reduce function:
// CouchDB reduce function: because the view is queried with
// descending: true, values[0] within each group is the row with the
// most recent resultTime (last in ascending collation order).
function(keys,values,rereduce) {
return values[0];
}
It doesn't look terribly impressive, but further consider calling the view with these parameters:
{
reduce: true,
group_level: 1,
descending: true
}
By reversing the order of the index scan with descending the reduce function is guaranteed to return the most recent row with respect to resultTime of any given pos group.
Here's a simple demo using pouchDB. It generates 6 documents with random resultTime's and randomly selects pos from a pool of 3. Have a look at the design doc.
/**
 * Query the reduce view grouped at level 1 with a descending index
 * scan (so the reduce yields the most recent row per pos group) and
 * render one value per line into the #view_reduce element.
 *
 * FIX: removed a leftover `debugger;` statement that halts execution
 * whenever devtools are open.
 *
 * @param {string} view - design-doc/view name, e.g. 'SO-66231293/id16'
 * @returns {Promise<Object>} the raw query result
 */
async function showReduceDocs(view) {
  let result = await db.query(view, {
    reduce: true,
    group_level: 1,
    descending: true
  });
  // show
  gel('view_reduce').innerText = result.rows.map(row => `${JSON.stringify(row.value)}`.split(',').join(', ')).join('\n');
  return result;
}
// Render every raw index row's complex key (one per line) into the
// #view_docs element; the view is read without reducing.
async function showViewDocs(view) {
  const result = await db.query(view, {
    reduce: false,
    include_docs: false
  });
  //show
  const lines = result.rows.map((row) => JSON.stringify(row.key));
  gel('view_docs').innerText = lines.join('\n');
}
// Build `count` random sample documents plus the design document.
// Each doc has 6 `al` entries whose pos is drawn from a pool of 3 and
// whose resultTime is random; _id letters ascend from 'A' while array
// slots are filled from the back (matching the original fill order).
function getDocsToInstall(count) {
  // design document
  const ddoc = {
    "_id": "_design/SO-66231293",
    "views": {
      "id16": {
        "map": `function (doc) {
doc.al.forEach((e) => emit([e.pos.x, e.pos.y, e.pos.z, e.resultTime],[e.id16, e.pos, e.resultTime]));
}`,
        "reduce": `function(keys,values,rereduce) {
return values[0];
}`
      }
    }
  };
  const docs = new Array(count);
  let docId = 65;
  const posSeed = [
    { x: 0, y: 0, z: 0 },
    { x: 1, y: 2, z: 3 },
    { x: -3, y: -2, z: -1 }
  ];
  const dateSeed = [new Date(2000, 0, 1), new Date(), 0, 24];
  for (let slot = count - 1; slot >= 0; slot--) {
    const doc = {
      _id: String.fromCharCode(docId++),
      al: new Array(6)
    };
    for (let n = 5; n >= 0; n--) {
      doc.al[n] = {
        "id16": "0x000" + n,
        "pos": posSeed[Math.floor(Math.random() * 100) % 3],
        "resultTime": randomDate(...dateSeed).toISOString()
      };
    }
    docs[slot] = doc;
  }
  docs.push(ddoc);
  return docs;
}
// In-memory PouchDB instance backing this demo.
const db = new PouchDB('SO-66231293', {
adapter: 'memory'
});
// Seed the database, reveal the output container, then render the view
// in both reduced and raw forms.
(async() => {
// install docs and show view in various forms.
await db.bulkDocs(getDocsToInstall(6));
gel('content').classList.remove('hide')
showReduceDocs('SO-66231293/id16');
showViewDocs('SO-66231293/id16');
})();
// Shorthand for document.getElementById. Declared after its first
// textual use above, but only invoked after the awaited bulkDocs
// resolves, by which time the binding is initialized.
const gel = id => document.getElementById(id);
/*
https://stackoverflow.com/questions/31378526/generate-random-date-between-two-dates-and-times-in-javascript/31379050#31379050
*/
// Return a random Date in [start, end), then overwrite its hour with a
// uniform integer in [startHour, endHour). (From the SO answer linked
// in the comment above.)
function randomDate(start, end, startHour, endHour) {
  const millis = +start + Math.random() * (end - start);
  const result = new Date(millis);
  const hour = (startHour + Math.random() * (endHour - startHour)) | 0;
  result.setHours(hour);
  return result;
}
<script src="https://cdn.jsdelivr.net/npm/pouchdb#7.1.1/dist/pouchdb.min.js"></script>
<script src="https://github.com/pouchdb/pouchdb/releases/download/7.1.1/pouchdb.memory.min.js"></script>
<div id='content' class='hide'>
<div>View: reduce</div>
<pre id='view_reduce'></pre>
<hr/>
<div>View: complex key</div>
<pre id='view_docs'></pre>
</div>
Edit
Amended the demo snippet according to OP's comments.

postman POST request is populating the database correctly, but is getting null values when writing to file

I have been having a weird issue with one of my node controllers. For context, on a POST request for this specific controller, I store a an object in a mongo database, and also write the necessary parts of the object to a file. There is a website that already exists, which interfaces directly with the server, but I am writing a REST api for customers who would like a custom interface. (I did not write the node server or the website.)
My issue is that for some reason, the values being written to the file in this case are coming through as Null after a "post" like so:
{"legs":[{"ptu":{"tilt":{},"pan":{}}},{"audio":{"mute":false,"vol":0},"ptu":{"tilt":{"abs":null},"pan":{"abs":null}}},{"audio":{"mute":true,"vol":0},"ptu":{"tilt":{"abs":null},"pan":{"abs":null}}}]}
however, the forms on the website populate correctly, and if I press "save" from the website, the file is correctly updated. i.e.
{"legs":[{"ptu":{"tilt":{"abs":0},"pan":{"abs":0}}},{"audio":{"track":"/home/rahd/ult0316-p002/resources/tracks/Maid with the Flaxen Hair.mp3","vol":0,"mute":false},"ptu":{"tilt":{"abs":10},"pan":{"abs":10}}},{"audio":{"track":null,"vol":0,"mute":true},"ptu":{"tilt":{"abs":10},"pan":{"abs":10}}}]}
here is my postman request which is being sent as raw JSON:
{
"name": "NicksCoolTour3",
"location": "/home/rahd/ult0316-p002/resources/tours/5982374cb492c516c20c40d0.json",
"legs": [
{
"audio": {
"mute": true,
"volPercent": 0,
"vol": -120,
"track": null
},
"ptu": {
"poi": "59823726b492c516c20c40cd",
"tilt": {
"vel": 5,
"rel": 0,
"abs": 0
},
"pan": {
"vel": 5,
"rel": 0,
"abs": 0
},
"direction": "quickest"
},
"time": 0,
"velMode": "time",
"ptuMode": "poi"
},
{
"_id": "5982374cb492c516c20c40d2",
"audio": {
"mute": false,
"volPercent": 100,
"vol": -120,
"track": "5983222d79930a1dbd4d94ac"
},
"ptu": {
"tilt": {
"vel": 5,
"rel": 10,
"abs": 0
},
"pan": {
"vel": 5,
"rel": 10,
"abs": 0
},
"direction": "quickest"
},
"time": 0,
"velMode": "time",
"ptuMode": "rel"
},
{
"_id": "5982374cb492c516c20c40d1",
"audio": {
"mute": true,
"volPercent": 100,
"vol": -120,
"track": "59823711b492c516c20c40cc"
},
"ptu": {
"tilt": {
"vel": 5,
"rel": 0,
"abs": 0
},
"pan": {
"vel": 5,
"rel": 0,
"abs": 0
},
"direction": "quickest"
},
"time": 0,
"velMode": "time",
"ptuMode": "rel"
}
]
}
and here is my POST controller :
// POST / — create a Tour from the request body. File export is a side
// effect of the schema's pre('save') hook, not of this handler.
router.post('/', function (req, res, next) {
  var new_tour = new Tour(req.body);
  // FIX: invoke the constructor explicitly; the original relied on
  // paren-less `new mongoose.Types.ObjectId`, which works but is easy
  // to misread and breaks if the expression gains a property access.
  new_tour._id = new mongoose.Types.ObjectId();
  new_tour.save(function (err, tour) {
    if (err) return next(err);
    res.json({ message: "tours database sucessfully updated" });
  });
});
I am not sure what could be causing this, it seems that the database is getting the correct values, but the function that writes a request to the file is not behaving properly.
here is the schema which handles the file writing:
var mongoose = require("mongoose")
, fs = require('fs')
, path = require('path')
, resources = require(path.join(__dirname, '..', '..', 'config', 'resources'));
// Tour schema: a named tour is a sequence of legs, each combining a
// pan/tilt-unit (ptu) move with optional audio. `location` is the path
// of the exported JSON file (filled in by the pre-save hook if empty).
var schema = new mongoose.Schema({
name: { type: String, default: '', unique: true, required: true },
location: { type: String },
legs: [{
ptuMode: { type: String, default: 'abs' }, // abs || rel || poi (writeToFile/prepareExport also handle 'delay')
velMode: { type: String, default: 'vel' }, // vel || time
time: { type: Number, default: 0 }, // seconds used when velMode === 'time'
ptu: {
direction: { type: String, default: 'cw' }, // cw || ccw
pan: {
rel: { type: Number },
abs: { type: Number },
vel: { type: Number },
},
tilt: {
rel: { type: Number },
abs: { type: Number },
vel: { type: Number },
},
// Point-of-interest reference used when ptuMode === 'poi'.
poi: {
type: mongoose.Schema.Types.ObjectId,
ref: 'POI'
},
},
audio: {
mute: { type: Boolean },
vol: { type: Number },
volPercent: { type: Number },
track: {
type: mongoose.Schema.Types.ObjectId,
ref: 'Track'
},
}
}]
},
{
timestamps: true
});
// Pre-save: default the export path, then write the export file.
// FIX: the original called next() synchronously right after starting
// the write, so the save (and the HTTP response) could complete before
// the file was written — which is the null-values symptom reported.
// next() now runs only once writeToFile's callback fires.
schema.pre('save', function (next) {
  var tour = this;
  if (!tour.location || tour.location.length < 1) {
    tour.location = path.join(resources.tours, tour._id + '.json');
  }
  tour.writeToFile(function (err) {
    if (err) console.error(err);
    next();
  });
});
// Post-remove: delete the exported JSON file for the removed tour.
// FIX: fs.exists is deprecated and fs.unlink without a callback throws
// on modern Node; attempting the unlink and inspecting the error also
// avoids the exists/unlink race.
schema.post('remove', function (tour) {
  if (tour.location && tour.location.length > 0) {
    fs.unlink(tour.location, function (err) {
      if (err) {
        console.log('Tour not found, so not deleting file.');
      } else {
        console.log('Deleted Tour: ' + tour.location);
      }
    });
  }
});
// Serialize this tour's export form (see prepareExport) to its
// `location` path, stripping fields the control server does not need.
// FIX: the callback is now invoked on success and on the empty-location
// path too — previously it fired only on fs.writeFile error, and the
// empty-location branch referenced an out-of-scope `err`.
schema.methods.writeToFile = function (callback) {
  function saveToFile(tour, callback) {
    var filePath = tour.location;
    var name = tour.name;
    // Strip document-level metadata from the export object.
    delete tour.location;
    delete tour.name;
    delete tour.createdAt;
    delete tour.updatedAt;
    delete tour._id;
    delete tour.__v;
    for (var li in tour.legs) { // `var` added: `li` leaked as an implicit global
      var leg = tour.legs[li];
      var index = li;
      if (typeof index === 'string') {
        index = parseInt(li, 10); // explicit radix
      }
      delete leg._id;
      // Delete rel, force abs
      delete leg.ptu.tilt.rel;
      delete leg.ptu.pan.rel;
      // Convert volPercent (0-100) into dB, clamped at -120.
      if (leg.audio.hasOwnProperty("volPercent")) {
        var x = leg.audio.volPercent;
        delete leg.audio.volPercent;
        var n = -120;
        if (x > 0) {
          var val = Math.pow((x / 100), 4);
          n = Math.max(20 * Math.log10(val), -120)
        }
        leg.audio.vol = n;
      }
      if (index == 0) {
        // First leg has no travel, so velocities are dropped.
        delete leg.ptu.pan.vel;
        delete leg.ptu.tilt.vel;
      } else {
        if (leg.ptu.pan.vel == 0) {
          leg.ptu.pan.vel = 50;
        }
        if (leg.ptu.tilt.vel == 0) {
          leg.ptu.tilt.vel = 50;
        }
        if (leg.ptu.direction === 'ccw') {
          leg.ptu.pan.vel = -(Math.abs(leg.ptu.pan.vel));
        }
      }
      if (leg.ptu.direction === 'quickest') {
        delete leg.ptu.tilt.vel;
        delete leg.ptu.pan.vel;
      }
      // A populated track document is replaced by its file location.
      if (typeof (leg.audio.track) === 'object' && leg.audio.track !== null) {
        leg.audio.track = leg.audio.track.location;
      }
      // Handle Delay
      if (leg.ptuMode == 'delay') {
        delete leg.ptu.pan;
        delete leg.ptu.tilt;
      } else {
        delete leg.ptu.delay;
      }
      delete leg.ptu.poi;
      delete leg.time;
      delete leg.ptu.direction;
      delete leg.ptuMode;
      delete leg.velMode;
      if (index == 0) {
        delete leg.audio;
      }
    }
    if (filePath && filePath.length > 0) {
      fs.writeFile(filePath, JSON.stringify(tour), function (err) {
        if (err) {
          if (callback) callback(err);
          return console.error(err);
        }
        console.log("Tour Written: " + name);
        if (callback) callback(null); // FIX: signal success
      });
    } else {
      console.error("Tour location empty: " + name);
      // FIX: report the failure instead of referencing undefined `err`.
      if (callback) callback(new Error("Tour location empty: " + name));
    }
  }
  var tour = this.prepareExport();
  saveToFile(tour, callback);
};
// Produce a plain-object copy of this tour with absolute positions and
// per-leg velocities resolved, ready for writeToFile's stripping pass.
// NOTE(review): the `for (li in ...)` loops use an implicit global `li`
// (throws in strict mode); left untouched here, flagged for follow-up.
schema.methods.prepareExport = function () {
// TODO: Ensure Track and POI are loaded
var tour = this.toObject();
var prevLeg = false;
// Pass 1: calculate proper abs positions before preparing for export.
// 'poi' legs copy their point-of-interest; 'rel' legs accumulate onto
// the previous non-delay leg's abs position.
for (li in tour.legs) {
var leg = tour.legs[li];
if (leg.ptuMode == 'poi') {
leg.ptu.pan.abs = leg.ptu.poi.pan;
leg.ptu.tilt.abs = leg.ptu.poi.tilt;
} else if (leg.ptuMode == 'rel' && prevLeg) {
leg.ptu.pan.abs = prevLeg.ptu.pan.abs + leg.ptu.pan.rel;
leg.ptu.tilt.abs = prevLeg.ptu.tilt.abs + leg.ptu.tilt.rel;
}
// 'delay' legs do not advance the reference position.
if (leg.ptuMode !== 'delay') {
prevLeg = leg;
}
}
// Pass 2: calculate degrees per second for each leg when velMode is
// 'time'. NOTE(review): unlike pass 1, prevLeg is updated for every
// leg here (including 'delay') — confirm this asymmetry is intended.
prevLeg = false;
for (li in tour.legs) {
var leg = tour.legs[li];
if (prevLeg && leg.velMode == 'time') {
var time = Math.abs(leg.time) || 0;
if (time > 0) {
if (leg.ptuMode == 'delay') {
leg.ptu.delay = time;
} else if (leg.ptuMode == 'rel') {
leg.ptu.pan.vel = leg.ptu.pan.rel / time;
leg.ptu.tilt.vel = leg.ptu.tilt.rel / time;
} else {
leg.ptu.pan.vel = (leg.ptu.pan.abs - prevLeg.ptu.pan.abs) / time;
leg.ptu.tilt.vel = (leg.ptu.tilt.abs - prevLeg.ptu.tilt.abs) / time;
}
} else {
leg.ptu.pan.vel = 0;
leg.ptu.tilt.vel = 0;
}
leg.ptu.pan.vel = Math.abs(leg.ptu.pan.vel);
leg.ptu.tilt.vel = Math.abs(leg.ptu.tilt.vel);
// Counter-clockwise pans are encoded as negative velocity.
if (leg.ptu.direction === 'ccw') {
leg.ptu.pan.vel = -leg.ptu.pan.vel;
}
// Vel bounds: clamp to [5, 50] degrees/second when non-zero.
if (leg.ptu.pan.vel > 50) {
leg.ptu.pan.vel = 50;
} else if (leg.ptu.pan.vel < 5 && leg.ptu.pan.vel > 0) {
leg.ptu.pan.vel = 5;
}
if (leg.ptu.tilt.vel > 50) {
leg.ptu.tilt.vel = 50;
} else if (leg.ptu.tilt.vel < 5 && leg.ptu.tilt.vel > 0) {
leg.ptu.tilt.vel = 5;
}
// Quickest was using 50°/s, but should be omitted
if (leg.ptu.direction === 'quickest' && time === 0) {
delete leg.ptu.pan.vel;
delete leg.ptu.tilt.vel;
}
// Remove invalid tilt velocity when tilt diff is 0 so vector vel on control server is calculated correctly
if (prevLeg.ptu.tilt.abs - leg.ptu.tilt.abs == 0) {
delete leg.ptu.tilt.vel;
}
}
prevLeg = leg;
}
return tour;
};
/* bounds logic
if (leg.ptu.pan.abs > 180) {
leg.ptu.pan.abs = leg.ptu.pan.abs - 360;
} else if (leg.ptu.pan.abs < -180) {
leg.ptu.pan.abs = leg.ptu.pan.abs + 360;
}
if (leg.ptu.tilt.abs > 90) {
leg.ptu.tilt.abs = 90;
} else if (leg.ptu.tilt.abs < -90) {
leg.ptu.tilt.abs = -90;
}
*/
// Compile the schema into the Tour model and export it.
var Tour = mongoose.model("Tour", schema);
module.exports = Tour;
There's error on your async functions :
schema.pre('save', function (next)): next must be called inside the callback function
// Corrected hook body: next() runs only after the file write finishes,
// so the save no longer completes before the export exists.
tour.writeToFile(function (err) {
console.error(err);
next();
});
schema.methods.writeToFile = function (callback): you must invoke the callback on success too, not only on error
// Corrected write: the callback fires on error AND on success.
// FIX: the final else branch referenced `err`, which is not defined in
// that scope (it only exists inside the fs.writeFile callback) — pass
// a real Error describing the failure instead.
if (filePath && filePath.length > 0) {
  fs.writeFile(filePath, JSON.stringify(tour), function (err) {
    if (err) {
      if (callback) callback(err);
      return console.error(err);
    }
    console.log("Tour Written: " + name);
    if (callback) callback(null);
  });
} else {
  console.error("Tour location empty: " + name);
  if (callback) callback(new Error("Tour location empty: " + name));
}

$addToSet Based on Object key exists

I have array 'pets': [{'fido': ['abc']} that is a embeded document. When I add a pet to the array, how can I check to see if that pet already exists? For instance, if I added fido again... how can I check if only fido exists and not add it? I was hoping I could use $addToSet but I only want to check part of the set(the pets name).
// Add an (empty) entry keyed by petName to the user's pets array.
// FIX: the snippet was missing the function's closing `};`, making it
// a syntax error as posted.
User.prototype.updatePetArray = function (userId, petName) {
  userId = { _id: ObjectId(userId) };
  return this.collection.findOneAndUpdate(userId,
    { $addToSet: { pets: { [petName]: [] } } },
    { returnOriginal: false,
      maxTimeMS: QUERY_TIME });
};
Result of adding fido twice:
{u'lastErrorObject': {u'updatedExisting': True, u'n': 1}, u'ok': 1, u'value': {u'username': u'bob123', u'_id': u'56d5fc8381c9c28b3056f794', u'location': u'AT', u'pets': [{u'fido': []}]}}
{u'lastErrorObject': {u'updatedExisting': True, u'n': 1}, u'ok': 1, u'value': {u'username': u'bob123', u'_id': u'56d5fc8381c9c28b3056f794', u'location': u'AT', u'pets': [{u'fido': [u'abc']}, {u'fido': []}]}}
If there is always going to be "variable" content within each member of the "pets" array ( i.e petName as the key ) then $addToSet is not for you. At least not not at the array level where you are looking to apply it.
Instead you basically need an $exists test on the "key" of the document being contained in the array, then either $addToSet to the "contained" array of that matched key with the positional $ operator, or where the "key" was not matched then $push directly to the "pets" array, with the new inner content directly as the sole array member.
So if you can live with not returning the modified document, then "Bulk" operations are for you. In modern drivers with bulkWrite():
// Add `content` to the named pet's list, creating the pet entry when it
// does not exist. Exactly one of the two conditional updates can match,
// so a single bulkWrite covers both cases atomically per document.
User.prototype.updatePetArray = function (userId, petName, content) {
  const petKey = "pets." + petName;
  // Matches when the pet key already exists anywhere in the array...
  const filter1 = { "_id": ObjectId(userId), [petKey]: { "$exists": true } };
  // ...and its complement, when no element carries the key yet.
  const filter2 = { "_id": ObjectId(userId), [petKey]: { "$exists": false } };
  // Existing pet: add to its contained set via the positional operator.
  const update1 = { "$addToSet": { ["pets.$." + petName]: content } };
  // New pet: push a fresh { petName: [content] } member onto pets.
  const update2 = { "$push": { "pets": { [petName]: [content] } } };
  // Return the promise that yields the BulkWriteResult of both calls
  return this.collection.bulkWrite([
    { "updateOne": {
      "filter": filter1,
      "update": update1
    }},
    { "updateOne": {
      "filter": filter2,
      "update": update2
    }}
  ]);
};
If you must return the modified document, then you are going to need to resolve each call and return the one that actually matched something:
// Variant that returns the modified document: run both conditional
// findOneAndUpdate calls and resolve with whichever one matched.
// FIX: the original wrapped this in `new Promise(function (resolve,
// reject) { ... })`, where (a) `this` inside the executor no longer
// referred to the User instance, so `this.collection` was undefined,
// and (b) re-wrapping Promise.all is the explicit-construction
// anti-pattern. Returning the Promise.all chain directly fixes both
// while keeping the same resolved value and rejection behavior.
User.prototype.updatePetArray = function (userId, petName, content) {
  var filter1 = { "_id": ObjectId(userId) },
      filter2 = { "_id": ObjectId(userId) },
      update1 = { "$addToSet": {} },
      update2 = { "$push": { "pets": {} } };
  filter1["pets." + petName] = { "$exists": true };
  filter2["pets." + petName] = { "$exists": false };
  var setter1 = {};
  setter1["pets.$." + petName] = content;
  update1["$addToSet"] = setter1;
  var setter2 = {};
  setter2[petName] = [content];
  update2["$push"]["pets"] = setter2;
  // Only one of the two $exists filters can match the document.
  var operations = [
    this.collection.findOneAndUpdate(filter1, update1, { "returnOriginal": false }),
    this.collection.findOneAndUpdate(filter2, update2, { "returnOriginal": false })
  ];
  // Both run; discard the null result and yield the modified document.
  return Promise.all(operations).then(function (result) {
    return result.filter(function (el) { return el.value != null; })[0].value;
  });
};
In either case this requires "two" update attempts where only "one" will actually succeed and modify the document, since only one of the $exists tests is going to be true.
So as an example of that first case, the "query" and "update" are resolving after interpolation as:
{
"_id": ObjectId("56d7b759e955e2812c6c8c1b"),
"pets.fido": { "$exists": true }
},
{ "$addToSet": { "pets.$.fido": "ccc" } }
And the second update as:
{
"_id": ObjectId("56d7b759e955e2812c6c8c1b"),
"pets.fido": { "$exists": false }
},
{ "$push": { "pets": { "fido": ["ccc"] } } }
Given variables of:
userId = "56d7b759e955e2812c6c8c1b",
petName = "fido",
content = "ccc";
Personally I would not be naming keys like this, but rather change the structure to:
{
"_id": ObjectId("56d7b759e955e2812c6c8c1b"),
"pets": [{ "name": "fido", "data": ["abc"] }]
}
That makes the update statements easier, and without the need for variable interpolation into the key names. For example:
{
"_id": ObjectId(userId),
"pets.name": petName
},
{ "$addToSet": { "pets.$.data": content } }
and:
{
"_id": ObjectId(userId),
"pets.name": { "$ne": petName }
},
{ "$push": { "pets": { "name": petName, "data": [content] } } }
Which feels a whole lot cleaner and can actually use an "index" for matching, which of course $exists simply cannot.
There is of course more overhead if using .findOneAndUpdate(), since this is afterall "two" actual calls to the server for which you need to await a response as opposed to the Bulk method which is just "one".
But if you need the returned document ( option is the default in the driver anyway ) then either do that or similarly await the Promise resolve from the .bulkWrite() and then fetch the document via .findOne() after completion. Albeit that doing it via .findOne() after the modification would not truly be "atomic" and could possibly return the document "after" another similar modification was made, and not only in the state of that particular change.
N.B Also assuming that apart from the keys of the subdocuments in "pets" as a "set" that your other intention for the array contained was adding to that "set" as well via the additional content supplied to the function. If you just wanted to overwrite a value, then just apply $set instead of $addToSet and similarly wrap as an array.
But it sounds reasonable that the former was what you were asking.
BTW, please clean up my horrible setup code for the query and update objects in this example when using it in your actual code :)
As a self contained listing to demonstrate:
// Self-contained demo: seeds one known document, then exercises both
// variants — bulkWrite with two conditional updateOnes, and two
// findOneAndUpdate calls joined with Promise.all.
var async = require('async'),
mongodb = require('mongodb'),
MongoClient = mongodb.MongoClient;
MongoClient.connect('mongodb://localhost/test',function(err,db) {
var coll = db.collection('pettest');
var petName = "fido",
content = "bbb";
// Build the complementary $exists filters and their updates; exactly
// one pair can match any given document.
var filter1 = { "_id": 1 },
filter2 = { "_id": 1 },
update1 = { "$addToSet": {} },
update2 = { "$push": { "pets": {} } };
filter1["pets." + petName] = { "$exists": true };
filter2["pets." + petName] = { "$exists": false };
var setter1 = {};
setter1["pets.$." + petName] = content;
update1["$addToSet"] = setter1;
var setter2 = {};
setter2[petName] = [content];
update2["$push"]["pets"] = setter2;
console.log(JSON.stringify(update1,undefined,2));
console.log(JSON.stringify(update2,undefined,2));
// Reset the collection to a single known document before each variant.
function CleanInsert(callback) {
async.series(
[
// Clean data
function(callback) {
coll.deleteMany({},callback);
},
// Insert sample
function(callback) {
coll.insert({ "_id": 1, "pets": [{ "fido": ["abc"] }] },callback);
}
],
callback
);
}
async.series(
[
CleanInsert,
// Modify Bulk
function(callback) {
coll.bulkWrite([
{ "updateOne": {
"filter": filter1,
"update": update1
}},
{ "updateOne": {
"filter": filter2,
"update": update2
}}
]).then(function(res) {
console.log(JSON.stringify(res,undefined,2));
coll.findOne({ "_id": 1 }).then(function(res) {
console.log(JSON.stringify(res,undefined,2));
callback();
});
},callback);
},
CleanInsert,
// Modify Promise all
function(callback) {
var operations = [
coll.findOneAndUpdate(filter1,update1,{ "returnOriginal": false }),
coll.findOneAndUpdate(filter2,update2,{ "returnOriginal": false })
];
Promise.all(operations).then(function(res) {
//console.log(JSON.stringify(res,undefined,2));
// Only the call whose filter matched yields a non-null value.
console.log(
JSON.stringify(
res.filter(function(el) { return el.value != null })[0].value
)
);
callback();
},callback);
}
],
function(err) {
if (err) throw err;
db.close();
}
);
});
And the output:
{
"$addToSet": {
"pets.$.fido": "bbb"
}
}
{
"$push": {
"pets": {
"fido": [
"bbb"
]
}
}
}
{
"ok": 1,
"writeErrors": [],
"writeConcernErrors": [],
"insertedIds": [],
"nInserted": 0,
"nUpserted": 0,
"nMatched": 1,
"nModified": 1,
"nRemoved": 0,
"upserted": []
}
{
"_id": 1,
"pets": [
{
"fido": [
"abc",
"bbb"
]
}
]
}
{"_id":1,"pets":[{"fido":["abc","bbb"]}]}
Feel free to change to different values to see how different "sets" are applied.
Please try this one with string template, here is one example running under mongo shell
> var name = 'fido';
> var t = `pets.${name}`; // string template, which can interpolate the name variable
> db.pets.find()
{ "_id" : ObjectId("56d7b5019ed174b9eae2b9c5"), "pets" : [ { "fido" : [ "abc" ]} ] }
With the following update command, it will not update it if the same pet name exists.
> db.pets.update({[t]: {$exists: false}}, {$addToSet: {pets: {[name]: []}}})
WriteResult({ "nMatched" : 0, "nUpserted" : 0, "nModified" : 0 })
If the pets document is
> db.pets.find()
{ "_id" : ObjectId("56d7b7149ed174b9eae2b9c6"), "pets" : [ { "fi" : [ "abc" ] } ] }
After update with
> db.pets.update({[t]: {$exists: false}}, {$addToSet: {pets: {[name]: []}}})
WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 })
The result shows add the pet name if it does Not exist.
> db.pets.find()
{ "_id" : ObjectId("56d7b7149ed174b9eae2b9c6"), "pets" : [ { "fi" : [ "abc" ] }, { "fido" : [ ] } ] }

formatting pre-aggregated data

I'm trying to prepare a pre-aggregated data set from a log file for later analysis
for example, I have a log file such as this
2016-01-01 11:13:06 -0900 alphabetical|a
2016-01-01 11:20:16 -0900 alphabetical|a
2016-01-01 11:21:52 -0900 alphabetical|b
The data (after data/time/timezone) is split on a pipe
entry|detail
I'm creating a data set that has a separate document for each year-month and entry
my data as a result looks like this : https://jsonblob.com/56a7d7d8e4b01190df4b8a55
{
"action":"alphabetical",
"date":"2016-0",
"detail":{
"a":{
"daily":{
"1":5,
"2":4,
"3":5
},
"monthly":14
},
"b":{
"daily":{
"1":5,
"2":5,
"3":2
},
"monthly":12
},
"c":{
"daily":{
"1":2,
"2":2,
"3":2
},
"monthly":6
},
"d":{
"daily":{
"3":1
},
"monthly":1
}
},
"monthly":33,
"daily":{
"1":12,
"2":11,
"3":10
},
"dow":{
"0":10,
"5":12,
"6":11
}
}
by using
// Upsert one (directory, year-month, action) document, incrementing the
// per-detail daily/monthly counters plus the document-wide monthly,
// daily and day-of-week counters for this log entry.
var logHit = function (data, callback) {
  const detail = data.data.info[1];
  // escape() sanitizes the detail value for use inside a dotted key
  // path, exactly as the original did.
  const detailKey = 'detail.' + escape(detail);
  const inc = {
    [detailKey + '.daily.' + data.date.d]: 1,
    [detailKey + '.monthly']: 1,
    monthly: 1,
    ['daily.' + data.date.d]: 1,
    ['dow.' + data.date.dow]: 1
  };
  collection.update(
    {
      directory_id: data.directory_id,
      date: data.date.y + '-' + data.date.m,
      action: data.data.info[0],
    },
    { $inc: inc },
    { upsert: true },
    function (error, result) {
      assert.equal(error, null);
      assert.equal(1, result.result.n);
      callback();
    });
}
while the data that I'm looking to store is included, working with it as an object series makes it harder to process when it is retrieved. I'm using d3.js and having to convert objects to arrays.
How do I store the data in arrays instead of objects like this https://jsonblob.com/56a7da76e4b01190df4b8a74
{
"action":"alphabetical",
"date":"2016-0",
"detail":[
{
"name":"a",
"daily":[
{
"count":5
},
{
"count":4
},
{
"count":5
}
],
"monthly":14
},
{
"name":"b",
"daily":[
{
"count":5
},
{
"count":5
},
{
"count":2
}
],
"monthly":12
},
{
"name":"c",
"daily":[
{
"count":2
},
{
"count":2
},
{
"count":2
}
],
"monthly":6
},
{
"name":"d",
"daily":[
{
},
{
},
{
"count":1
}
],
"monthly":1
}
],
"monthly":33,
"daily":{
"1":12,
"2":11,
"3":10
},
"dow":{
"0":10,
"5":12,
"6":11
}
}
where the objects become part of an array, and the key instead is put inside an array, similar this answer https://stackoverflow.com/a/30751981/197546
In MongoDB, array documents can be referenced by index, but not by value. For instance, in your target data model you can change the name value of the first array element with the update argument:
{ $set: { "detail.0.name" : "me" } }
Or even increment a deeply nested value like:
{ $inc: { "detail.0.daily.0.count" : 1 } }
But in both cases knowing the index is necessary, which doesn't seem like that would work for your use case.
I would recommend referencing the docs on Array Update operators as well.

MongoSkin wrong insertion

I have an array with countries with the following structure:
{
"code": "ZW",
"name": "Zimbabwe",
"zipPattern": "[\\s\\S]*",
"states": [
{
"name": "Bulawayo"
},
{
"name": "Harare"
},
{
"name": "Manicaland"
},
{
"name": "Mashonaland Central"
},
{
"name": "Mashonaland East"
},
{
"name": "Mashonaland West"
},
{
"name": "Masvingo"
},
{
"name": "Matabeleland North"
},
{
"name": "Matabeleland South"
},
{
"name": "Midlands"
}
]
}
I am trying to insert them into MongoDb using MongoSkin with the following code
var countries = require('./mongo/ready/Countries');
// Seed countries (and their states) only when the collection is empty.
// FIX: the original `for` loop declared `obj`/`states` with
// function-scoped `var`, so by the time each insert callback ran,
// `states` referred to the LAST country's states — exactly the
// reported symptom (every country got Zimbabwe's states). Using
// forEach gives each callback its own closure over its own country.
db.collection('countries').find().toArray(function (err, result) {
  if (result.length === 0) {
    countries.forEach(function (country) {
      var states = country.states;
      db.collection('countries').insert({
        name: country.name,
        code: country.code,
        zipPattern: country.zipPattern
      }, function (error, countryResult) {
        var id = countryResult[0]._id;
        states.forEach(function (state) {
          db.collection('states').insert({
            countryId: id,
            name: state.name
          }, function (stateError, stateResult) {
            if (stateError) console.log(stateError);
            console.log(stateResult);
          });
        });
      });
    });
  }
});
but the code inserts the states of the last country in the array (Zimbabwe) for each country in the array instead of the correct states. How can I fix it?
Generally we shouldn't issue async queries (inserts) from inside a synchronous loop (a simple for loop); it gives abnormal results. Node's async library provides asynchronous iteration helpers to overcome this.
First of all require async module for this.
var async = require('async');
Now you can use following code for insertion of countries and their respective states
// Insert each country, then its states, reporting completion per
// country only after ALL of its states are inserted.
// FIX: the original called the outer callback() immediately after
// STARTING the inner async.each (so completion fired early), and gave
// the inner async.each no final callback (so state-insert errors were
// passed to the wrong callback and completion was never tracked).
async.each(countries, function (obj, callback) {
  var states = obj.states;
  db.collection('countries').insert({
    name: obj.name,
    code: obj.code,
    zipPattern: obj.zipPattern
  }, function (error, countryResult) {
    if (error) return callback(error);
    var id = countryResult[0]._id;
    // The inner each's completion callback IS this country's callback.
    async.each(states, function (state, stateCallback) {
      db.collection('states').insert({
        countryId: id,
        name: state.name
      }, stateCallback);
    }, callback);
  });
}, function (err) {
  if (err) {
    // handle error here
  } else {
    // do stuff on completion of insertion
  }
});
Thanks

Resources