formatting pre-aggregated data - node.js

I'm trying to prepare a pre-aggregated data set from a log file for later analysis.
For example, I have a log file such as this:
2016-01-01 11:13:06 -0900 alphabetical|a
2016-01-01 11:20:16 -0900 alphabetical|a
2016-01-01 11:21:52 -0900 alphabetical|b
The data (after the date/time/timezone) is split on a pipe:
entry|detail
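For reference, splitting a line like that in Node might look roughly like this (the variable names are just for illustration, not from my actual script):
// Sketch: split one raw log line into its timestamp and entry|detail parts
var line = '2016-01-01 11:13:06 -0900 alphabetical|a';
var parts = line.split(' ');    // ['2016-01-01', '11:13:06', '-0900', 'alphabetical|a']
var info = parts[3].split('|'); // ['alphabetical', 'a'] -> entry, detail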
I'm creating a data set that has a separate document for each year-month and entry.
As a result, my data looks like this: https://jsonblob.com/56a7d7d8e4b01190df4b8a55
{
  "action": "alphabetical",
  "date": "2016-0",
  "detail": {
    "a": {
      "daily": { "1": 5, "2": 4, "3": 5 },
      "monthly": 14
    },
    "b": {
      "daily": { "1": 5, "2": 5, "3": 2 },
      "monthly": 12
    },
    "c": {
      "daily": { "1": 2, "2": 2, "3": 2 },
      "monthly": 6
    },
    "d": {
      "daily": { "3": 1 },
      "monthly": 1
    }
  },
  "monthly": 33,
  "daily": { "1": 12, "2": 11, "3": 10 },
  "dow": { "0": 10, "5": 12, "6": 11 }
}
by using
var logHit = function(data, callback) {
  var update = {};
  var inc = {};
  var detail = data.data.info[1];
  inc['detail.' + escape(detail) + '.daily.' + data.date.d] = 1;
  inc['detail.' + escape(detail) + '.monthly'] = 1;
  inc['monthly'] = 1;
  inc['daily.' + data.date.d] = 1;
  inc['dow.' + data.date.dow] = 1;
  update['$inc'] = inc;
  collection.update(
    {
      directory_id: data.directory_id,
      date: data.date.y + '-' + data.date.m,
      action: data.data.info[0],
    },
    update,
    { upsert: true },
    function(error, result) {
      assert.equal(error, null);
      assert.equal(1, result.result.n);
      callback();
    });
}
While the data that I'm looking to store is included, working with it as an object series makes it harder to process when it is retrieved. I'm using d3.js and having to convert objects to arrays.
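For example, the kind of conversion I end up doing before handing the data to d3 looks roughly like this (a sketch against the document shape above, where doc is one retrieved document):
// Sketch: turn the keyed "detail" object into an array that d3 can iterate over
var detailArray = Object.keys(doc.detail).map(function (name) {
  return {
    name: name,
    monthly: doc.detail[name].monthly,
    daily: doc.detail[name].daily
  };
});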
How do I store the data in arrays instead of objects, like this? https://jsonblob.com/56a7da76e4b01190df4b8a74
{
  "action": "alphabetical",
  "date": "2016-0",
  "detail": [
    {
      "name": "a",
      "daily": [ { "count": 5 }, { "count": 4 }, { "count": 5 } ],
      "monthly": 14
    },
    {
      "name": "b",
      "daily": [ { "count": 5 }, { "count": 5 }, { "count": 2 } ],
      "monthly": 12
    },
    {
      "name": "c",
      "daily": [ { "count": 2 }, { "count": 2 }, { "count": 2 } ],
      "monthly": 6
    },
    {
      "name": "d",
      "daily": [ {}, {}, { "count": 1 } ],
      "monthly": 1
    }
  ],
  "monthly": 33,
  "daily": { "1": 12, "2": 11, "3": 10 },
  "dow": { "0": 10, "5": 12, "6": 11 }
}
where the objects become elements of an array and the key is instead stored as a value inside each element, similar to this answer: https://stackoverflow.com/a/30751981/197546

In MongoDB, array elements can be referenced by index, but not by value. For instance, in your target data model you can change the name value of the first array element with the update argument:
{ $set: { "detail.0.name": "me" } }
Or even increment a deeply nested value like:
{ $inc: { "detail.0.daily.0.count": 1 } }
But in both cases you need to know the index, which doesn't seem workable for your use case.
I would recommend referencing the docs on Array Update operators as well.
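To illustrate, with the array model a single counter increment would first need a read to discover the element's index, roughly like this sketch (two round trips, and no longer a single atomic upsert):
// Sketch only: look up the index of the element by value, then increment by position
collection.findOne({ date: '2016-0', action: 'alphabetical' }, function (error, doc) {
  var idx = doc.detail.findIndex(function (d) { return d.name === 'a'; });
  if (idx === -1) return; // element not present, so there is nothing to increment
  var inc = {};
  inc['detail.' + idx + '.monthly'] = 1;
  collection.update({ _id: doc._id }, { $inc: inc }, function (error, result) {
    // handle error / result
  });
});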

Related

Count nested array inside document using updateOne() function

const reset_qr_list_and_update_count = await stock_read_log.updateOne(
  {
    payload: { $ne: req.body.payload },
    "qr_list.payload": req.body.new_qr_list[indexx].payload,
    company_id: req.body.company_id
  },
  {
    "$pull": {
      "qr_list": {
        payload: req.body.new_qr_list[indexx].payload
      }
    },
    $set: {
      qty: xx
    },
  }
);
What I want for the $set is something like:
$set: {
  qty: model.aggregation({
    // the query
  }).count()
},
After pulling one of the items from the list above, I want to re-count the items that are left. How can I achieve that within this function?
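One possible approach (not from an accepted answer; it assumes MongoDB 4.2+ pipeline-style updates) is a follow-up update that recomputes qty from the array length after the $pull:
// Sketch: second call that sets qty to the number of elements left in qr_list
await stock_read_log.updateOne(
  { company_id: req.body.company_id },
  [ { $set: { qty: { $size: "$qr_list" } } } ]
);
This does mean two database calls rather than one.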

Aggregate with dynamic field path - MongoDB and Node.js

I have a collection, 'products', that contains the following documents:
{
  productName: "computer",
  updateAt: "2022-07-12T12:44:47.485Z",
  createAt: "2022-06-12T10:34:03.485Z",
  changeAt: "2022-09-12T10:39:40.485Z"
}
I want to create an aggregation that converts the field "updateAt" from a string to a date.
For this, I created this aggregation:
db.products.aggregate([
  {
    $set: {
      updateAt: {
        $dateFromString: {
          dateString: '$updateAt'
        }
      }
    },
  },
  {
    $out: 'products'
  }
])
It works fine for this need, but as you can see I specified the field path "updateAt" in a hard-coded way. I want to use the above aggregation in a dynamic way, considering I have an array of fields that I want to change:
const fields = ['updateAt', 'createAt', 'changeAt']
I want to loop over the fields array and use each field as a field path, so I can pass the field name to the aggregation, something like this:
fields.forEech(field -> {
  db.products.aggregate([
    {
      $set: {
        `${field}`: {
          $dateFromString: {
            dateString: `$${field}`
          }
        }
      },
    },
    {
      $out: 'products'
    }
  ])
}
As you can understand, it's not working for me.
How can I achieve my goal?
You have some errors in your Node.js function. Also, the aggregate method returns a Promise, so you will need to wait for it to resolve before moving on.
Try this:
const fields = ['updateAt', 'createAt', 'changeAt']
for (let i = 0; i < fields.length; i++) {
  let field = fields[i];
  await db.products.aggregate([
    {
      "$set": {
        [field]: {
          "$dateFromString": {
            "dateString": `$${field}`
          }
        }
      },
    },
    {
      "$out": 'products'
    }
  ])
}
Also, make the function containing this piece of code async.
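For example, the surrounding function could look like this (the function name here is just a placeholder):
// Sketch: await is only valid inside an async function
async function convertDateFields() {
  // ... the for loop from the answer above goes here ...
}
convertDateFields().catch(console.error);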

Mongoose findByIdAndUpdate - how to update one field using a sum from another array field

So this is what my document looks like:
"donator": [
{
"_id": "5edbd7d182af1f5aceab62bb",
"donatorName": "Niki",
"donationValue": 5000000
},
{
"_id": "5edbd7d182af1f5aceab62bc",
"donatorName": "Brian",
"donationValue": 5000000
}
],
"currentValue" : 1000000
I get my currentValue from the sum of donationValue in the donator array:
donator.reduce((a, { donationValue }) => a + donationValue, 0);
and I want to update the document by adding a new object to the donator array and have the currentValue field update automatically when the data is updated.
I tried using aggregate
MyCollection.findByIdAndUpdate(id, [
  {
    $set: req.body,
    $set: { currentValue: { $sum: { $sum: donator.donationValue } } },
  },
]);
It's a good practice to sanitise your input and not directly use req.body.
so let's say
const donator = sanitizeDonator(req.body)
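sanitizeDonator is only a placeholder; a minimal sketch of what it might do, given that the pipeline update below won't auto-generate the subdocument _id:
// Sketch: keep only the expected fields and create the _id ourselves,
// because a pipeline update will not generate subdocument _ids for us
const mongoose = require('mongoose');
function sanitizeDonator(body) {
  return {
    _id: new mongoose.Types.ObjectId(),
    donatorName: String(body.donatorName),
    donationValue: Number(body.donationValue)
  };
}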
Then you can add donator and update currentValue as follows.
Note that _id on donator won't be auto generated by the update query, so it has to be present in the donator object.
MyCollection.findByIdAndUpdate(id, [
  {
    $set: {
      donator: {
        $concatArrays: ["$donator", [donator]] // adding donator as the last element
      }
    }
  },
  {
    $set: {
      currentValue: {
        $reduce: {
          initialValue: 0,
          input: "$donator",
          in: {
            $add: ["$$this.donationValue", "$$value"]
          }
        }
      }
    }
  }
]);
Relevant APIs
$concatArrays,
$reduce
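A usage sketch (pipeline updates need MongoDB 4.2+; with mongoose, pass { new: true } to get the modified document back):
// Sketch: await the update and read the recomputed sum from the returned document
const updated = await MyCollection.findByIdAndUpdate(
  id,
  updatePipeline, // the two-stage $set array shown above
  { new: true }
);
console.log(updated.currentValue);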

Set field if doesn't exist, push to array, then return document

I was hoping to do this in one operation, hitting the database just once, but I don't know if it's possible with the APIs.
What I want is to:
find the document by id (which will always exist)
add the object if it doesn't already exist: { dayOfYear: 3, dataStuff: [{ time: Date(arg), data: 123 }] }
push { time: Date(arg), data: 123 } to the dataStuff array
return the modified document
I cooked up something along the lines of
return this.collection.findOneAndUpdate(dataDoc,
{ $set: { dayOfYear: reqBody.dayOfYear ,
$addToSet: { dataStuff: { time: Date(reqBody.date), data: reqBody.data }
}
but no success
The update object needs "separate" top-level keys for each atomic operation:
return this.collection.findOneAndUpdate(
  dataDoc,
  {
    "$set": { dayOfYear: reqBody.dayOfYear },
    "$addToSet": {
      "dataStuff": { "time": Date(reqBody.date), "data": reqBody.data }
    }
  },
  { "returnOriginal": false }
)
With .findOneAndUpdate() from the core API you also need to set the "returnOriginal" option to false in order to return the modified document. With the mongoose API, it is { "new": true } instead.
In this syntax, both calls return a Promise to be resolved, not a direct response.
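For example (a sketch, with error handling omitted; with the core driver the modified document is on result.value):
// Sketch: await the call instead of passing a callback
const result = await this.collection.findOneAndUpdate(
  dataDoc,
  update, // the $set / $addToSet object shown above
  { "returnOriginal": false }
);
console.log(result.value); // the modified document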
If you want to check if the whole object exists, you'll have to compare all the properties, like
this.collection.update({
  _id: dataDoc,
  dayOfYear: {
    $ne: reqBody.dayOfYear
  },
  dataStuff: {
    $elemMatch: {
      time: {
        $ne: Date(arg)
      },
      data: {
        $ne: reqBody.data
      }
    }
  }
}, {
  $set: {
    dayOfYear: reqBody.dayOfYear,
  },
  $addToSet: {
    dataStuff: {
      time: Date(arg),
      data: reqBody.data
    }
  }
});
This way, you ensure that you always update one or zero collection items. The first argument is the query that either returns no elements or a single element (because _id is there), and if it returns one element, it gets updated. Which is, I believe, exactly what you need.

Upserting array into document in mongoose

I have a JSON array of objects and I'm doing the following loop in order to upsert (insert or update) the data into MongoDB using mongoose:
var currentMiniApp;
function retResult(err) {
  if (err) {
    console.log(err);
  }
}
for (var i = 0; i < miniappData.miniapps.length; i++) {
  currentMiniApp = new MiniApp(miniappData.miniapps[i]);
  MiniApp.findOneAndUpdate(
    { id: currentMiniApp.id },
    currentMiniApp,
    { upsert: true },
    retResult);
}
How can I do it in one command without using a loop?
I want the resulting document to contain the items in the data array.
My data looks like:
{
  "miniapps": [
    {
      "id": "app1",
      "icon": "256fko6.png"
    },
    {
      "id": "app2",
      "icon": "icon60x60.png"
    }
  ]
}
Consider using the $addToSet operator with the $each modifier in your upsert update. This update operation adds multiple values to an array unless the values are already present, in which case $addToSet does nothing to that array:
var miniAppids = [];
for (var i = 0; i < miniappData.miniapps.length; i++) {
  currentMiniApp = new MiniApp(miniappData.miniapps[i]);
  miniAppids.push(currentMiniApp.id);
}
MiniApp.update(
  { "id": { "$in": miniAppids } },
  { "$addToSet": { "miniapps": { "$each": miniappData.miniapps } } },
  { "upsert": true },
  retResult
)
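As a small aside, the id list itself can also be built without an explicit loop (a minor sketch):
// Sketch: derive the id list directly from the input array
var miniAppids = miniappData.miniapps.map(function (m) { return m.id; });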
