Related
I need to return only the data that matches the range, and nothing else. Currently, when I query, I get back the entire document that meets the conditions.
I don't know how to properly compose a query so that the database returns the ready data range, without needing to filter the response afterwards on the server side.
Document structure:
{
symbol: "test",
data: [{
timestamp: 1,
value: 10,
},
{
timestamp: 2,
value: 20,
},
{
timestamp: 3,
value: 30,
},
{
timestamp: 4,
value: 40,
},
]
}
My request:
// Fetches the document for `symbol` whose `data` array contains at least one
// element with timestamp in [from, to), then resolves with its `data` array.
// NOTE(review): $elemMatch in the *query* only selects which documents match —
// it does not trim the array, so the full `data` array is returned and still
// needs filtering on the application side (the problem described above).
// NOTE(review): if no document matches, `res` is null and `res.data` throws —
// confirm callers guarantee a match.
function request(symbol, from, to) {
return model.findOne({
symbol,
data: {
$elemMatch: {
// at least one element in this timestamp window
timestamp: {
$gte: from,
$lt: to
}
}
}
}).then(res => res.data)
}
request('test', 2, 3)
I got the response as the full document structure. Using res.data.filter is a bad approach because it has a big performance impact. Only the required data needs to be returned.
Example response as needed:
{
symbol: "test",
data: [
{
timestamp: 2,
value: 20,
},
{
timestamp: 3,
value: 30,
},
]
}
// or better
[
{
timestamp: 2,
value: 20,
},
{
timestamp: 3,
value: 30,
},
]
Thanks for the advice. Tried different variants from stackoverflow, nothing helped.
You don't need the $elemMatch operator; you can query the subdocument directly:
// Resolves with only the `data` elements whose timestamp falls in [from, to).
// The filtering happens server-side via an aggregation $filter projection, so
// the full array is never shipped back to the application.
//
// @param {string} symbol - document selector
// @param {number} from   - inclusive lower timestamp bound
// @param {number} to     - exclusive upper timestamp bound
// @returns {Promise<Array<{timestamp: number, value: number}>>} matching
//          elements, or [] when no document matches
//
// (The original snippet had an unbalanced `}` before `.then`, and a plain
// find on 'data.timestamp' still returns the whole array — only $filter /
// positional projection actually trims it.)
function request(symbol, from, to) {
  return model.aggregate([
    { $match: { symbol } },
    { $project: {
      _id: 0,
      data: {
        $filter: {
          input: '$data',
          as: 'entry',
          cond: {
            $and: [
              { $gte: ['$$entry.timestamp', from] },
              { $lt: ['$$entry.timestamp', to] }
            ]
          }
        }
      }
    } }
  ]).then(res => (res.length ? res[0].data : []));
}
I have a MongoDB schema that I have some time a single array and sometimes I have more than 20 arrays values in it, each array has a field value which I want to sum together and insert the sum into another field in my MongoDB.
Here is what I am trying to say. Below is my schema — how can I add together the weight values of every package element inserted into the schema and store the result in my totalWeight field?
{
"status": "In warehouse",
"paymentStatus": "incomplete",
"_id": "5d8b56476a3e4ae2d01f2953",
"unitNo": "0002",
"warehouseNo": "0001",
"trackingNo": "FPZ848505936",
"packages": [
{
"date": "2019-09-26T06:30:39.561Z",
"_id": "5d8c5b0f756838f78d5205d7",
"category": "chil",
"quantity": "177",
"description": "a valueablegoods",
"width": 15,
"itemweight": 123,
"length": 17,
"height": 21,
"dimension": 31.25903614457831,
"weight": 32.25903614457831
},
{
"date": "2019-09-26T06:30:39.561Z",
"_id": "5d8c5b0f756838f78d5202dd",
"category": "chil",
"quantity": "177",
"description": "a valueablegoods",
"width": 15,
"itemweight": 123,
"length": 17,
"height": 21,
"dimension": 35.25903614457831,
"weight": 30
},
{
"date": "2019-09-26T06:30:39.561Z",
"_id": "5d8c5b0f756838f78d51aeq",
"category": "chil",
"quantity": "177",
"description": "a valueablegoods",
"width": 15,
"itemweight": 123,
"length": 17,
"height": 21,
"dimension": 32.25903614457831,
"weight": 44
}
],
"totalWeigth": "This should add all weight values in my packages array together; if there is only one package, it should be just that one's weight",
"date": "2019-09-25T11:57:59.359Z",
"__v": 0
}
This is the API route that adds packages to the packages array field, and I want totalWeight to be updated any time a new package is added or updated.
// #route POST api/admins/addshipment/:unitNo
// #desc Add shipment for each customer
// #access Private
// POST api/admins/addshipment/:unitNo — append a package to the customer's
// open (unpaid) shipment.
// NOTE(review): the second argument to findOne is a *projection*, not a
// filter, so {paymentStatus: "incomplete"} here does not restrict the match.
router.post(
'/addshipment/:unitNo',
passport.authenticate('jwt', { session: false }),
(req, res) => {
Shipments.findOne({unitNo: req.params.unitNo}, {paymentStatus: "incomplete"})
.then(shipments => {
// No open shipment (or already paid): nothing to add to.
if(shipments === null || shipments.paymentStatus === "complete"){
const errwarehouse = "This user doesn't have an existing warehouse";
return res.status(404).json(errwarehouse);
}else{
if (shipments.paymentStatus === "incomplete") {
// Picks the larger of `initial` and `dimension` as the billable weight.
// NOTE(review): neither `initial` nor `dimension` is defined in this
// scope — presumably computed elsewhere from req.body; confirm.
function getPrice(){
if (initial > dimension){
return initial
}else if(initial === dimension) {
return initial
}else{
return dimension
}
}
// Package payload assembled from the request body.
const newPackages = {
category: req.body.category,
quantity: req.body.quantity,
description: req.body.description,
width: req.body.width,
itemweight: req.body.itemweight,
length: req.body.length,
height: req.body.height,
dimension,
weight: getPrice(),
};
// NOTE(review): the third argument below is broken — `$sum` is an
// aggregation operator, not an update operator, and `{"packages.weight"}`
// is not valid object-literal syntax. Use a second update with `$inc`
// (or an aggregation-pipeline update on MongoDB 4.2+) instead.
// There is also no `.catch`, so errors are silently dropped.
Shipments.findOneAndUpdate({unitNo: req.params.unitNo ,paymentStatus: "incomplete"},
{"$push": {"packages": newPackages}}, {totalWeight: {"$sum" : {"packages.weight"}}}) // push the package, attempt (invalidly) to recompute the total
.then(shipments=> res.json(shipments))
}
}
});
});
Thank you
// Mongo-shell backfill: compute every document's package weight total with an
// aggregation, then write each total back with a per-document update.
// NOTE(review): this does one round trip per document; a pipeline update or
// bulkWrite (see below) is far cheaper on large collections.
var users =db.users.aggregate([
// Flatten the array so each package element becomes its own document.
{$unwind:"$packages"},
{
$group:
{
// Re-group by original document id, summing element weights.
_id: "$_id",
totalWeigth: { $sum: "$packages.weight" }
}
}
]).toArray()
// Persist each computed total. The field is spelled "totalWeigth" to match
// the typo in the sample document — confirm which spelling the schema uses.
users.forEach((ele)=>{
db.users.update({_id:ele._id},{$set:{totalWeigth:ele.totalWeigth}})
})
If you actually have MongoDB 4.2 or greater then you can use the new aggregation syntax available for updates. This essentially means adding one of the valid aggregation pipeline statements of either $addFields, $set ( alias to $addFields to make "updates" easier to read ), $projector $replaceRoot, and then actual aggregation operators in order to do the manipulation. In this case $sum:
// MongoDB 4.2+ only: passing an array as the update makes it an aggregation
// pipeline, so $set may use aggregation operators. $sum over an array path
// totals that field across all elements, entirely server-side, for every
// document in a single updateMany call.
let writeResult = await db.collection("collection").updateMany(
{},
[{ "$set": { "totalWeight": { "$sum": "$packages.weight" } } }]
);
That adds a new fields of totalWeight to every document based on the values present in the whole array of each document.
The main benefit here is that this is a single request to the server which actually performs ALL updating on the server and requires no information from the collection to be sent back to the client for iteration.
If you have an earlier version ( I suggest you don't use anything earlier than 3.4, but even 3.2 would do here ), then you could use bulkWrite() in a loop:
// One-off migration for MongoDB < 4.2: compute each document's totalWeight
// as the sum of its packages[].weight via an aggregation $project, then
// write the values back with batched bulkWrite calls so at most 1000
// update operations travel per request.
//
// (Fixes from the original: a stray `})` closing the inner `if` was a
// syntax error, `!=` is replaced with strict `!==`, and batches flush at
// exactly 1000 instead of 1001.)
async function updateCollection() {
  // $sum over an array path adds that field up across all elements.
  const cursor = db.collection("collection").aggregate([
    { "$project": {
      "totalWeight": { "$sum": "$packages.weight" }
    }}
  ]);

  let batch = [];
  while (await cursor.hasNext()) {
    const { _id, totalWeight } = await cursor.next();
    batch.push({
      "updateOne": {
        "filter": { _id },
        "update": { "$set": { totalWeight } }
      }
    });
    // Flush a full batch before accumulating more.
    if (batch.length >= 1000) {
      await db.collection("collection").bulkWrite(batch);
      batch = [];
    }
  }
  // Write any remaining partial batch.
  if (batch.length !== 0) {
    await db.collection("collection").bulkWrite(batch);
    batch = [];
  }
}
And that would do the same thing, but of course actually requires some interaction back and forth with the server in both reading and writing the result back. Though using bulkWrite() you are only sending back writes in batches rather than per document of the collection.
If you have an even older MongoDB, then Update MongoDB field using value of another field has some references to the same techniques in loop iteration that may also apply. But I really do recommend that you should not have any older MongoDB version than those mentioned in the answer here.
N.B You probably would want to add some try/catch handlers in such update code as well in case of errors. Or on the other hand, such one off operations are probably better executed in something like the MongoDB shell.
Maintenance
The better solution overall however is to actually keep the total up to date on every addition to the array. As an example, this is basically what you would want when using $push for a new array element and $inc to add to the existing total:
// Maintenance approach: when appending a new element to `packages`,
// atomically bump the stored total in the same update with $inc, so
// totalWeight stays correct without ever re-summing the whole array.
// (Fixes from the original: `udpdateOne` typo, a missing closing brace on
// the update document, and `package` — a reserved word in strict/module
// code — renamed to `newPackage`.)
let newPackage = {
  "date": "2019-09-26T06:30:39.561Z",
  "_id": "5d8c5b0f756838f78d5205d7",
  "category": "chil",
  "quantity": "177",
  "description": "a valueablegoods",
  "width": 15,
  "itemweight": 123,
  "length": 17,
  "height": 21,
  "dimension": 31.25903614457831,
  "weight": 32.25903614457831
};

let writeResult = await db.collection('collection').updateOne(
  { "_id": myIdValue },                          // target document
  {
    "$push": { "packages": newPackage },         // append the element
    "$inc": { "totalWeight": newPackage.weight } // add its weight to the running total
  }
);
In that way you are actually making sure the total is adjusted with every change you make and therefore it does not need constant reprocessing of another statement in order to keep that total in the document. Similar concepts apply for other types of updates other than adding a new item to an array.
I rewrote the code so that it iterates over each package weight and sums them. Here is the code:
// #route POST api/admins/addshipment/:unitNo
// #desc Add shipment for each customer
// #access Private
// #route POST api/admins/addshipment/:unitNo
// #desc Add a package to a customer's open shipment and refresh totalWeight
// #access Private
//
// Fixes from the original rewrite: the braces were mismatched (the first
// .then( call was never closed — a syntax error), the shipment was fetched
// twice, save() was fired without awaiting, and rejections went unhandled.
router.post(
  '/addshipment/:unitNo',
  passport.authenticate('jwt', { session: false }),
  (req, res) => {
    // Single lookup of the open (unpaid) shipment for this unit; the
    // paymentStatus condition belongs in the filter, not the projection.
    Shipments.findOne({ unitNo: req.params.unitNo, paymentStatus: "incomplete" })
      .then(shipments => {
        if (shipments === null) {
          return res.status(404).json("This user doesn't have an existing warehouse");
        }
        const newPackages = {
          category: req.body.category,
          quantity: req.body.quantity,
          description: req.body.description,
          width: req.body.width,
          itemweight: req.body.itemweight,
          length: req.body.length,
          height: req.body.height,
          dimension,          // NOTE(review): `dimension` is not defined in this scope — confirm its source
          weight: getPrice(), // NOTE(review): `getPrice` is not defined here either — confirm
        };
        shipments.packages.push(newPackages);
        // Recompute the total from scratch so it is correct whether this is
        // the first package or a later addition.
        shipments.totalWeight = shipments.packages.reduce((sum, p) => sum + p.weight, 0);
        // Persist before responding so the client sees the saved state.
        return shipments.save().then(() => res.json(shipments));
      })
      .catch(err => res.status(500).json(err.message));
  }
);
Mongoose store time in ISO Format like this:
{
"_id": {
"$oid": "5b6ed55b6a12624b1853b29a"
},
"time": {
"$date": "2018-08-11T12:23:55.627Z"
},
"location": "Kathmandu",
"temperature": 23,
"description": "moderate rain",
"humidity": 88,
"__v": 0
}
I want to check if the database already has data within 30 Minute time period. Like this
// Count readings for this city with time older than 30 minutes ago
// (1.8e+6 ms = 30 min).
// NOTE(review): Date.now() yields a millisecond number while `time` is a
// BSON date, and "has data within the last 30 minutes" would need $gte
// rather than $lt. The snippet as posted is also truncated — the
// countDocuments( call is never closed.
WeatherSchema.countDocuments({
'location': city,
'time': {
$lt: Date.now() - 1.8e+6
}
But the Date.now() gives the timestamp in millisecond and Database has time in ISO-Format.
You need to use another way to add 30 minutes like
// Build a Date object so mongoose casts the comparison to a BSON date.
var date = new Date();
var modifiedDate = new Date();
// modifiedDate = now + 30 minutes.
modifiedDate.setMinutes(date.getMinutes() + 30);
// NOTE(review): `$lt` against a time 30 minutes in the *future* matches
// every already-stored document. For "data within the last 30 minutes",
// subtract 30 minutes instead and use $gte — verify the intended window.
// The snippet is truncated as posted (the call is never closed).
WeatherSchema.countDocuments({
'location': city,
'time': {
$lt: modifiedDate
}
Now, modifiedDate is a Date object so this will be converted accordingly to ISO Format by mongoose while querying.
I'm attempting to query a collection and retrieve an average value for the each of the last 7 days excluding the current day. On some or all of the days there may not be an average.
Here's what I have so far:
// Average rating per day-of-month over the 7 full days before today
// (today excluded). `.toDate()` is the public moment API for getting the
// underlying Date — the original used the private `._d` field, which is
// unsupported; the original aggregate( call was also never closed.
var dateTill = moment({ hour: 0, minute: 0 }).subtract(1, 'days').toDate();
var dateSevenDaysAgo = moment({ hour: 0, minute: 0 }).subtract(7, 'days').toDate();

Rating.aggregate([
  {
    // Only this user's ratings inside the 7-day window.
    $match: {
      userTo: facebookId,
      timestamp: { $gt: dateSevenDaysAgo, $lt: dateTill }
    }
  },
  {
    // One bucket per calendar day-of-month, averaging the rating.
    $group: {
      _id: { day: { '$dayOfMonth': '$timestamp' } },
      average: { $avg: '$rating' }
    }
  },
  {
    // NOTE(review): sorting by day-of-month misorders results that span a
    // month boundary (e.g. Aug 1 sorts before Jul 28) — the issue raised below.
    $sort: {
      '_id.day': 1
    }
  }
])
This gives me
[ { _id: { day: 20 }, average: 1 },
{ _id: { day: 22 }, average: 3 },
{ _id: { day: 24 }, average: 5 } ]
What I'm trying to get is something like:
[1,,3,,5,,]
Which represents the last 7 days of averages in order and has an empty element where there is no average for that day.
I could try and make a function that detects where the gaps are but this won't work when the averages are spread across two different months. e.g (July 28,29,30,31,Aug 1,2] - the days in august will be sorted to the front of the array I want.
Is there an easier way to do this?
Thanks!
People ask about "empty results" quite often, and the thinking usually comes from how they would have approached the problem with a SQL query.
But whilst it is "possible" to throw a set of "empty results" for items that do not contain a grouping key, it is a difficult process and much like the SQL approach people use, it's just throwing those values within the statement artificially and it really isn't a very performance driven alternative. Think "join" with a manufactured set of keys. Not efficient.
The smarter approach is to have those results ready in the client API directly, without sending to the server. Then the aggregation output can be "merged" with those results to create a complete set.
However you want to store the set to merge with is up to you, it just requires a basic "hash table" and lookups. But here is an example using nedb, which allows you to maintain the MongoDB set of thinking for query and updates:
// Demo setup: mongoose talks to the real collection; nedb is an in-memory
// datastore that will hold one row per expected day (the "empty results").
var async = require('async'),
mongoose = require('mongoose'),
DataStore = require('nedb'),
Schema = mongoose.Schema,
db = new DataStore();
mongoose.connect('mongodb://localhost/test');
// Schemaless model bound to the "testdata" collection.
var Test = mongoose.model(
'Test',
new Schema({},{ strict: false }),
"testdata"
);
// Fixtures: two values on each of three distinct days (gaps on the others).
var testdata = [
{ "createDate": new Date("2015-07-20"), "value": 2 },
{ "createDate": new Date("2015-07-20"), "value": 4 },
{ "createDate": new Date("2015-07-22"), "value": 4 },
{ "createDate": new Date("2015-07-22"), "value": 6 },
{ "createDate": new Date("2015-07-24"), "value": 6 },
{ "createDate": new Date("2015-07-24"), "value": 8 }
];
// Reporting window (inclusive) and one day expressed in milliseconds.
var startDate = new Date("2015-07-20"),
endDate = new Date("2015-07-27"),
oneDay = 1000 * 60 * 60 * 24;
// Three sequential phases: clear the collection, insert the fixtures, then
// fill the nedb store and aggregate — the last two running in parallel.
async.series(
[
// Phase 1: start from an empty collection.
function(callback) {
Test.remove({},callback);
},
// Phase 2: insert each fixture document.
function(callback) {
async.each(testdata,function(data,callback) {
Test.create(data,callback);
},callback);
},
// Phase 3: in parallel, (a) upsert a zero-average row for every day in the
// window, and (b) aggregate real averages and $inc them onto those rows.
// The two cannot conflict because both upsert on the same "day" key and
// $inc is additive.
function(callback) {
async.parallel(
[
// (a) walk the window one day at a time, seeding { day, average: 0 }.
function(callback) {
var tempDate = new Date( startDate.valueOf() );
async.whilst(
function() {
return tempDate.valueOf() <= endDate.valueOf();
},
function(callback) {
var day = tempDate.getUTCDate();
db.update(
{ "day": day },
{ "$inc": { "average": 0 } },
{ "upsert": true },
function(err) {
// Advance to the next day only after the upsert lands.
tempDate = new Date(
tempDate.valueOf() + oneDay
);
callback(err);
}
);
},
callback
);
},
// (b) aggregate averages per day-of-month from the real collection.
function(callback) {
Test.aggregate(
[
{ "$match": {
"createDate": {
"$gte": startDate,
"$lt": new Date( endDate.valueOf() + oneDay )
}
}},
{ "$group": {
"_id": { "$dayOfMonth": "$createDate" },
"average": { "$avg": "$value" }
}}
],
function(err,results) {
// NOTE(review): no `return` before the error callback, so on error
// the callback can fire twice (here and via async.each below).
if (err) callback(err);
// Merge each aggregated average into its seeded day row.
async.each(results,function(result,callback) {
db.update(
{ "day": result._id },
{ "$inc": { "average": result.average } },
{ "upsert": true },
callback
)
},callback);
}
);
}
],
callback
);
}
],
// Final: dump the merged per-day rows in day order and disconnect.
function(err) {
if (err) throw err;
db.find({},{ "_id": 0 }).sort({ "day": 1 }).exec(function(err,result) {
console.log(result);
mongoose.disconnect();
});
}
);
Which gives this output:
[ { day: 20, average: 3 },
{ day: 21, average: 0 },
{ day: 22, average: 5 },
{ day: 23, average: 0 },
{ day: 24, average: 7 },
{ day: 25, average: 0 },
{ day: 26, average: 0 },
{ day: 27, average: 0 } ]
In short, a "datastore" is created with nedb, which basically acts the same as any MongoDB collection ( with stripped down functionality ). You then insert your range of "keys" expected and default values for any of the results.
Then running your aggregation statement, which is only going to return the keys that exist in the queried collection, you simply "update" the created datastore at the same key with the aggregated values.
To make that a bit more efficient, I am running both the empty result "creation" and the "aggregation" operations in parallel, utilizing "upsert" functionality and the $inc operator for the values. These will not conflict, and that means the creation can happen at the same time as the aggregation is running, so no delays.
This is very simple to integrate into your API, so you can have all the keys you want, including those with no data for aggregation in the collection for output.
The same approach adapts well to using another actual collection on your MongoDB server for very large result sets. But if they are very large, then you should be pre-aggregating results anyway, and just using standard queries to sample.
I have a MySQL database and a Waterline model for a fairly simple record: Todo. It's straight-forward as we're using it for proof-of-concept for a Sails.js API.
-- MySQL schema for the proof-of-concept Todo API.
-- NOTE(review): `Completed` is BIT(1); per the follow-up in this thread, the
-- Waterline adapter returned a Buffer-like object for the BIT column, and
-- switching the column to TINYINT(1) yielded a clean boolean.
CREATE TABLE `Todo` (
`ID` int(11) NOT NULL AUTO_INCREMENT,
`TodoItem` varchar(200) NOT NULL,
`Completed` bit(1) NOT NULL DEFAULT b'0',
PRIMARY KEY (`ID`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1 COMMENT=' ';
I have the model set up in this way:
module.exports = {
connection: 'companyMySql',
tableName: 'Todo',
attributes: {
ID: {
type: 'integer',
autoIncrement: true,
primaryKey: true
},
TodoItem: {
type: 'string',
required: true
},
Completed: {
type: 'boolean',
defaultsTo: 'false',
size: 1,
boolean: true
}
},
autoCreatedAt: false,
autoUpdatedAt: false,
autoPK: false
};
The problem I'm having is that the auto-created API for Todo creates a bunch of extra cruft in the 'Completed' field, and I'm trying to find out how I remove it. I'm expecting a boolean value, but I could definitely work with 0/1, or string literals, but that's not what I'm getting. I've tried various validation on the field, like 'min: 0, max: 1', but that has not changed the result.
Here is the results of a retrieval of a single record:
{
"ID": 1,
"TodoItem": "Create Todo API",
"Completed": {
"0": 1,
"length": 1,
"parent": [
100,
0,
0,
0,
3,
83,
69,
76,
69,
67,
96,
46,
96,
73,
68,
96,
44,
32,
96,
116,
111,
100,
111,
96,
// This continues on for quite some time
],
"offset": 2128 // This value increases each time I call a GET until it gets to ~8000, then it resets to ~500
}
}
So does anyone have any idea on what I can do to get a straight-forward boolean value out of this?
After some further testing, I was able to get a boolean value to work as expected by using TINYINT(1) for my column datatype. Once I did that, this model worked as I expected it to:
// Attribute definition that worked once the column was switched to
// TINYINT(1): the default is the boolean literal `false`, not the string.
Completed: {
type: 'boolean',
defaultsTo: false,
boolean: true
}
This still doesn't seem ideal, but it was a solution for my purposes.