How to orderby/sort incoming data from Firebase - node.js

I'm working on a Firebase and Node.js Express project which is basically an ordering system.
I want to sort my incoming orders by time. I've tried using the orderBy() function that Firebase provides, but the order in the JSON output still comes out wrong.
It seems like this should work, so that I wouldn't have to sort the data afterwards on my frontend.
This is my GET route for the Firebase data:
// route for database
router.get("/orderslist", (req, res) => {
  let orders = []
  let number_of_orders
  // Getting the snapshot of the order collection
  db.collection('orders').orderBy('time').get().then(productSnapshot => {
    number_of_orders = productSnapshot.size
    // iterating over the order snapshot
    productSnapshot.forEach(orderDoc => {
      // creating an order object and assigning the ID and the rest of the information from the database
      var order = {
        id: orderDoc.id,
        customer_name: orderDoc.data().customer_name,
        date: orderDoc.data().date,
        comments: orderDoc.data().comments,
        time: orderDoc.data().time,
        total: orderDoc.data().total,
        order_status: orderDoc.data().order_status,
        products: []
      }
      // using the id, to get the products from the subcollection
      db.collection('orders/' + order.id + '/products').get().then(productSnapshot => {
        // iterating over the product snapshot
        productSnapshot.forEach(productDoc => {
          // creating a product object
          var product = {
            name: productDoc.data().name,
            price: productDoc.data().price
          }
          // then we push the product object to the list of products in the order object
          order.products.push(product)
        });
        // we are finished iterating over the productsnapshot and now we can push it to the orders list
        orders.push(order)
        // checks if the list is filled with everything from the database
        if (orders.length == number_of_orders) {
          return res.json(orders)
        }
      });
    });
  });
});
This is the data in JSON:
[
  {
    "id": "xWmlB9fHD4rw8Di75llp",
    "customer_name": "Emil",
    "date": "07/05-2020",
    "comments": "without sugar",
    "time": "a",
    "total": 40,
    "order_status": true,
    "products": [
      {
        "name": "Latte",
        "price": 40
      }
    ]
  },
  {
    "id": "GWJoxe0DVFDWfi2AW6jS",
    "customer_name": "test",
    "date": "222222",
    "comments": "",
    "time": "f",
    "total": 10,
    "order_status": false,
    "products": [
      {}
    ]
  },
  {
    "id": "ggYSVKA1i0U8khIpeud4",
    "customer_name": "Oliver",
    "date": "01/05-2020",
    "comments": "order to be packed neatly because im on a bike",
    "time": "d",
    "total": 38,
    "order_status": false,
    "products": [
      {
        "name": "Latte Macchiato",
        "price": 38
      }
    ]
  }
]

I recommend that you create a proper Firestore timestamp field to be used as a reference, rather than the string field you have called "time".
That said, Firestore timestamps can be tricky, so here's how you can use them.
When you get timestamps from Firestore they are Firestore Timestamp objects (internally a pair of _seconds and _nanoseconds values).
To convert one into a normal date you can use the .toDate() function.
For example, for a document containing such a timestamp field, we can use something like:
db.collection('[COLLECTION]').doc('[DOCUMENT]').get().then(function(doc) {
  console.log(doc.data().[FIELD].toDate());
});
and the output will be like:
2019-12-16T16:27:33.031Z
Now to process that timestamp further, you can convert it into a string and use regex to modify it according to your needs.
For example (I'm using Node.js here):
db.collection('[COLLECTION]').doc('[DOCUMENT]').get().then(function(doc) {
  var stringified = doc.data().[FIELD].toDate().toISOString();
  //console.log(stringified);
  var split1 = stringified.split('T');
  var date = split1[0].replace(/\-/g, ' ');
  console.log(date);
  var time = split1[1].split('.');
  console.log(time[0]);
});
Will give you an output like this:
2019 12 16
16:27:33
Now you can apply the orderBy approach on the query itself, or sort once you fetch the documents in your frontend.
An example orderBy on the query is the following:
db.collection('[COLLECTION]')
  .orderBy('timeStamp', 'asc')
However, if you plan to keep using the string type you have there (not recommended), you will have to convert that string to a numeric value (for example its ASCII codes) to be able to sort on it properly as a time value.
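For reference, a minimal sketch (assuming the Node.js Admin SDK, default credentials and a hypothetical createdAt field) of writing a real timestamp and ordering on it:
const admin = require('firebase-admin');
admin.initializeApp();
const db = admin.firestore();

async function demo() {
  // store a real server-side timestamp instead of a string such as "a" or "f"
  await db.collection('orders').add({
    customer_name: 'Emil',
    createdAt: admin.firestore.FieldValue.serverTimestamp()
  });

  // order the query on that timestamp field
  const snapshot = await db.collection('orders').orderBy('createdAt', 'asc').get();
  snapshot.forEach(doc => console.log(doc.id, doc.data().createdAt.toDate()));
}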

Related

MongoDB: update all the documents with a unique id

I have a collection named products with almost 100k documents. I want to introduce a new key called secondaryKey with a unique uuid value in every document.
I do this using Node.js.
The problem I am facing:
When I try the query below,
db.collection('products').updateMany({},{"$set":{secondaryKey: uuid()}});
it updates all the documents with the same uuid value.
I tried a loop to update the documents one by one, but the issue there is that I don't have a filter value for updateOne, because I want to update all the documents.
Can anyone please help me here?
Thanks :)
If you are using MongoDB version >= 4.4 you can try this:
db.products.updateMany(
  {},
  [
    {
      $set: {
        secondaryKey: {
          $function: {
            body: function() {
              return UUID().toString().split('"')[1];
            },
            args: [],
            lang: "js"
          }
        }
      }
    }
  ]
);
Output
[
  {
    "_id": ObjectId("..."),
    "secondaryKey": "f41b15b7-a0c5-43ed-9d15-69dbafc0ed29"
  },
  {
    "_id": ObjectId("..."),
    "secondaryKey": "50ae7248-a92e-4b10-be7d-126b8083ff64"
  },
  {
    "_id": ObjectId("..."),
    "secondaryKey": "fa778a1a-371b-422a-b73f-8bcff865ad8e"
  }
]
Since it's not the same value you want to put in each document, you have to use a loop.
In your loop, you update the current document of the iteration, so you filter with its _id in updateOne.
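For example, a minimal sketch of that loop with the Node.js driver (the connection string, database name and the uuid package are assumptions for illustration):
const { MongoClient } = require('mongodb');
const { v4: uuidv4 } = require('uuid');

async function addSecondaryKeys() {
  const client = await MongoClient.connect('mongodb://localhost:27017');
  const products = client.db('mydb').collection('products');

  // walk the collection with a cursor and give each document its own uuid
  const cursor = products.find({}, { projection: { _id: 1 } });
  for await (const doc of cursor) {
    await products.updateOne({ _id: doc._id }, { $set: { secondaryKey: uuidv4() } });
  }

  await client.close();
}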
The $function-based reply above didn't work for me. Also, enabling JavaScript execution on your database compromises security (see the notes on $function and enabling server-side JavaScript). The best way is to not overload your server and do the work locally, as below:
const { nanoid, customAlphabet } = require('nanoid')

async function asdf() {
  const movies = await client.db("localhost").collection("productpost");
  var result2 = []
  let result = await movies.find({}).toArray()
  result.forEach(element => {
    // build a 10-digit numeric id for each document
    const nanoid = customAlphabet('1234567890', 10)
    console.log(element.price)
    element.price = 4
    element.id = nanoid()
    result2.push(element)
  });
  console.log("out result2", result2)
  await movies.deleteMany({})
  await movies.insertMany(result2)
}
It will delete all objects in your collection and insert the updated ones, using nanoid for the unique ids.
This is a database object after adding the unique id:
{ "_id": { "$oid": "334a98519a20b05c20574dd1" }, "attach": "[\"http://localhost:8000/be/images/2022/4/bitfinicon.png\"]", "title": "jkn jnjn", "description": "jnjn", "price": 4, "color": "After viewing I am 48.73025772956596% more satisfied with life.", "trademark": "", "category": "[]", "productstate": "Published", "createdat": { "$date": "2022-04-03T17:40:54.743Z" }, "language": "en"}
P.S.: Please back up your collection before doing this, or filter the array according to your needs so you don't go through the whole collection.

Destructuring a Sequelize query

I'm having trouble deconstructing a simple Sequelize query.
const character = await Character.findAll({
order: ['created_at'],
attributes: ['id', 'name', 'created_at'],
});
I want to access the "created_at" of the result, so I try:
const { created_at } = character;
But it returns undefined.
I need to subtract the current date from the created_at date.
Could someone tell me what I'm doing wrong or how could I do it?
I have Moment.js installed if needed.
Query result:
{ "id": 10, "name": "John Doe", "created_at": "2020-08-08T20:52:06.262Z" },
{ "id": 11, "name": "Mary Jane", "created_at": "2020-08-08T21:01:49.562Z" },
{ "id": 12, "name": "Clark Kent", "created_at": "2020-08-08T21:02:51.948Z" }
]
In your case you are trying to destructure from an array, but that kind of destructuring only works on an object.
For it to work you should use an index.
Something like:
const { created_at } = character[0];
Notice that this only destructures position zero of the results contained in the character array. If you don't want position zero, you should pick the desired element in another way.
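To then subtract the current date from created_at, as asked, a quick sketch using the Moment.js you already have installed (the helper name is made up):
const moment = require('moment');

async function daysSinceFirstCharacter() {
  const characters = await Character.findAll({
    order: ['created_at'],
    attributes: ['id', 'name', 'created_at'],
  });

  const { created_at } = characters[0]; // destructure one element, not the array
  // difference between now and created_at, here expressed in days
  return moment().diff(moment(created_at), 'days');
}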

How can I select a particular field in my MongoDB array and sum it up

I have a MongoDB schema in which I sometimes have a single array entry and sometimes more than 20 array values. Each array entry has a weight field whose values I want to sum together, inserting the sum into another field in my document.
Here is what I am trying to say. This is my schema; how can I add the weight values of every package array entry together and have the result be my total weight field?
{
  "status": "In warehouse",
  "paymentStatus": "incomplete",
  "_id": "5d8b56476a3e4ae2d01f2953",
  "unitNo": "0002",
  "warehouseNo": "0001",
  "trackingNo": "FPZ848505936",
  "packages": [
    {
      "date": "2019-09-26T06:30:39.561Z",
      "_id": "5d8c5b0f756838f78d5205d7",
      "category": "chil",
      "quantity": "177",
      "description": "a valueablegoods",
      "width": 15,
      "itemweight": 123,
      "length": 17,
      "height": 21,
      "dimension": 31.25903614457831,
      "weight": 32.25903614457831
    },
    {
      "date": "2019-09-26T06:30:39.561Z",
      "_id": "5d8c5b0f756838f78d5202dd",
      "category": "chil",
      "quantity": "177",
      "description": "a valueablegoods",
      "width": 15,
      "itemweight": 123,
      "length": 17,
      "height": 21,
      "dimension": 35.25903614457831,
      "weight": 30
    },
    {
      "date": "2019-09-26T06:30:39.561Z",
      "_id": "5d8c5b0f756838f78d51aeq",
      "category": "chil",
      "quantity": "177",
      "description": "a valueablegoods",
      "width": 15,
      "itemweight": 123,
      "length": 17,
      "height": 21,
      "dimension": 32.25903614457831,
      "weight": 44
    }
  ],
  "totalWeigth": "This should add all weight values in my packages array together, and if there is only one it should bring only that one",
  "date": "2019-09-25T11:57:59.359Z",
  "__v": 0
}
This is the API route that adds packages to the packages array field; I want totalWeight to be updated any time a new package is added or updated.
// #route POST api/admins/addshipment/:unitNo
// #desc Add shipment for each customer
// #access Private
router.post(
  '/addshipment/:unitNo',
  passport.authenticate('jwt', { session: false }),
  (req, res) => {
    Shipments.findOne({unitNo: req.params.unitNo}, {paymentStatus: "incomplete"})
      .then(shipments => {
        if (shipments === null || shipments.paymentStatus === "complete") {
          const errwarehouse = "This user doesn't have an existing warehouse";
          return res.status(404).json(errwarehouse);
        } else {
          if (shipments.paymentStatus === "incomplete") {
            function getPrice() {
              if (initial > dimension) {
                return initial
              } else if (initial === dimension) {
                return initial
              } else {
                return dimension
              }
            }
            const newPackages = {
              category: req.body.category,
              quantity: req.body.quantity,
              description: req.body.description,
              width: req.body.width,
              itemweight: req.body.itemweight,
              length: req.body.length,
              height: req.body.height,
              dimension,
              weight: getPrice(),
            };
            Shipments.findOneAndUpdate({unitNo: req.params.unitNo, paymentStatus: "incomplete"},
              {"$push": {"packages": newPackages}}, {totalWeight: {"$sum": {"packages.weight"}}}) // Here is where i add the package to the package array and where i tried to sum up packages.weight every time i add a new package
              .then(shipments => res.json(shipments))
          }
        }
      });
  });
Thank you
var users = db.users.aggregate([
  {$unwind: "$packages"},
  {
    $group:
    {
      _id: "$_id",
      totalWeigth: { $sum: "$packages.weight" }
    }
  }
]).toArray()
users.forEach((ele) => {
  db.users.update({_id: ele._id}, {$set: {totalWeigth: ele.totalWeigth}})
})
If you actually have MongoDB 4.2 or greater then you can use the new aggregation syntax available for updates. This essentially means adding one of the valid aggregation pipeline stages of either $addFields, $set (an alias of $addFields that makes "updates" easier to read), $project or $replaceRoot, and then actual aggregation operators in order to do the manipulation. In this case $sum:
let writeResult = await db.collection("collection").updateMany(
{},
[{ "$set": { "totalWeight": { "$sum": "$packages.weight" } } }]
);
That adds a new field totalWeight to every document, based on the values present in the whole array of each document.
The main benefit here is that this is a single request to the server which actually performs ALL updating on the server and requires no information from the collection to be sent back to the client for iteration.
If you have an earlier version ( I suggest you don't use anything earlier than 3.4, but even 3.2 would do here ), then you could use bulkWrite() in a loop:
async function updateCollection() {
  let cursor = db.collection("collection").aggregate([
    { "$project": {
      "totalWeight": { "$sum": "$packages.weight" }
    }}
  ]);

  let batch = [];
  while (await cursor.hasNext()) {
    let { _id, totalWeight } = await cursor.next();
    batch.push({
      "updateOne": {
        "filter": { _id },
        "update": { "$set": { totalWeight } }
      }
    });
    if (batch.length > 1000) {
      await db.collection("collection").bulkWrite(batch);
      batch = [];
    }
  }

  if (batch.length != 0) {
    await db.collection("collection").bulkWrite(batch);
    batch = [];
  }
}
And that would do the same thing, but of course it actually requires some interaction back and forth with the server for both reading and writing the result. Though with bulkWrite() you are only sending writes back in batches rather than one write per document of the collection.
If you have an even older MongoDB, then Update MongoDB field using value of another field has some references to the same looping techniques that may also apply. But I really do recommend that you not use any MongoDB version older than those mentioned in this answer.
N.B. You would probably want to add some try/catch handlers to such update code as well in case of errors. On the other hand, such one-off operations are probably better executed in something like the MongoDB shell.
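For instance, a minimal sketch (not from the original answer) of wrapping that helper:
(async () => {
  try {
    await updateCollection();
    console.log('totals updated');
  } catch (err) {
    console.error('bulk total update failed', err);
  }
})();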
Maintenance
The better solution overall however is to actually keep the total up to date on every addition to the array. As an example, this is basically what you would want when using $push for a new array element and $inc to add to the existing total:
let package = {
  "date": "2019-09-26T06:30:39.561Z",
  "_id": "5d8c5b0f756838f78d5205d7",
  "category": "chil",
  "quantity": "177",
  "description": "a valueablegoods",
  "width": 15,
  "itemweight": 123,
  "length": 17,
  "height": 21,
  "dimension": 31.25903614457831,
  "weight": 32.25903614457831
};

let writeResult = await db.collection('collection').updateOne(
  { "_id": myIdValue },
  {
    "$push": { "packages": package },
    "$inc": { "totalWeight": package.weight }
  }
);
In that way you are actually making sure the total is adjusted with every change you make and therefore it does not need constant reprocessing of another statement in order to keep that total in the document. Similar concepts apply for other types of updates other than adding a new item to an array.
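For example, a sketch along the same lines (my own illustration, reusing the myIdValue placeholder from above): removing a package can combine $pull with a negative $inc so the stored total stays in sync.
let removed = { "_id": "5d8c5b0f756838f78d5205d7", "weight": 32.25903614457831 };

let writeResult = await db.collection('collection').updateOne(
  { "_id": myIdValue },
  {
    "$pull": { "packages": { "_id": removed._id } },
    "$inc": { "totalWeight": -removed.weight }
  }
);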
I rewrote the code so that a sum is done over each package weight, and here is the code:
// #route POST api/admins/addshipment/:unitNo
// #desc Add shipment for each customer
// #access Private
router.post(
  '/addshipment/:unitNo',
  passport.authenticate('jwt', { session: false }),
  (req, res) => {
    Shipments.findOne({unitNo: req.params.unitNo}, {paymentStatus: "incomplete"})
      .then(shipments => {
        const newPackages = {
          category: req.body.category,
          quantity: req.body.quantity,
          description: req.body.description,
          width: req.body.width,
          itemweight: req.body.itemweight,
          length: req.body.length,
          height: req.body.height,
          dimension,
          weight: getPrice(),
        };
        let total = 0;
        Shipments.findOne({unitNo: req.params.unitNo, paymentStatus: "incomplete"})
          .then(shipments => {
            shipments.packages.push(newPackages)
            shipments.packages.forEach(i => { total += i.weight })
            shipments.totalWeight = total
            shipments.save()
            res.json(shipments)
          })
      });
  });

How to use MongoDB aggregation to transform a document with a nested array

My document looks like the one shown below and I want to transform it using aggregation. Inside favourite_products I have a product array, and it has a product_id corresponding to each shop. Now I just want the product ids of all shops, sorted by time:
"favourite_products": [
{
"shop": {
"shop_id": "59465888f7babb000485574b",
"time": "2017-07-12T06:11:19.817Z"
},
"product": [
{
"product_id": "594c2d56f7afcf00043b1195",
"time": "2017-07-12T06:10:36.775Z"
},
{
"product_id": "594ac36c76de720004e819f6",
"time": "2017-07-12T06:11:19.817Z"
}
]
},
{
"shop": {
"shop_id": "593acc24a2902d0004211f1f",
"time": "2017-07-12T06:12:59.372Z"
},
"product": [
{
"product_id": "594ac36c76de720004e819f6",
"time": "2017-07-12T06:12:59.372Z"
}
]
}
]
I want to transform it into this:
"favourite_products"
["59465888f7babb000485574b",594c2d56f7afcf00043b1195","594ac36c76de720004e819f6","593acc24a2902d0004211f1f","594ac36c76de720004e819f6"]
The query below returns favourite_products.product.product_id ordered by time.
Use $project if you want the result as separate documents,
or use $group if you want the result as an array in one document.
db['testing-aggregate'].aggregate([
  {$unwind: '$favourite_products'},
  {$unwind: '$favourite_products.product'},
  {$sort: {'favourite_products.product.time': 1}}, // order by time. 1=ascending | -1=descending
  // {$project: {
  //   '_id': 0, // exclude _id from output
  //   'favourite_products': '$favourite_products.product.product_id' // return only product_id
  // }},
  {$group: {
    _id: null,
    product_id: {$push: '$favourite_products.product.product_id'}
  }}
])
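If you run this from Node.js rather than the shell, a small sketch of reading the result back (assuming a connected db handle and the same collection name):
async function favouriteProductIds(db) {
  const docs = await db.collection('testing-aggregate').aggregate([
    { $unwind: '$favourite_products' },
    { $unwind: '$favourite_products.product' },
    { $sort: { 'favourite_products.product.time': 1 } },
    { $group: { _id: null, product_id: { $push: '$favourite_products.product.product_id' } } }
  ]).toArray();
  // docs[0].product_id holds the time-ordered array of product ids (guard against an empty result)
  return docs.length ? docs[0].product_id : [];
}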

MongoDB: Query model and check if document contains object or not, then mark / group result

I have a model called Post, which contains a property array with the user ids of users that have liked this post.
Now I need to query the Post model and mark the returned results with likedBySelf true/false for use by my client; is this possible?
I don't have to store the likedBySelf property in the database, just modify the results to have that property.
A temporary solution I found was to do two queries, one that finds the posts that are liked by user x and one that finds the ones that are not, then map over them (setting likedBySelf true/false) and combine the two arrays. But this limits other query features such as limit and skip.
So now my queries looks like this:
var notLikedByQuery = Post.find({likedBy: {$ne: req.body.user._id}})
var likedByQuery = Post.find({likedBy: req.body.user._id})
(I'm using the Mongoose lib)
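For reference, a sketch of that temporary two-query approach as described (my own illustration, inside an async handler):
const [liked, notLiked] = await Promise.all([
  Post.find({likedBy: req.body.user._id}).lean(),
  Post.find({likedBy: {$ne: req.body.user._id}}).lean()
]);

// mark each plain object and combine the two arrays
const posts = [
  ...liked.map(p => ({ ...p, likedBySelf: true })),
  ...notLiked.map(p => ({ ...p, likedBySelf: false }))
];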
PS. A typical post can look like this (JSON):
{
  "_id": {
    "$oid": "55fc463c83b2d2501f563544"
  },
  "__t": "Post",
  "groupId": {
    "$oid": "55fc463c83b2d2501f563545"
  },
  "inactiveAfter": {
    "$date": "2015-09-25T17:13:32.426Z"
  },
  "imageUrl": "https://hootappprodstorage.blob.core.windows.net/devphotos/55fc463b83b2d2501f563543.jpeg",
  "createdBy": {
    "$oid": "55c49e2d40b3b5b80cbe9a03"
  },
  "inactive": false,
  "recentComments": [],
  "likes": 8,
  "likedBy": [
    { "$oid": "558b2ce70553f7e807f636c7" },
    { "$oid": "559e8573ed7c830c0a677c36" },
    { "$oid": "559e85bced7c830c0a677c43" },
    { "$oid": "559e854bed7c830c0a677c32" },
    { "$oid": "559e85abed7c830c0a677c40" },
    { "$oid": "55911104be2f86e81d0fb573" },
    { "$oid": "559e858fed7c830c0a677c3b" },
    { "$oid": "559e8586ed7c830c0a677c3a" }
  ],
  "location": {
    "type": "Point",
    "coordinates": [
      10.01941398718396,
      60.96738099591897
    ]
  },
  "updatedAt": {
    "$date": "2015-09-22T08:45:41.480Z"
  },
  "createdAt": {
    "$date": "2015-09-18T17:13:32.426Z"
  },
  "__v": 8
}
@tskippe, you can use a method like the following to work out whether the post is liked by the user himself, and call the function anywhere you want.
var processIsLiked = function(postId, userId, doc, next) {
  var q = Post.find({post_id: postId});
  q.lean().exec(function(err, res) {
    if (err) return utils.handleErr(err, res);
    if (_.find(doc.post.likedBy, userId)) { // if the likedBy array contains the user
      doc.post.isLiked = true;
    } else {
      doc.post.isLiked = false;
    }
    next(doc);
  });
}
Because you are using q.lean() you don't need to actually persist the data. You just need to process it, add an isLiked field to the post and send back the response. Note that we are manipulating doc directly. You can also tweak it to accept a doc containing an array of posts, iterate over them and attach an isLiked field to each post.
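A quick sketch of that array variant (a hypothetical helper, not part of the original answer):
function markLikedBySelf(posts, userId) {
  // attach an isLiked flag to every post in the array
  return posts.map(function(post) {
    post.isLiked = (post.likedBy || []).some(function(id) {
      return String(id) === String(userId);
    });
    return post;
  });
}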
I found that MongoDB's aggregation with the $project technique was my best bet, so I wrote up an aggregation like this.
Explanation:
Since I want to keep the entire document, but $project's purpose is to reshape the docs, you have to specify the properties you want to keep. A simple way of keeping all the properties is to use "$$ROOT".
So I define a $project, set all my original properties under doc: "$$ROOT", then create a new property "likedBySelf", which is marked true/false depending on whether the specified USERID is in the $likedBy set.
I think this is cleaner and simpler than post-processing every single model after a query to set a likedBySelf flag. It may not be faster, but it's cleaner.
Model.aggregate([
  { $project: {
    doc: "$$ROOT",
    likedBySelf: {
      $cond: {
        "if": { "$setIsSubset": [
          [USERID],
          "$likedBy"
        ]},
        "then": true,
        "else": false
      }
    }
  }}
]);
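Used from Mongoose it could look roughly like this (a sketch; the helper name is made up, and I'm substituting the $in aggregation operator for $setIsSubset, which behaves the same way for a single user id):
const mongoose = require('mongoose');

async function getPostsWithLikedFlag(userId) {
  const userObjectId = new mongoose.Types.ObjectId(userId);
  return Post.aggregate([
    { $project: {
      doc: "$$ROOT",
      likedBySelf: { $in: [userObjectId, "$likedBy"] } // true when the user id is in likedBy
    }}
  ]);
}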
