How to condition sort on mongodb - node.js

storeId is null or ObjectId.
I want to sort like this :
if storeId is null : sort by name
if storeId is ObjectId: sort by createdAt
I'm using this code to sort, but it doesn't behave the way I expect:
sort: { storeId: 1, name: 1, createdAt: -1 },
Can anyone help me?

Related

Using EXISTS with bound parameters in sub-query with Sequelize

Suppose I have a table of something like cars, where inside is a JSONB object specifying possible customizations:
+----+-----------------------------------------+
| id | customizations JSONB |
+----+-----------------------------------------+
| 1 | {"color": "blue", "lights": "led", ...} |
+----+-----------------------------------------+
| 2 | {"color": "red"} |
+----+-----------------------------------------+
| 3 | {} |
+----+-----------------------------------------+
If I want to query for a certain customization based on case-insensitive value or partial value (i.e., ILIKE), I can do something like:
SELECT * FROM "Cars" WHERE EXISTS (
SELECT 1 FROM JSONB_EACH_TEXT("customizations") WHERE "value" ~* 'BLU'
);
This pattern works fine in Postgres, but now I am trying to translate it over to Sequelize as best as I can. The important thing here is that the search term ('BLU' from the example) is passed as a parameter.
const cars = await cars.findAll({
where: // ???? Maybe something like db.fn('EXISTS', ...), but how do I bind the parameter?
});
How can I use an EXISTS query here, but bind a parameter? I know I could use db.literal(), but if I did that, I'd have to escape the search term string myself before interpolating it into the query. (Is there at least a proper method for doing this data escaping in Sequelize?)
Note that the JSONB object in customizations can have many keys, a single key, or even no keys.
Bounty Note: Answers using modern versions of Postgres are fine, but I would also like an answer for PostgreSQL v10.12, as that's all that is available with AWS Aurora Serverless. I'll happily assign a separate bounty to both answers!
If on postgresql 12+, it would be possible to use the json path expression to extract all values & cast to text in order to regex match
The raw query would be
SELECT *
FROM cars
WHERE jsonb_path_query_array(customizations, '$.*')::TEXT ~* 'BLU'
in sequelize:
where: Sequelize.where(
Sequelize.literal("jsonb_path_query_array(customizations, '$.*')::TEXT"),
Op.iRegexp,
'BLU'
)
On older versions, I would probably use a raw query with bound parameters that maps back to the Car object.
How can I use an EXISTS query here, but bind a parameter? I know I could use db.literal(), but if I did that, I'd have to escape the search term string myself before interpolating it into the query. (Is there at least a proper method for doing this data escaping in Sequelize?)
You can bind parameters to raw SQL queries using either $1 or $name for positional (array) and named (object) arguments.
// cars.js
const Sequelize = require('sequelize');
const path = 'postgresql://hal:hal@localhost:5432/hal'
const sequelize = new Sequelize(path);
let Car = sequelize.define('cars', {
id: { type: Sequelize.INTEGER, primaryKey: true },
customizations: Sequelize.JSONB
});
let cars = [{ id: 1, customizations: {color: "blue", lights: "led"} },
{ id: 2, customizations: {color: "red"} },
{ id: 3, customizations: {} }]
const search_pat = 'BLU';
const stmt = `
select distinct cars.*
from cars, jsonb_each_text(customizations) kv(kk, vv)
where vv ~* $search_pattern
`;
sequelize
.sync()
.then(() => {
Car.bulkCreate(cars)
})
.then(() => {
sequelize.query(stmt, {
bind: {search_pattern: search_pat},
type: sequelize.QueryTypes.SELECT,
model: Car,
mapToModel: true
}).then(cars => {console.log(cars);})
});
executing this script (node cars.js) produces the following output:
Executing (default): CREATE TABLE IF NOT EXISTS "cars" ("id" INTEGER , "customizations" JSONB, "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL, "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL, PRIMARY KEY ("id"));
Executing (default): SELECT i.relname AS name, ix.indisprimary AS primary, ix.indisunique AS unique, ix.indkey AS indkey, array_agg(a.attnum) as column_indexes, array_agg(a.attname) AS column_names, pg_get_indexdef(ix.indexrelid) AS definition FROM pg_class t, pg_class i, pg_index ix, pg_attribute a WHERE t.oid = ix.indrelid AND i.oid = ix.indexrelid AND a.attrelid = t.oid AND t.relkind = 'r' and t.relname = 'cars' GROUP BY i.relname, ix.indexrelid, ix.indisprimary, ix.indisunique, ix.indkey ORDER BY i.relname;
Executing (default): INSERT INTO "cars" ("id","customizations","createdAt","updatedAt") VALUES (1,'{"color":"blue","lights":"led"}','2021-04-14 04:42:50.446 +00:00','2021-04-14 04:42:50.446 +00:00'),(2,'{"color":"red"}','2021-04-14 04:42:50.446 +00:00','2021-04-14 04:42:50.446 +00:00'),(3,'{}','2021-04-14 04:42:50.446 +00:00','2021-04-14 04:42:50.446 +00:00') RETURNING "id","customizations","createdAt","updatedAt";
Executing (default): select distinct cars.*
from cars, jsonb_each_text(customizations) kv(kk, vv)
where vv ~* $1
[
cars {
dataValues: {
id: 1,
customizations: [Object],
createdAt: 2021-04-14T04:42:50.446Z,
updatedAt: 2021-04-14T04:42:50.446Z
},
_previousDataValues: {
id: 1,
customizations: [Object],
createdAt: 2021-04-14T04:42:50.446Z,
updatedAt: 2021-04-14T04:42:50.446Z
},
_changed: Set(0) {},
_options: {
isNewRecord: false,
_schema: null,
_schemaDelimiter: '',
raw: true,
attributes: undefined
},
isNewRecord: false
}
]
Note that I've used an alternative (and likely less efficient) select statement in the example above.
You can use your original query, which IMO is the most straight-forward query for the target postgresql version (10.x), i.e.
const stmt = `
SELECT *
FROM cars
WHERE EXISTS (
SELECT 1
FROM JSONB_EACH_TEXT(customizations) WHERE "value" ~* $search_pattern
)
`;
If you modify your condition like this:
SELECT * FROM "Cars" WHERE (
SELECT "value" FROM JSONB_EACH_TEXT("customizations")) ~* 'BLU'
then you can use Sequelize.where in a conjunction with Sequelize.literal:
where: Sequelize.where(
Sequelize.literal('(SELECT "value" FROM JSONB_EACH_TEXT("customizations"))'),
Op.iRegexp,
'BLU'
)
Upd.
This solution will work only if a subquery returns 1 record.

How to put createdAt (timestamp ) manually in MongoDB

How can I manually add a createdAt (timestamp) field in MongoDB?
db.getCollection("properties").find({})
This returns all properties and this is an example:
{
"_id" : ObjectId("5e6b3269617d327b82a71723"),
"title" : "Nice and cozy apartment",
"category" : "rent",
"town" : "London",
"price" : NumberInt(3100),
"square" : NumberInt(249),
}
What I want to achieve is a field with timestamp or information about the date when a particular property is created.
Can anybody help with this?
Just put a key in the mongoose schema:
createdAt: {type: Date, default: Date.now};
// Will automatically put an entry in the db whenever a document is created. Note: pass the function reference Date.now (without parentheses), so it is evaluated at insert time rather than once at schema-definition time.
and use aggregate instead of find (because the createdAt key does not exist in old documents), like this:
db.orders.aggregate([
{
$addFields: {
"createdAt": { $ifNull: ["$createdAt", Date(manually whenever created)] },
}
}
])

find records where field type is string

I have this records :
{id : 1 , price : 5}
{id : 2 , price : "6"}
{id : 3 , price : 13}
{id : 4 , price : "75"}
I want to build a query that gets just the records whose price has type "string".
so, it will get :
{id : 2 , price : "6"}
{id : 4 , price : "75"}
You can use the $type query operator to do this:
db.test.find({price: {$type: 2}})
If you're using MongoDB 3.2+, you can also use the string alias for the type:
db.test.find({price: {$type: 'string'}})
While @JohnnyHK's answer is absolutely correct in most cases, MongoDB also returns documents where the field is an array and any of the elements in that array have that type (docs). So for example, the document
{
_id: 1,
tags: ['123']
}
is returned for the query Books.find({ tags: { $type: "string" } }) as well. To prevent this, you can adjust the query to be
Books.find({
tags: {
$type: "string",
$not: {
$type: "array"
}
}
})

Mongoose Compound Index Unique + Sparse

I want to create an index which ensures, that I don't have a duplicate serialNr within the combination of a manufacturer + art. But some items don't have a serialNr. These I don't want to check/index.
Code
mySchema.index({ serialNr: 1, art: 1 , manufacturer: 1, deleted: 1}, { unique: true, sparse: true })
I tried it also with adding a partial Filter:
partialFilterExpression: { serialNr: {$ne:null} } to the index options.
Question
How can I index it that inserting:
[{art: 'a', manufacturer:'a', deleted:null}, {art: 'a', manufacturer:'a', deleted:null}] passes
and
[{serialNr: '123', art: 'a', manufacturer:'a', deleted:null}, {serialNr: '123', art: 'a', manufacturer:'a', deleted:null}]fails
I'm not trying to save:
[{serialNr: null, art: 'a', manufacturer:'a', deleted:null}, {serialNr: null, art: 'a', manufacturer:'a', deleted:null}]
The mongo docs state, that it should not be indexed if a field is missing in a compound index:
Sparse indexes selects documents to index solely based on the
existence of the indexed field, or for compound indexes, the existence
of the indexed fields.
https://docs.mongodb.org/manual/core/index-partial/
EDIT:
New Index
implantSchema.index({ serialNr: 1, art: 1 , manufacturer: 1, deleted: 1}, { unique: true, partialFilterExpression: { serialNr: {$exists:true} }})
Before I make changes on the index I drop my collection in my dev-database, to ensure that there is no old index existing. This is the new index which is returned on db.items.getIndexes()
/* 0 */
{
"0": {
"v" : 1,
"key" : {
"_id" : 1
},
"name" : "_id_",
"ns" : "LOC_db.items"
},
"1": {
"v" : 1,
"unique" : true,
"key" : {
"serialNr" : 1,
"art" : 1,
"manufacturer" : 1,
"deleted" : 1
},
"name" : "serialNr_1_art_1_manufacturer_1_deleted_1",
"ns" : "LOC_db.items",
"partialFilterExpression" : {
"serialNr" : {
"$exists" : true
}
},
"background" : true
}
}
Data
I have a pre save hook where I check the value of serialNr, which does not exist before, the dup error is returned.
When querying the database there is no doc existing which has null, "" as value for the serialNr.
The query db.items.find({ serialNr: {$exists:true} }) does not match any items.
Error
insertDocument :: caused by :: 11000 E11000 duplicate key error index: LOC_db.items.$serialNr_1_art_1_manufacturer_1_deleted_1 dup key: { : null, : "Robo", : "Olymp", : null }
Additional Information
Maybe the error is related to:
https://github.com/Automattic/mongoose/issues/2457
You were actually quite close to the solution; all you need is to use this when creating the index:
partialFilterExpression: { serialNr: {$exists:true} }
Be sure to have at least 3.2 version of mongo
Example :
db.col.createIndex({a:1, b:1},{unique:true, partialFilterExpression: { a: {$exists:true} }})
db.col.save({a:50, b:27}) //pass
db.col.save({a:50, b:27}) //fails
db.col.save({b:27}) //pass
db.col.save({b:27}) //pass
db.col.save({b:27}) //pass
db.col.save({a:50, b:27}) //fails
db.col.save({a:50}) //pass
db.col.save({a:50}) //fails
PS : Maybe you can do little "hack" and use this : partialFilterExpression: { serialNr: {$exists:true}, serialNr: {$gte:0} }
I tried using $ne, but it was throwing me an error.

How to use indexes in aggregate

I have created the index using the createIndex and then ensure using ensureIndex
db.getCollection('artists').createIndex( { featured : NumberInt(-1), physicalCount : NumberInt(-1),digitalCount : NumberInt(-1),createdAt: NumberInt(-1) } )
db.getCollection('artists').ensureIndex( { featured : NumberInt(-1), physicalCount : NumberInt(-1),digitalCount : NumberInt(-1),createdAt: NumberInt(-1) } )
which creates the index:
"8" : {
"v" : 1,
"key" : {
"featured" : -1,
"physicalCount" : -1,
"digitalCount" : -1,
"createdAt" : -1
},
"name" : "featured_-1_physicalCount_-1_digitalCount_-1_createdAt_-1",
"ns" : "global-rockstar.artists"
}
and now I want to use this index in the aggregate query. I know an index is not used when we use the $project or $group stage in aggregate. I want to redesign this query so that it can use the indexes, but I am unable to create one.
db.getCollection('artists').aggregate([{
$match: query <- initially it is {}
}, {
$project: {
_id: 1,
name: 1,
genres_music: 1,
slug: 1,
country: 1,
featured: 1,
picture: 1,
fans: 1,
fans_a: 1,
credits: 1,
totalPlays: 1,
createdAt: 1,
fanCount: 1,
physicalCount: 1,
digitalCount: 1,
matches: {
$add: [{
$cond: [{
$or: [{
$eq: ['$featured', true]
}, {
$eq: ['$featured', 'on']
}]
},
3, 0
]
}, {
$size: {
$setIntersection: ['$genres_music', usergenres]
}
}]
}
}
}, {
$sort: {
matches: -1,
physicalCount : -1,
digitalCount : -1,
createdAt: -1
}
}, {
$skip: pageSize * page <---- pageSize- 15 , page= 0
}, {
$limit: parseFloat(pageSize)
}])
when I am firing the query like
db.getCollection('artists').find({}).sort({
//"featured" : -1,
"physicalCount" : -1,
"digitalCount" : -1,
"createdAt" : -1
})
this is giving the unexpected result
If anyone knows how to use this aggregate query with indexes, please help me out.
According to MongoDB documentation:
The $match and $sort pipeline operators can take advantage of an index
when they occur at the beginning of the pipeline.
So If you can, you should put your $sort stage before $project and $group stages.
Placing a $match pipeline stage followed by a $sort stage at the start
of the pipeline is logically equivalent to a single query with a sort
and can use an index. When possible, place $match operators at the
beginning of the pipeline.
In addition, It is preferable to place $skip and $limit stages as early as possible.
Early Filtering
If your aggregation operation requires only a subset
of the data in a collection, use the $match, $limit, and $skip stages
to restrict the documents that enter at the beginning of the pipeline.
When placed at the beginning of a pipeline, $match operations use
suitable indexes to scan only the matching documents in a collection.
And finally,
For a compound index, MongoDB can use the index to support queries on
the index prefixes.

Resources