Retrieving single and multiple objects via GraphQL - node.js

I'm less than a couple of weeks into using Apollo and GraphQL, and I'd like to retrieve multiple objects via GraphQL, but it won't allow me to.
With the query as:
const GET_ALL_PURCHASES_QUERY = (statusOfPurchase) => {
  return gql`
    query {
      getAllPurchases(statusOfPurchase: "${statusOfPurchase}") {
        id
        customerInformation {
          customerName
          customerEmailAddress
        }
        createdAt
        updatedAt
      }
    }
  `
}
... and in the schema:
const RootQuery = new GraphQLObjectType({
  name: 'RootQueryType',
  fields: {
    getAllPurchases: {
      type: PurchaseType,
      args: {
        statusOfPurchase: {
          type: new GraphQLNonNull(GraphQLString)
        }
      },
      resolve(parent, args) {
        return PurchasesModel.schemaForPurchases.find({
          statusOfPurchase: args.statusOfPurchase
        }).limit(10)
          .then(purchases => {
            console.log('Schema:getAllPurchases()', purchases)
            return purchases
          })
      }
    }
  }
})
Result in Node via the Terminal is:
Schema:getAllPurchases() [
  {
    _id: 60351a691d3e5a70d63eb13e,
    customerInformation: [ [Object] ],
    statusOfPurchase: 'new',
    createdAt: 2021-02-23T15:08:25.230Z,
    updatedAt: 2021-02-23T15:08:25.230Z,
    __v: 0
  },
  {
    _id: 60351b966de111716f2d8a6d,
    customerInformation: [ [Object] ],
    statusOfPurchase: 'new',
    createdAt: 2021-02-23T15:13:26.552Z,
    updatedAt: 2021-02-23T15:13:26.552Z,
    __v: 0
  }
]
Correct.
But in the application within Chrome, it's a single object with null as the value of each field.
With the query as:
const GET_ALL_PURCHASES_QUERY = () => {
  return gql`
    query {
      getAllPurchases {
        id
        customerInformation {
          customerName
          customerEmailAddress
        }
        createdAt
        updatedAt
      }
    }
  `
}
... and with the appropriate changes to the schema, the result is the same as before: I see two objects in Node, but in Chrome a single object whose fields are all null.
If I change return purchases to return purchases[0], I see the first object in Chrome with the correct values.
How am I supposed to return more than one object?

In your schema, the type of the getAllPurchases field is set to PurchaseType, i.e. a single purchase. To have the field return a list of purchases, use new GraphQLList(PurchaseType) instead. That's why the query resolves to null when the resolver returns an array (the declared type doesn't match), but correctly returns a purchase when you return a single element.
See the graphql docs for an example of this.
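For reference, here is a minimal sketch of that change against the schema from the question; only the type wrapper changes, the args and resolver stay as they are:

const { GraphQLObjectType, GraphQLList, GraphQLNonNull, GraphQLString } = require('graphql')

const RootQuery = new GraphQLObjectType({
  name: 'RootQueryType',
  fields: {
    getAllPurchases: {
      // Wrapping the element type in GraphQLList declares the field as a list
      type: new GraphQLList(PurchaseType),
      args: {
        statusOfPurchase: {
          type: new GraphQLNonNull(GraphQLString)
        }
      },
      resolve(parent, args) {
        // The array returned by find() now matches the declared list type
        return PurchasesModel.schemaForPurchases.find({
          statusOfPurchase: args.statusOfPurchase
        }).limit(10)
      }
    }
  }
})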

Related

Update multiple objects in nested array

Question: Is it possible to update multiple objects in a nested array based on another field in the objects, using a single Mongoose method?
More specifically, I'm trying to update subscribed in each object of the Contact.groups array where the object's name value is included in groupNames. Solution 1 works, but it seems messy and inefficient to use both findOne() and save(). Solution 2 is close to working with just findOneAndUpdate(), but only the first eligible object in Contact.groups is updated. Am I able to update all the eligible objects using just findOneAndUpdate()?
Contact schema (trimmed down to relevant info):
{
  phone: { type: String, unique: true },
  groups: [
    {
      name: { type: String },
      subscribed: { type: Boolean }
    }
  ]
}
Variables I have at this point:
const phoneToUpdate = '1234567890' // Contact.phone to find
const groupNames = [ 'A', 'B', 'C' ] // Contacts.groups <obj>.name must be one of these
const subStatus = false // Contacts.groups <obj>.subscribed new value
Solution 1 (seems inefficient and messy):
Contact
  .findOne({ phone: phoneToUpdate })
  .then(contact => {
    contact.groups
      .filter(g => groupNames.includes(g.name))
      .forEach(g => g.subscribed = subStatus)
    contact
      .save()
      .then(c => console.log(c))
      .catch(e => console.log(e))
  })
  .catch(e => console.log(e))
Solution 2 (only updates the first matching object):
Contact
  .findOneAndUpdate(
    { phone: phoneToUpdate, 'groups.name': { $in: groupNames } },
    { $set: { 'groups.$.subscribed': subStatus } },
    { new: true }
  )
  .then(c => console.log(c))
  .catch(error => console.log(error))

// Example Contact after the findOneAndUpdate
{
  phone: '1234567890',
  groups: [
    { name: 'A', subscribed: false },
    { name: 'B', subscribed: true } // Should also be false
  ]
}
You cannot use the positional $ operator here, since it acts as a placeholder only for the first match:
The positional $ operator acts as a placeholder for the first match of
the update query document.
What you can use instead is the arrayFilters option. You can modify your query like this:
Contact.findOneAndUpdate({
  "phone": phoneToUpdate
},
{
  "$set": {
    "groups.$[elem].subscribed": subStatus
  }
},
{
  "arrayFilters": [
    {
      "elem.name": {
        "$in": groupNames
      }
    }
  ]
})
Here is a working example: https://mongoplayground.net/p/sBT-aC4zW93
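With the variables and example contact from the question, this update should leave both eligible groups updated; a hedged illustration of the expected document (not actual output):

{
  phone: '1234567890',
  groups: [
    { name: 'A', subscribed: false },
    { name: 'B', subscribed: false } // now updated as well
  ]
}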

Mongoose unique if not null and if state

I have a unique index like this
code: {
  type: String,
  index: {
    unique: true,
    partialFilterExpression: {
      code: { $type: 'string' }
    }
  },
  default: null
},
state: { type: Number, default: 0 },
But when the state is 2 (archived) I want to keep the code while still allowing it to be reused, so it cannot be unique when state is 2.
Is there any way that I could accomplish this?
This is possible, though it relies on a workaround documented here: https://jira.mongodb.org/browse/SERVER-25023.
In MongoDB 4.7 you will be able to apply different index options to the same field, but for now you can add a non-existent field to the second index's key pattern to keep the two indexes separate.
Here's an example using the workaround.
const mongoose = require('mongoose');

(async () => {
  const ItemSchema = mongoose.Schema({
    code: {
      type: String,
      default: null
    },
    state: {
      type: Number,
      default: 0,
    },
  });

  // Define a unique index for active items
  ItemSchema.index({code: 1}, {
    name: 'code_1_unique',
    partialFilterExpression: {
      $and: [
        {code: {$type: 'string'}},
        {state: {$eq: 0}}
      ]
    },
    unique: true
  })

  // Define a non-unique index for non-active items
  ItemSchema.index({code: 1, nonExistantField: 1}, {
    name: 'code_1_nonunique',
    partialFilterExpression: {
      $and: [
        {code: {$type: 'string'}},
        {state: {$eq: 2}}
      ]
    },
  })

  const Item = mongoose.model('Item', ItemSchema)

  await mongoose.connect('mongodb://localhost:27017/so-unique-compound-indexes')

  // Drop the collection for the test to run correctly
  await Item.deleteMany({})

  // Successfully create an item
  console.log('\nCreating a unique item')
  const itemA = await Item.create({code: 'abc'});

  // Throws an error when trying to create with the same code
  await Item.create({code: 'abc'})
    .catch(err => {console.log('\nThrowing a duplicate error when creating with the same code')})

  // Change the active code
  console.log('\nChanging item state to 2')
  itemA.state = 2;
  await itemA.save();

  // Successfully creates a new doc with the same code
  await Item.create({code: 'abc'})
    .then(() => console.log('\nSuccessfully created a new doc with the same code'))
    .catch(() => console.log('\nThrowing a duplicate error'));

  // Throws an error when trying to create with the same code again
  Item.create({code: 'abc'})
    .catch(err => {console.log('\nThrowing a duplicate error when creating with the same code again')})
})();
This is not possible using indexes alone. Even if you use a compound index on code and state, there will still be a case like this:
new document
{
  code: 'abc',
  state: 0
}
archived document
{
  code: 'abc',
  state: 2
}
Now, although you have the same code, you will not be able to archive the new document or unarchive the archived document.
You can do something like this:
const checkCode = await this.Model.findOne({ code: 'abc', state: 0 })
if (checkCode) {
  throw new Error('Code has to be unique')
} else {
  // ...do something
}

Sequelize - Select on associated table

My problem:
I am creating a route which will return some information about a group: it has an id, an assigned user, and also some documents. I just want to show how many documents exist. In SQL this would be a SELECT COUNT, but how can I do this in Sequelize?
My code:
async list(req, res){
  const docGroups = await DocGroup.findAll({
    raw: true,
    include: [{
      model: User,
      as: 'userAssigned'
    },
    {
      model: Document,
      as: 'Document'
    }]
  }).then(groups => {
    const result = groups.map(group => {
      return Object.assign(
        {},
        {
          id: group.id,
          name: group.name,
          userAssinged: group['userAssigned.firstName'],
          docAmount: // I want to put documents' count here
        }
      )
    })
    console.log(groups)
  })
}
What is printed in console.log(groups):
[
  {
    id: 1,
    name: 'pleaseworks',
    createdAt: 2020-06-10T02:38:11.531Z,
    updatedAt: 2020-06-10T02:38:11.531Z,
    'userAssigned.id': 1,
    'userAssigned.firstName': 'Please',
    'userAssigned.lastName': 'Works',
    'userAssigned.email': 'pleaseworks@gmail.com',
    'userAssigned.password': '$2a$08$3BA4I4dsaQ3lsHy342344b5P41v5eHWjwqv6dve28nSdqbGvhsdS',
    'userAssigned.createdAt': 2020-06-10T02:37:29.062Z,
    'userAssigned.updatedAt': 2020-06-10T02:37:29.062Z,
    'userAssigned.groupId': null,
    'Document.id': 2,
    'Document.description': 'deowkdopewkdwe',
    'Document.content': 'odepodkewokodwe',
    'Document.groupId': 1,
    'Document.createdAt': 2020-06-10T02:43:46.005Z,
    'Document.updatedAt': 2020-06-10T02:43:46.005Z
  }
]
If DocGroup has many Documents, try something like this:
{
  model: Document,
  attributes: [[sequelize.fn('COUNT', sequelize.col('id')), 'docAmount']],
  as: 'Document'
}
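As a hedged, fuller variant of that idea: counting an associated model usually also needs the aggregate listed in attributes and a group clause on the parent's primary key, otherwise the COUNT collapses everything into one row. The model names and aliases below come from the question; the exact group keys are an assumption to adapt to your own definitions.

const docGroups = await DocGroup.findAll({
  raw: true,
  attributes: [
    'id',
    'name',
    // Count the joined Document rows per group (alias name is arbitrary)
    [sequelize.fn('COUNT', sequelize.col('Document.id')), 'docAmount']
  ],
  include: [
    { model: User, as: 'userAssigned', attributes: ['firstName'] },
    // attributes: [] keeps Document columns out of the SELECT; only the COUNT is used
    { model: Document, as: 'Document', attributes: [] }
  ],
  // Group by the parent row (and joined user) so the COUNT is per DocGroup
  group: ['DocGroup.id', 'userAssigned.id']
})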

How to grab field value during a MongooseModel.bulkWrite operation?

Context:
I am trying to upsert in bulk an array of data, with an additional computed field: 'status'.
Status should be either:
- 'New' for newly inserted docs;
- 'Removed' for docs present in DB but absent from the incoming dataset;
- a percentage describing the evolution of the price field, comparing the value in DB to the one in the incoming dataset.
Implementations:
data.model.ts
import { Document, model, Model, models, Schema } from 'mongoose';
import { IPertinentData } from './site.model';
const dataSchema: Schema = new Schema({
  sourceId: { type: String, required: true },
  name: { type: String, required: true },
  price: { type: Number, required: true },
  reference: { type: String, required: true },
  lastModified: { type: Date, required: true },
  status: { type: Schema.Types.Mixed, required: true }
});
export interface IData extends IPertinentData, Document {}
export const Data: Model<IData> = models.Data || model<IData>('Data', dataSchema);
data.service.ts
import { Data, IPertinentData } from '../models';
export class DataService {
  static async test() {
    // await Data.deleteMany({});
    const data = [
      {
        sourceId: 'Y',
        reference: `y0`,
        name: 'y0',
        price: 30
      },
      {
        sourceId: 'Y',
        reference: 'y1',
        name: 'y1',
        price: 30
      }
    ];
    return Data.bulkWrite(
      data.map(function(d) {
        let status = '';
        // @ts-ignore
        console.log('price', this);
        // @ts-ignore
        if (!this.price) status = 'New';
        // @ts-ignore
        else if (this.price !== d.price) {
          // @ts-ignore
          status = (d.price - this.price) / this.price;
        }
        return {
          updateOne: {
            filter: { sourceId: d.sourceId, reference: d.reference },
            update: {
              $set: {
                // Set percentage value when current price is greater/lower than new price
                // Set status to nothing when new and current prices match
                status,
                name: d.name,
                price: d.price
              },
              $currentDate: {
                lastModified: true
              }
            },
            upsert: true
          }
        };
      })
    );
  }
}
... then in my backend controller, I just call it from some route:
try {
  const results = await DataService.test();
  return new HttpResponseOK(results);
} catch (error) {
  return new HttpResponseInternalServerError(error);
}
Problem:
I've tried lots of implementation syntaxes, but they all failed, whether because of type casting, unsupported syntax such as the $ symbol, or restrictions due to the aggregation...
I feel like the above solution might be closest to a working scenario, but I'm missing a way to grab the value of the price field BEFORE the actual computation of status and its replacement with the updated value.
Here the value of this is undefined, while it is supposed to point to the current document.
Questions:
Am I using the correct Mongoose way to do a bulk update?
If yes, how do I get the field value?
Environment:
NodeJS 13.x
Mongoose 5.8.1
MongoDB 4.2.1
EUREKA !
Finally found a working syntax, pfeeeew...
...
return Data.bulkWrite(
  data.map(d => ({
    updateOne: {
      filter: { sourceId: d.sourceId, reference: d.reference },
      update: [
        {
          $set: {
            lastModified: Date.now(),
            name: d.name,
            status: {
              $switch: {
                branches: [
                  // Set status to 'New' for newly inserted docs
                  {
                    case: { $eq: [{ $type: '$price' }, 'missing'] },
                    then: 'New'
                  },
                  // Set percentage value when current price is greater/lower than new price
                  {
                    case: { $ne: ['$price', d.price] },
                    then: {
                      $divide: [{ $subtract: [d.price, '$price'] }, '$price']
                    }
                  }
                ],
                // Set status to nothing when new and current prices match
                default: ''
              }
            }
          }
        },
        {
          $set: { price: d.price }
        }
      ],
      upsert: true
    }
  }))
);
...
Explanations:
Several problems were blocking me:
- the '$field_value_to_check' syntax had to be used instead of this.field, whose this was undefined...
- the syntax with the $ symbol seems to work only within an aggregation-pipeline update, i.e. update: [], even if there is only a single $set inside...
- the first condition, used for the docs inserted by the upsert, needs to check for the existence of the price field; only the syntax with the BSON $type worked...
Hope it helps other devs in the same scenario.

Add or push new object to nested mongodb document

I can't seem to find an answer to this on Stack or in the Mongoose docs. How do I add a new object into a nested document?
This is my current schema:
var SessionsSchema = mongoose.Schema({
  session: {
    sid: String,
    dataloop: {
      timeStamp: Date,
      sensorValues: {
        value: Number,
        index: Number
      }
    }
  }
});
Upon receiving new data from the client, I need to push into the existing session document. I've tried both $addToSet and $push, but neither gives me the correct results.
This is the $push:
Sessions.findOneAndUpdate(
  { 'session.sid': sessionID },
  {
    '$push:': {
      dataloop: {
        timeStamp: datemilli,
        sensorValues: {
          value: pressure,
          index: indexNum,
          sessionTime: relativeTime
        }
      }
    }
  },
  function(err, loop) {
    console.log(loop);
  }
)
Here is my expected output:
_id: 58bb37a7e2950617355fab0d
session: Object
  sid: 8
  dataloop: Object
    timeStamp: 2017-03-04 16:54:27.057
    sensorValues: Object
      value: 134
      index: 18
      sessionTime: 0
  dataloop: Object // <----------NEW OBJECT ADDED HERE
    timeStamp: 2017-03-04 16:54:27.059
    sensorValues: Object
      value: 134
      index: 18
      sessionTime: 0
  dataloop: Object // <----------ANOTHER NEW OBJECT
    timeStamp: 2017-03-04 16:54:27.059
    sensorValues: Object
      value: 134
      index: 18
      sessionTime: 0
__v: 0
If you consider changing your schema so that dataloop is an array:
var SessionsSchema = mongoose.Schema({
  session: {
    sid: String,
    dataloop: [{
      timeStamp: Date,
      sensorValues: {
        value: Number,
        index: Number
      }
    }]
  }
});
You could use $push on session.dataloop to add a new dataloop item:
Sessions.findOneAndUpdate({ 'session.sid': sessionID }, {
  '$push': {
    'session.dataloop': {
      timeStamp: datemilli,
      sensorValues: {
        value: pressure,
        index: indexNum,
        sessionTime: relativeTime
      }
    }
  }
},
function(err, loop) {
  console.log(loop);
}
)
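One small usage note on the callback above: by default, Mongoose's findOneAndUpdate hands the callback the document as it was before the update. If you want console.log(loop) to show the newly pushed item, you can pass the { new: true } option; a minimal sketch reusing the same variables:

Sessions.findOneAndUpdate(
  { 'session.sid': sessionID },
  {
    '$push': {
      'session.dataloop': {
        timeStamp: datemilli,
        sensorValues: {
          value: pressure,
          index: indexNum,
          sessionTime: relativeTime
        }
      }
    }
  },
  { new: true }, // return the updated document instead of the pre-update one
  function(err, loop) {
    console.log(loop);
  }
)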
