I'm implementing async functionality in FastAPI using sqlalchemy.ext.asyncio and asyncpg. My problem is that the result of my joined query is not eagerly loaded, even though I set __mapper_args__ = {"eager_defaults": True} on my SQLModel table and used joinedload in the query.
This is my Users class as a SQLModel:
class Users(SQLModel, table=True):
    __tablename__: str = "users"
    id: Optional[int] = Field(default=None, primary_key=True)
    email: str = Field(index=True, sa_column=Column('email', String, unique=True))
    password: str = Field(max_length=1024)
    username: str = Field(max_length=100, nullable=True, index=True)
    # ...some other fields here...
    user_role: Optional[UserRoles] = Relationship(back_populates='user')
    user_detail: Optional[UserDetails] = Relationship(back_populates='user')
    __mapper_args__ = {"eager_defaults": True}
This is my UserRoles class as a SQLModel:
class UserRoles(SQLModel, table=True):
    __tablename__: str = "user_roles"
    id: Optional[int] = Field(default=None, primary_key=True)
    role: str
    # ...some other fields here...
    user: List["Users"] = Relationship(back_populates='user_role')
This is my FastAPI endpoint using sqlalchemy.ext.asyncio:
'''
Note:
@user_route.get("/get_all_with_role", response_model=List[UserWithRolesRead])
this will return:
response -> 0 -> Users field required (type=value_error.missing)
response -> 0 -> UserRoles field required (type=value_error.missing)
'''
@user_route.get("/get_all_with_role")
async def get_all_with_role(
    email=Depends(token_service.secure),
    session: AsyncSession = Depends(session_local),
):
    async with session as sess:
        query = (
            select(Users, UserRoles)
            .join(UserRoles)
            .options(joinedload(Users.user_role))
        )
        result = await sess.execute(query)
        result = result.scalars().first()  # I selected only one for the sake of debugging
        print(type(result))  # returns <class 'src.models.users.Users'>
        print(result)
        return result
The output of print(result) in my console is:
password='qwe123' id=7 active=True created_date=datetime.datetime(2022, 4, 3, 14, 26, 10, 109696)
locked_end_date=None email='admin#admin.com' username='admin' user_role_id=4 last_login_date=None
user_role=UserRoles(id=4, created_date=datetime.datetime(2022, 2, 4, 0, 0), active=True, role='Admin')
As the printed result shows, user_role holds the related UserRoles instance. But the value actually returned from the last line of @user_route.get("/get_all_with_role") is:
{
    "password": "qwe123",
    "id": 7,
    "active": true,
    "created_date": "2022-04-03T14:26:10.109696",
    "locked_end_date": null,
    "email": "admin#admin.com",
    "username": "admin",
    "user_role_id": 4,
    "last_login_date": null
}
To summarize: the SQLAlchemy query itself is working properly, since I can get the desired result in my terminal, but when I return the result from the endpoint, user_role is missing.
[UPDATE]
The result of the previous implementation, without the async functionality in the endpoint, was this:
# FastAPI endpoint, not async
@user_route.get("/get_all_with_role")
async def get_all_with_role(email=Depends(token_service.secure)):
    _sess = db.session_local()
    with _sess as sess:
        query = select(Users, UserRoles).join(UserRoles)
        result = sess.exec(query)
        return result.first()
# Result
{
    "Users": {
        "password": "$2b$12$ubLo5CMeORXikXrl8gI58OexeUxgqM/HI57yk6briHi1nvmwqO8R.",
        "id": 9,
        "active": true,
        "created_date": "2022-04-05T11:53:13.875607",
        "locked_end_date": null,
        "email": "0admin",
        "username": "0admin",
        "user_role_id": 4,
        "last_login_date": null
    },
    "UserRoles": {
        "id": 4,
        "created_date": "2022-02-04T00:00:00",
        "active": true,
        "role": "Admin"
    }
}
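For reference, one direction I am considering is a read-only response model that nests the relationship explicitly, so that user_role is serialized instead of being dropped. This is an untested sketch: UserRolesRead and UserWithRolesRead are hypothetical names, and the imports and dependencies are the same ones already used in the endpoint above.

# Untested sketch: hypothetical read-only schemas that nest the relationship.
class UserRolesRead(SQLModel):
    id: Optional[int] = None
    role: str

class UserWithRolesRead(SQLModel):
    id: Optional[int] = None
    email: str
    username: str
    user_role: Optional[UserRolesRead] = None  # nested, so it shows up in the JSON

@user_route.get("/get_all_with_role", response_model=List[UserWithRolesRead])
async def get_all_with_role(
    email=Depends(token_service.secure),
    session: AsyncSession = Depends(session_local),
):
    async with session as sess:
        # joinedload keeps user_role loaded before the session closes
        query = select(Users).options(joinedload(Users.user_role))
        result = await sess.execute(query)
        return result.scalars().all()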
Does anyone have a solution for this? I want the API data in this manner: data for the same state should come together under one object rather than separate objects, while different states can be different objects.
data = [{
    state_name: New_jersi,
    data: {
        category: Phishing,
        sub_cat_data: [
            {name: SubCat1, count: 20},
            {name: SubCat2, count: 30}
        ]
    }
    category: malware,
    sub_cat_data: [
        {name: SubCat1, count: 20},
        {name: SubCat2, count: 30}
    ]
},
{
    state_name: Washinton,
    data: {
        category: Phishing,
        data: [
            {name: SubCat1, count: 20},
            {name: SubCat2, count: 30}
        ]
    }
}]
But my API response is:
{
    "state": "South Carolina",
    "state_count": 2,
    "Website Compromise/Intrusion": {
        "sub_category": {
            "Insecure Direct Object Reference": 2,
            "Memory Corruption": 2,
            "SQLI": 1,
            "Stack Overflow": 1,
            "XSRF": 1,
            "Heap Overflow": 1,
            "Security Misconfiguration": 1
        }
    }
},
{
    "state": "South Carolina",
    "state_count": 1,
    "Phishing": {
        "sub_category": {
            "Spear Phishing Attacks": 2,
            "Fast Flux": 2,
            "Rock fish": 2,
            "Identify Theft/Social Engineering": 1,
            "Phishing Redirector": 1,
            "Pharming": 1,
            "Exploitation of Hardware Vulnerability": 1
        }
    }
},
I want data for the same state to be in the same object, but in my case the same state ends up in separate objects because the data is grouped by category rather than by state.
My logic is below:
cat_count = incnum.values('incident_category__cat_name', 'incident_category__cat_id').annotate(count=Count('incident_category__cat_id'))
subcat_count = incnum.values('incident_sub_category__sub_cat_name', 'incident_sub_category__cat_id', 'incident_sub_category__id').annotate(count=Count('incident_sub_category__cat_id'))
reporter_state_count1 = incnum.values('incident_category__cat_id', 'reporter__comp_individual_state', 'reporter__comp_individual_state__name').annotate(count=Count('incident_category__cat_id'))

for x, state_ in enumerate(reporter_state_count1):
    for i, cat_ in enumerate(cat_count):
        if state_['incident_category__cat_id'] == cat_['incident_category__cat_id']:
            for i, cat_ in enumerate(cat_count):
                if state_['incident_category__cat_id'] == cat_['incident_category__cat_id']:
                    arr16.append({'state': state_['reporter__comp_individual_state__name'], 'state_count': state_['count'], cat_['incident_category__cat_name']: {'sub_category': {}}})
                    for sub_ in subcat_count:
                        if cat_['incident_category__cat_id'] == sub_['incident_sub_category__cat_id']:
                            arr16[i][cat_['incident_category__cat_name']]['sub_category'].update({sub_['incident_sub_category__sub_cat_name']: sub_['count']})
cat_count = incnum.values('incident_category__cat_name', 'incident_category__cat_id').annotate(
    count=Count('incident_category__cat_id'))
subcat_count = incnum.values('incident_sub_category__sub_cat_name', 'incident_sub_category__cat_id',
                             'incident_sub_category__id').annotate(count=Count('incident_sub_category__cat_id'))
reporter_state_count1 = incnum.values('incident_category__cat_id', 'reporter__comp_individual_state',
                                      'reporter__comp_individual_state__name').annotate(
    count=Count('incident_category__cat_id'))

arr16 = []
for state_ in reporter_state_count1:
    state_data = {"state_name": state_['reporter__comp_individual_state__name'], "data": []}
    for cat_ in cat_count:
        if state_['incident_category__cat_id'] == cat_['incident_category__cat_id']:
            sub_cat_data = [{sub_['incident_sub_category__sub_cat_name']: sub_['count']}
                            for sub_ in subcat_count
                            if cat_['incident_category__cat_id'] == sub_['incident_sub_category__cat_id']]
            category_data = {"category": cat_['incident_category__cat_name'], "sub_cat_data": sub_cat_data}
            state_data["data"].append(category_data)
    arr16.append(state_data)
One state might have multiple categories; the way you are trying to build your API, it won't be able to show multiple categories for a state. That is why I modified it a little bit: you will find all the categories inside the state object.
Edit
Create a dictionary that stores category_id as key and all the subcategories of that category as value:
cat_to_subcat_list = {}
for cat_ in cat_count:
    sub_cat_data = [{"name": sub_['incident_sub_category__sub_cat_name'], "count": sub_['count']}
                    for sub_ in subcat_count
                    if cat_['incident_category__cat_id'] == sub_['incident_sub_category__cat_id']]
    cat_to_subcat_list[cat_['incident_category__cat_id']] = {"category": cat_['incident_category__cat_name'], "sub_cat_data": sub_cat_data}
Create a dictionary that stores the state name as key and a list of category objects as value:
state_data = {}
for state_ in reporter_state_count1:
    if state_['reporter__comp_individual_state__name'] not in state_data:
        '''This if statement checks whether state_name already exists.
        If it does not exist in the dictionary, an empty list is created as its value.'''
        state_data[state_['reporter__comp_individual_state__name']] = []
    state_data[state_['reporter__comp_individual_state__name']].append(cat_to_subcat_list[state_['incident_category__cat_id']])
Re-format the JSON as the API needs it:
arr16 = [
    {
        "state_name": state_name,
        "data": state_data
    }
    for state_name, state_data in state_data.items()
]
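To make the grouping idea easier to follow in isolation, here is a small self-contained sketch of the same approach with hard-coded placeholder rows standing in for the querysets above (the values are made up for illustration only):

from collections import defaultdict

# Placeholder data standing in for cat_to_subcat_list / reporter_state_count1 above.
cat_to_subcat_list = {
    1: {"category": "Phishing", "sub_cat_data": [{"name": "SubCat1", "count": 20}]},
    2: {"category": "malware", "sub_cat_data": [{"name": "SubCat2", "count": 30}]},
}
state_rows = [
    {"state_name": "New_jersi", "cat_id": 1},
    {"state_name": "New_jersi", "cat_id": 2},
    {"state_name": "Washinton", "cat_id": 1},
]

# Group every category object under its state, so one state ends up in one object.
grouped = defaultdict(list)
for row in state_rows:
    grouped[row["state_name"]].append(cat_to_subcat_list[row["cat_id"]])

arr16 = [{"state_name": name, "data": data} for name, data in grouped.items()]
# arr16 now has one entry per state, each holding all of that state's categories.
print(arr16)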
My DRF serializer contains a group field (many-to-many) and an inventory field (foreign key). Both are missing from the default DRF HTML form but are present in the GET view. Currently the depth option is enabled in the serializer. If I remove depth, the foreign key appears in the default HTML form, but the group many-to-many field is still missing. I need both fields for a POST call, i.e. in the DRF HTML form.
Do I have to write a create method? I do not want to create new records for the foreign key and many-to-many relations, I just want to reuse the existing ones.
My Serializer class.
class MainHostSerializer(serializers.ModelSerializer):
    class Meta:
        model = MainHost
        fields = (
            'host_id',
            'host_name',
            'inventory',
            'group'
        )
        # depth = 2
Raw view for default DRF HTML Form
{
    "host_id": null,
    "host_name": ""
}
Model Class
class MainHost(models.Model):
    host_id = models.IntegerField(verbose_name='HOST ID', primary_key=True)
    host_name = models.CharField(verbose_name='HOST NAME', max_length=512)
    inventory = models.ForeignKey(related_name='inv_ins', on_delete=models.SET_NULL, to='hosts.MainInventory', blank=True, null=True)
    group = models.ManyToManyField(MainGroup, related_name='hostgroups', through='HostGroup')
Create Method for MainHost Serializer
def create(self, validated_data):
    inv_data = validated_data.pop('inventory')
    inv_res = MainInventory.objects.create(**inv_data)
    group_data = validated_data.pop('group')
    host_data = MainHost.objects.create(inventory=inv_res, **validated_data)
    for g_data in group_data:
        inv_data = g_data.pop('inv_id')
        inv = MainInventory.objects.create(**inv_data)
        group_res = MainGroup.objects.create(inv_id=inv, **g_data)
        print(validated_data)
        HostGroup.objects.create(host=host_data, group=group_res)
    return host_data
This is the sample JSON:
{
    "count": 1692,
    "next": "http://127.0.0.1:8000/api/mainhost/?page=2",
    "previous": null,
    "results": [
        {
            "host_id": 4087,
            "host_name": "10.240.144.2",
            "inventory": {
                "inv_id": 91,
                "inv_name": "GNS Switches (TestNet)",
                "total_hosts": 539,
                "total_groups": 1,
                "org_name": "ABC_TestNet",
                "description": "Inventory of ABC switches on Testnet",
                "inv_variables": "environ: testnet"
            },
            "group": [
                {
                    "group_id": 280,
                    "group_name": "aruba",
                    "total_hosts": 539,
                    "total_groups": 0,
                    "inv_id": {
                        "inv_id": 91,
                        "inv_name": "ABC Switches (TestNet)",
                        "total_hosts": 539,
                        "total_groups": 1,
                        "org_name": "ABC_TestNet",
                        "description": "Inventory of ABC switches on Testnet",
                        "inv_variables": "environ: testnet"
                    },
                    "description": "imported",
                    "group_variables": "{}",
                    "groupinv_name": "ABC Switches (TestNet)",
                    "groupinv_description": "",
                    "groupinv_source": "scm",
                    "groupinv_path": "TEC/GNS/Switches/testnet.ini"
                }
            ],
            "description": "imported",
            "foreman_group": "[{'id': 280, 'name': 'aruba'}]",
            "host_variables": "{}",
            "ansible_facts": "{}"
        }
    ]
}
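One direction I am considering, but have not verified, is to declare the two relations explicitly as primary-key fields so they render as inputs in the browsable form, and to reuse existing rows in create() instead of creating new ones. The field and model names are the ones from above; this is only a sketch.

# Untested sketch: writable relation fields that reuse existing records.
class MainHostSerializer(serializers.ModelSerializer):
    inventory = serializers.PrimaryKeyRelatedField(
        queryset=MainInventory.objects.all(), allow_null=True, required=False
    )
    # group goes through HostGroup, so the links are created manually in create()
    group = serializers.PrimaryKeyRelatedField(
        queryset=MainGroup.objects.all(), many=True, required=False
    )

    class Meta:
        model = MainHost
        fields = ('host_id', 'host_name', 'inventory', 'group')

    def create(self, validated_data):
        groups = validated_data.pop('group', [])
        # inventory arrives as an existing MainInventory instance; nothing new is created
        host = MainHost.objects.create(**validated_data)
        for group in groups:
            # link existing MainGroup rows via the through model
            HostGroup.objects.create(host=host, group=group)
        return host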
I want to call a mutation roughly like this:
mutation {
    createPayment(p_obj: {"bob": 80, "job": 100}) {
        <fields here>
    }
}
What I could find was to accept a list of objects as input like:
[ {username: "bob", "amount": 80}, {username: "job", "amount": 100} ]
You can do something like this -
class PaymentInputType(graphene.InputObjectType):
    username = graphene.String()
    amount = graphene.Int()
And use the InputType inside your mutation as follows:
class CreatePayment(graphene.Mutation):
    class Arguments:
        input = PaymentInputType(required=True)

    ok = graphene.Boolean()

    @staticmethod
    def mutate(root, info, input):
        # save the changes here
        return CreatePayment(ok=True)
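If the mutation should accept the whole list from the question in one call, the same input type can be wrapped in graphene.List. A rough sketch (CreatePayments is a hypothetical name, not from the answer above):

class CreatePayments(graphene.Mutation):
    class Arguments:
        inputs = graphene.List(PaymentInputType, required=True)

    ok = graphene.Boolean()

    @staticmethod
    def mutate(root, info, inputs):
        for payment in inputs:
            # each item exposes payment.username and payment.amount; persist it here
            pass
        return CreatePayments(ok=True)

The corresponding call would then pass inputs: [{username: "bob", amount: 80}, {username: "job", amount: 100}].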
I use flask-marshmallow and mongoengine.
Also flask-restplus for my API server.
Here is my api.py
class BoardSchema(ma.Schema):
    class Meta:
        fields = ('no', 'title', 'body', 'tags', 'created_at', 'views')

board_schema = BoardSchema()
boards_schema = BoardSchema(many=True)

class ArticleList(Resource):
    def get(self):
        articles = Board.objects.all()
        return boards_schema.jsonify(articles)
model.py
from datetime import datetime
from mongoengine import *
from config import DB_NAME

connect(DB_NAME)

class Board(Document):
    d = datetime.now()
    date = "{}-{}-{}".format(d.year, d.month, d.day)

    no = SequenceField()
    title = StringField(required=True)
    body = StringField(required=True)
    tags = ListField(StringField())
    likes = ListField(StringField())
    views = ListField(StringField())
    password = StringField(required=True)
    created_at = DateTimeField(default=date)
    updated_at = DateTimeField(default=date)
When I access /article, the result looks like this:
{
    "body": "123",
    "created_at": "2018-08-20T00:00:00+00:00",
    "no": 1,
    "tags": [
        "MySQL",
        "C"
    ],
    "title": "\ud14c\uc2a4\ud2b8",
    "views": [
        "127.0.0.1"
    ]
}
in "views", ip will be added who read article.
But I want to count of all the list of views and include it to my result.
The result I wanted is here.
{
    "body": "123",
    "created_at": "2018-08-20T00:00:00+00:00",
    "no": 1,
    "tags": [
        "MySQL",
        "C"
    ],
    "title": "\ud14c\uc2a4\ud2b8",
    "views": 20
}
I'm new to flask-marshmallow, so I'm not sure how to solve this issue. Thanks.
Maybe you can try something like this:
from marshmallow import fields

class BoardSchemaCustom(ma.ModelSchema):
    class Meta:
        model = Board

    # dump "views" through a custom method
    views = fields.Method(serialize="_custom_serializer")

    def _custom_serializer(self, obj):
        return len(obj.views)
Create an instance of your custom schema:
custom_board_schema = BoardSchemaCustom()
and dump it:
dump, errors = custom_board_schema.dump(Board.objects.first())
>>> dump
I've got the same problem, and my code worked after installing marshmallow-sqlalchemy:
pip install marshmallow-sqlalchemy
See the official documentation:
https://flask-marshmallow.readthedocs.io/en/latest/
The snippet below would also work:
from marshmallow import fields

class BoardSchemaCustom(ma.ModelSchema):
    class Meta:
        model = Board

    views = fields.Function(lambda obj: len(obj.views))
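Since Board is a mongoengine Document rather than a SQLAlchemy model, ma.ModelSchema (which relies on marshmallow-sqlalchemy) may not fit this setup. A plain schema with an explicit field list avoids that dependency; here is a rough sketch (BoardCountSchema is a hypothetical name):

from marshmallow import fields

class BoardCountSchema(ma.Schema):
    class Meta:
        # same field list as BoardSchema above
        fields = ('no', 'title', 'body', 'tags', 'created_at', 'views')

    # override "views" so it is dumped as a count instead of the raw list of IPs
    views = fields.Method("get_views_count")

    def get_views_count(self, obj):
        return len(obj.views or [])

board_count_schema = BoardCountSchema()
result = board_count_schema.dump(Board.objects.first())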
I am using kibana and Elasticsearch version 5.1.1 and python version 3.6.
I have created my index like this
put_books
The function to add a user is this one :
def add_user(first_name, last_name, age, mail):
    doc = {"first_name": "" + first_name, "last_name": "" + last_name, "age": age, "email": "" + mail}
    global id_user
    res = es.index(index="books", doc_type="user", id=id_user, body=doc)
    id_user += 1
    print(res['result'])
and to add preferences :
def add_preferences(preferences, i):
    doc = es.get(index="books", doc_type="user", id=id_book)
    res = es.update(index="books", doc_type="user", id=i, body={'doc': {"keyword_preferences": preferences}})
My problem is here: adding preferences succeeds, but if I add preferences again, the new value replaces the old one:
id_user = 1
nom = "nom_1"
prenom = "prenom_1"
age = 45
email = "adresse_mail_1"
add_user(prenom, nom, age, email)
add_preferences("comique", 1)
add_preferences("horreur", 1)
get_user(1)
The result is:
updated
{'first_name': 'prenom_1', 'last_name': 'nom_1', 'age': 45, 'email': 'adresse_mail_1', 'keyword_preferences': 'horreur'}
Finally, the solution was:
POST /books/user/1/_update
{
    "script": {
        "inline": "ctx._source.keyword_preferences += params.preference",
        "lang": "painless",
        "params": {
            "preference": ["comique"]
        }
    }
}
The new function is :
def add_preferences(preferences, i):
    doc = es.get(index="books", doc_type="user", id=i)
    res = es.update(index="books", doc_type="user", id=i, body={'doc': {'keyword_preferences': [{"preferences": preferences}]}})
The mapping is done, and now I get this result:
{'first_name': 'prenom_1', 'last_name': 'nom_1', 'age': 45, 'email': 'mail_1', 'keyword_preferences': [{'preferences': 'horreur'}]}
So it has replaced the first preference "comique" with "horreur".
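For completeness, sending the scripted _update above through the Python client would look roughly like this (untested sketch, reusing the es client and the painless script from the _update call above):

# Untested sketch: append to keyword_preferences via a scripted update
# instead of overwriting the field with a plain doc update.
def add_preferences(preferences, i):
    return es.update(
        index="books",
        doc_type="user",
        id=i,
        body={
            "script": {
                "inline": "ctx._source.keyword_preferences += params.preference",
                "lang": "painless",
                "params": {"preference": [preferences]},
            }
        },
    )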
Edit: an example answer for your question.
Index a doc
POST /books/user/1
{
    "keyword_preferences": ["comique"]
}
Now, update the doc to append horreur to the keyword_preferences key.
POST /books/user/1/_update
{
    "script": "ctx._source.keyword_preferences += keyword_preferences",
    "params": {
        "keyword_preferences": ["horreur"]
    },
    "lang": "groovy"
}
This will update keyword_preferences to ["comique", "horreur"].
If the update API throws an exception like {"type":"script_exception","reason":"scripts of type [inline], operation [update] and lang [groovy] are disabled"}, you need to configure elasticsearch.yml: add script.engine.groovy.inline.update: on and script.groovy.sandbox.enabled: true, then restart your Elasticsearch. I hope this helps.