I am fairly new to FastAPI (migrating from Django) and I am trying to create a generic CRUD operations class that I can inherit and use across my CBV endpoints.
Something like this:
class AbstractCrud:
    model: Base = NotImplemented
    session: Session = NotImplemented

    def get_items(self, limit, **filters):
        """ Read operation (many) """

    def get_item(self, pk: int):
        """ Read operation (single) """

    def create_item(self, obj: BaseModel):
        """ Create operation """

    def update_item(self, pk: int, **params):
        """ Update operation """

    def delete_item(self, pk: int):
        """ Delete operation """
router = InferringRouter()

@cbv(router)
class UserAPI(AbstractCrud):
    router.tags = ["User"]
    router.prefix = "/users"
    model = User
    session: Session = Depends(get_db)

    # my endpoints
    # e.g. @router.get(...)

@cbv(router)
class PostAPI(AbstractCrud):
    router.tags = ["Post"]
    router.prefix = "/posts"
    model = Post
    session: Session = Depends(get_db)

    # my endpoints
    # e.g. @router.get(...)
I get the following error if I try to do the above:
fastapi.exceptions.FastAPIError: Invalid args for response field! Hint: check that <class 'sqlalchemy.orm.decl_api.Base'> is a valid pydantic field type
For now, I am able to achieve this as follows:
class AbstractCrud:
    model: Base = NotImplemented
    session: Session = NotImplemented

    def get_items(self, limit, **filters):
        """ Read operation (many) """

    def get_item(self, pk: int):
        """ Read operation (single) """

    def create_item(self, obj: BaseModel):
        """ Create operation """

    def update_item(self, pk: int, **params):
        """ Update operation """

    def delete_item(self, pk: int):
        """ Delete operation """
class UserCrud(AbstractCrud):
    def __init__(self, session: Session):
        self.session = session
        self.model = User

class PostCrud(AbstractCrud):
    def __init__(self, session: Session):
        self.session = session
        self.model = Post
router = InferringRouter()

@cbv(router)
class UserAPI:
    router.tags = ["User"]
    router.prefix = "/users"

    def __init__(self, session=Depends(get_db)):
        self.session = session
        self.crud = UserCrud(self.session)

    # my endpoints
    # e.g. @router.get(...)

@cbv(router)
class PostAPI:
    router.tags = ["Post"]
    router.prefix = "/posts"

    def __init__(self, session=Depends(get_db)):
        self.session = session
        self.crud = PostCrud(self.session)

    # my endpoints
    # e.g. @router.get(...)
Although this is working fine for me now, I can't help but wonder whether there is a better (or correct) way to do this.
Also, is my use of a single router variable across multiple classes correct?
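For concreteness, here is a rough sketch of what a dependency-injection-based layout could look like with plain FastAPI (no fastapi_utils). User and get_db are the names from my code above; UserRead, UserCrud and the endpoint are purely illustrative:

    from typing import List

    from fastapi import APIRouter, Depends, FastAPI
    from pydantic import BaseModel
    from sqlalchemy.orm import Session

    class UserRead(BaseModel):  # illustrative response schema; adjust fields to the real User model
        id: int

        class Config:
            orm_mode = True

    class CrudBase:
        model = NotImplemented  # set by each concrete subclass

        def __init__(self, session: Session = Depends(get_db)):
            self.session = session

        def get_items(self, limit: int = 100):
            return self.session.query(self.model).limit(limit).all()

    class UserCrud(CrudBase):
        model = User

    user_router = APIRouter(prefix="/users", tags=["User"])

    @user_router.get("/", response_model=List[UserRead])
    def list_users(crud: UserCrud = Depends(), limit: int = 100):
        # FastAPI builds UserCrud (and its session) per request
        return crud.get_items(limit=limit)

    app = FastAPI()
    app.include_router(user_router)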
This is my first FastAPI exercise. My old model-serving code is implemented with Flask as follows:
class HealthCheck(Resource):
    def __init__(self, **kwargs):
        super(HealthCheck, self).__init__()
        self._model = kwargs['model']
        self._logger = kwargs['logger']

    def get(self):
        if self._model:
            return {"status": "healthy"}, HTTPStatus.OK
        return {"status": "unavailable"}, HTTPStatus.BAD_REQUEST

    def put(self):
        raise MethodNotAllowed('PUT request not supported')

    # similarly other methods are disabled

# In a different module, say in the App class
class App():
    def __init__(self, name, logger, config):
        self._logger = logger
        self._model = load_model(config['model_path'])
        self._flask_app = Flask(name)
        api = Api(self._flask_app)
        # logger and model are passed to the HealthCheck resource
        api.add_resource(HealthCheck, "/api/healthcheck",
                         resource_class_kwargs={'model': self._model, 'logger': self._logger})
How do I achieve the same with FastAPI's APIRouter?
My example FastAPI implementation is as follows:
class HealthResult(BaseModel):
    healthy: bool

health_router = fastapi.APIRouter()

@health_router.get("/healthcheck", response_model=HealthResult, name="healthcheck")
async def heartbeat() -> HealthResult:
    hb = HealthResult(healthy=True)
    return hb

# in the App module
class App():
    def __init__(self, name, logger, config):
        self._logger = logger
        self._model = load_model(config['model_path'])
        self._api = fastapi.FastAPI(title=name)
        self._api.include_router(health_router, prefix="/api")
        # how do I pass model and logger to health_router so they can be used in the heartbeat method?
I want to avoid putting the model and logger in any global storage and accessing them from there in health_router.
Also, since my FastAPI object lives inside the App class, how can I invoke multiple uvicorn workers in this case?
Though I don't like it, I modified my App class as follows to get around the problem; I'm still looking for a cleaner solution.
class App(metaclass=Singleton):
    _MODEL: Union[None, Model] = None
    _LOGGER: Union[None, CustomLogger] = None

    @classmethod
    def setLogger(cls, logger: CustomLogger) -> None:
        cls._LOGGER = logger

    @classmethod
    def getLogger(cls) -> CustomLogger:
        return cls._LOGGER

    @classmethod
    def setModel(cls, model: Model) -> None:
        assert model
        cls._MODEL = model

    @classmethod
    def getModel(cls) -> Model:
        return cls._MODEL

    def __init__(self, name: str, logger: CustomLogger, config: YAML) -> None:
        App.setLogger(logger)
        model: Model = load_model(config['model_path'])
        App.setModel(model)
        self._api = fastapi.FastAPI(title=name)
        self._api.include_router(health_router, prefix="/api")
        ....
class HealthResult(BaseModel):
    healthy: bool

health_router = fastapi.APIRouter()

@health_router.get("/healthcheck", response_model=HealthResult, name="healthcheck")
async def heartbeat() -> HealthResult:
    model: Model = App.getModel()
    hb: HealthResult = HealthResult(healthy=True) if model else HealthResult(healthy=False)
    return hb
from fastapi import FastAPI, Request

app = FastAPI()
app.share_text = 'aaa'

# in an APIRouter
@router.get("/share")
async def read(request: Request):
    # retrieve it from the app context
    share_text = request.app.share_text
    return {}
You can retrieve the app context from the Request object.
Ref: https://fastapi.tiangolo.com/advanced/using-request-directly/#use-the-request-object-directly
Ref: https://www.starlette.io/requests/
Application: The originating Starlette application can be accessed via request.app.
Other state: If you want to store additional information on the request you can do so using request.state.
For example:
request.state.time_started = time.time()
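A related option is the application state object (app.state), which FastAPI inherits from Starlette. Applied to the healthcheck example from the question, a rough sketch might look like this; load_model and the logger are the question's own objects, and I'm assuming the logger exposes a logging-style .info() method:

    from fastapi import APIRouter, FastAPI, Request
    from pydantic import BaseModel

    class HealthResult(BaseModel):
        healthy: bool

    health_router = APIRouter()

    @health_router.get("/healthcheck", response_model=HealthResult)
    async def heartbeat(request: Request) -> HealthResult:
        # model and logger were attached to the app in App.__init__
        model = request.app.state.model
        request.app.state.logger.info("healthcheck called")  # assumes a logging-style logger
        return HealthResult(healthy=model is not None)

    class App:
        def __init__(self, name, logger, config):
            self._api = FastAPI(title=name)
            # store shared objects on the application state instead of module-level globals
            self._api.state.logger = logger
            self._api.state.model = load_model(config['model_path'])
            self._api.include_router(health_router, prefix="/api")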
I have a database connection class that creates a connection pool. Now as the application grows and I'm adding different types of database writers, I want to move database connections to a separate class and inherit from it. So far I have this:
class ServiceDB:
    @classmethod
    async def init(cls, settings):
        self = ServiceDB()
        self.pool = await asyncpg.create_pool(
            database=settings["POSTGRES_DB"],
            user=settings["POSTGRES_USER"],
            password=settings["POSTGRES_PASSWORD"],
            host=settings["DB_HOST"],
            port=settings["DB_PORT"],
        )
        return self
class ChildWriter(ServiceDB):
    async def write_db(self, query):
        # Write to specific table
        pass

if __name__ == "__main__":
    settings = {'info': "some connection settings"}
    query = "SELECT * FROM 'table'"
    connection = await ChildWriter().init(settings)
    await connection.write_db(msg, query)
When I run this I get AttributeError: 'ServiceDB' object has no attribute 'write_db'. How do I properly extend ServiceDB with the write_db method?
Classmethods receive the "current class" as the first argument. Instantiate this cls, not the fixed base class.
class ServiceDB:
    @classmethod
    async def init(cls, settings):
        self = cls()  # cls is the *current* class, not just ServiceDB
        self.pool = await asyncpg.create_pool(
            database=settings["POSTGRES_DB"],
            user=settings["POSTGRES_USER"],
            password=settings["POSTGRES_PASSWORD"],
            host=settings["DB_HOST"],
            port=settings["DB_PORT"],
        )
        return self
Note that ideally, all attributes are set via __init__ instead of a separate classmethod constructor. The separate constructor should just pass on any attributes constructed externally.
class ServiceDB:
    def __init__(self, pool):
        self.pool = pool

    @classmethod
    async def init(cls, settings, **kwargs):
        pool = await asyncpg.create_pool(
            database=settings["POSTGRES_DB"],
            user=settings["POSTGRES_USER"],
            password=settings["POSTGRES_PASSWORD"],
            host=settings["DB_HOST"],
            port=settings["DB_PORT"],
        )
        return cls(pool=pool, **kwargs)

class ChildWriter(ServiceDB):
    async def write_db(self, query): ...
if __name__ == "__main__":
    settings = {'info': "some connection settings"}
    query = "SELECT * FROM 'table'"
    # call the classmethod on the subclass, not on the base class
    # (as in the question, this await really needs to run inside an event loop)
    connection = await ChildWriter.init(settings)
    await connection.write_db(query)
I wanted to set up a connection pool when initializing the class, using the method below:
import asyncio
import asyncpg

class DBCommands:
    def __init__(self, uri: str) -> None:
        loop = asyncio.get_event_loop()
        self.pool: asyncpg.pool.Pool = loop.run_until_complete(asyncpg.create_pool(dsn=uri))

    async def get_id_admins(self) -> list:
        async with self.pool.acquire():
            result = await self.pool.fetch("SELECT chat_id FROM users WHERE role_user = 'admin'")
            admins_id = [row[0] for row in result]
            return admins_id
Since there should be only one pool, the implementation above will not work. I decided to use a singleton, but I don't understand how to implement it. Below is the version I came up with. Tell me how best to solve this problem. In addition, I don't understand how and where it is best to close the connections. I'm new to using patterns and just starting to study OOP.
import asyncio
import asyncpg

class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]

class DBManager(metaclass=Singleton):
    @classmethod
    def connect(cls, uri):
        loop = asyncio.get_event_loop()
        return loop.run_until_complete(asyncpg.create_pool(dsn=uri))

class DBCommands:
    def __init__(self, uri) -> None:
        self.uri = uri
        self.pool = DBManager.connect(uri)

    async def get_id_admins(self) -> list:
        async with self.pool.acquire():
            result = await self.pool.fetch("SELECT chat_id FROM users WHERE role_user = 'admin'")
            admins_id = [row[0] for row in result]
            return admins_id
I assume that opening and closing the pool could be handled in __aenter__ and __aexit__.
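Roughly, the idea I have in mind looks like this (just an untested sketch):

    import asyncpg

    class DBPool:
        def __init__(self, uri: str) -> None:
            self.uri = uri
            self.pool = None

        async def __aenter__(self) -> "DBPool":
            # open the pool once, when entering the context
            self.pool = await asyncpg.create_pool(dsn=self.uri)
            return self

        async def __aexit__(self, exc_type, exc, tb) -> None:
            # close all pooled connections when leaving the context
            await self.pool.close()

    # usage:
    # async with DBPool(uri) as db:
    #     rows = await db.pool.fetch("SELECT chat_id FROM users WHERE role_user = 'admin'")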
You can use a class attribute and create the pool the first time it's needed in an async function:
class Database:
    pool = None  # class attribute

    ...

    async def get_id_admins(self):
        if self.pool is None:
            # create the pool lazily, the first time it is needed
            self.pool = await asyncpg.create_pool(dsn=...)
        ...
I generally use a regular class and create a single instance attached to a global object (like the aiohttp application, for web applications), as in:
class Database:
    def __init__(self, dsn):
        self.dsn = dsn
        self.pool = None

    async def connect(self):
        """Initialize the asyncpg Pool"""
        self.pool = await asyncpg.create_pool(dsn=self.dsn, min_size=2, max_size=4)
        logging.info("successfully initialized database pool")

    async def get_id_admins(self):
        ...
And use it like:
async def startup(app):
    await app.database.connect()

async def shutdown(app):
    await app.database.pool.close()

def main():
    app = web.Application()
    app.database = Database(app.config.DSN)
    app.on_startup.append(startup)
    app.on_shutdown.append(shutdown)
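If you are on FastAPI rather than aiohttp, the same idea translates roughly as follows (a sketch only; Database is the class above, and app.state is the application-state object FastAPI inherits from Starlette):

    from fastapi import FastAPI

    app = FastAPI()

    @app.on_event("startup")
    async def startup() -> None:
        # create the single Database instance and open the pool once
        app.state.database = Database(dsn="postgresql://...")
        await app.state.database.connect()

    @app.on_event("shutdown")
    async def shutdown() -> None:
        await app.state.database.pool.close()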
I'm currently working on a big code base and I need to send emails from potentially any module, which leads to circular-dependency issues in Python.
So I tried to use apps.get_model() from django.apps, but when the serializers are declared the models are not ready yet.
So I'm trying to create a factory function that builds the serializer class at runtime instead of at launch time:
from django.apps import apps
from rest_framework.serializers import ModelSerializer

def make_serializer(model: str, fields: tuple, options=None, **nested_fields) -> ModelSerializer:
    """Generate a new serializer "on the fly", so the model does not have to be imported at launch time."""
    model_object = apps.get_model(model)
    input_fields = fields
    if options is None:
        options = {}

    class Serializer(ModelSerializer):
        class Meta:
            model = model_object
            fields = input_fields

        def create(self, validated_data):
            # we won't permit creating data from these serializers.
            raise NotImplementedError

    # configure nested serializers.
    for nested_field in nested_fields.values():
        for key, nested_serializer_class in nested_field.items():
            serializer_instance = nested_serializer_class(**options.get(key, {}))
            print(model, key, serializer_instance)
            setattr(Serializer, key, serializer_instance)

    return Serializer
My test models look like:
class Band(Model):
    name = CharField(max_length=255)

class Influencer(Model):
    entity = CharField(max_length=255)

class Submission(Model):
    influencer = ForeignKey(Influencer, ...)

class Campaign(Model):
    band = ForeignKey('band.Band', ...)
    submissions = ManyToManyField(Submission)
and my testing function is:
def test():
    serializer = make_serializer(
        model='submission.Campaign',
        fields=['pk', 'submissions', 'band'],
        options={'submissions': {'many': True}},
        nested_fields={
            'submissions': make_serializer(
                model='submission.Submission',
                fields=('influencer',),
                nested_fields={
                    'influencer': make_serializer('influencer.Influencer', ('entity',))
                },
            ),
            'band': make_serializer('band.Band', ('name',))
        }
    )
    return serializer
Instead of getting my fields rendered correctly with test()(Campaign.objects.last()).data, I only get "pks", and my serializer looks like:
Serializer():
    pk = IntegerField(label='ID', read_only=True)
    submissions = PrimaryKeyRelatedField(many=True, queryset=Submission.objects.all())
    band = PrimaryKeyRelatedField(allow_null=True, queryset=Band.objects.all(), required=False)
I expect an output like:
{
    "pk": 1,
    "band": {
        "name": "BobMarley"
    },
    "submissions": [
        {
            "influencer": {"entity": "The influencer's name"}
        }
    ]
}
but I get a ReturnDict containing:
{
    "pk": 1,
    "band": 523,
    "submissions": [6, 7, 8]
}
Thanks for your time.
Well, after many headaches I found out that I can't setattr fields onto a serializer class after its declaration (DRF collects declared fields when the class is created), so I used a trick based on a dict of attributes instead:
def make_serializer(model: str, fields: tuple, options=None, **nested_fields) -> ModelSerializer:
    """Generate a new serializer "on the fly", so the model does not have to be imported at launch time."""
    name = f'Serializer_{model}'
    model_object = apps.get_model(model)
    input_fields = fields
    if options is None:
        options = {}

    def create(self, validated_data):
        # we won't permit creating data from these serializers.
        raise NotImplementedError

    class Meta:
        model = model_object
        fields = input_fields

    attrs = {"Meta": Meta}

    # configure nested serializers.
    for key, nested_serializer_class in nested_fields.items():
        attrs[key] = nested_serializer_class(**options.get(key, {}))

    attrs['create'] = create

    # build the class through the serializer metaclass, so DRF picks up the declared fields
    return type(ModelDictSerializer)(name, (ModelDictSerializer,), attrs)
The syntax is something like:
campaign_serializer = make_serializer(
    model='submission.Campaign',
    fields=['pk', 'submissions', 'band'],
    options={'submissions': {'many': True}},
    submissions=make_serializer(
        model='submission.Submission',
        fields=('influencer',),
        influencer=make_serializer('influencer.Influencer', ('entity',))
    ),
    band=make_serializer('band.Band', ('name',))
)
And it works like a charm:
Serializer_submission.Campaign(<Campaign: Campaign object (9665)>):
    pk = IntegerField(label='ID', read_only=True)
    submissions = Serializer_submission.Submission(many=True):
        influencer = Serializer_influencer.Influencer():
            entity = CharField(allow_blank=True, max_length=255, required=False)
    band = Serializer_band.Band():
        name = CharField(max_length=255)
I hope this helps someone else.
I have started a project using Django in which I add multiple sub-topics under one main topic using a staticid. When I give the same staticid to multiple sub-topics, I get the error below (get() returned more than one Sub_Topic -- it returned 3!).
Model:
class Sub_Topic(models.Model):
    IMPORTANCE_SCORE = (
        ('LOW', 'Low'),
        ('NORMAL', 'Normal'),
        ('HIGH', 'High'),
    )
    staticid = models.ForeignKey(SID, on_delete=models.CASCADE, blank=True, default=None, null=True)
    sub_topic = models.CharField(max_length=250)
    Num_Of_Sub_subTopics = models.PositiveIntegerField(default=0)
    Num_Of_Questions = models.PositiveIntegerField(default=0)
    importance = models.CharField(max_length=6, choices=IMPORTANCE_SCORE, default='LOW')
    complexity = models.PositiveIntegerField(default=0)
    prerequisite = models.CharField(max_length=250)

    def __str__(self):
        return self.sub_topic
View:
class Sub_TopicDetailView(generics.RetrieveUpdateDestroyAPIView):
    """
    GET sub_topic/:id/
    PUT sub_topic/:id/
    DELETE sub_topic/:id/
    """
    queryset = Sub_Topic.objects.all()
    serializer_class = Sub_TopicSerializer

    def get(self, request, *args, **kwargs):
        try:
            a_sub_topic = self.queryset.get(staticid=kwargs["staticid"])
            return Response(Sub_TopicSerializer(a_sub_topic).data)
        except Sub_Topic.DoesNotExist:
            return Response(
                data={
                    "message": "Sub_Topic with id: {} does not exist".format(kwargs["staticid"])
                },
                status=status.HTTP_404_NOT_FOUND
            )

    @validate_request_data
    def put(self, request, *args, **kwargs):
        try:
            a_sub_topic = self.queryset.get(staticid=kwargs["staticid"])
            serializer = Sub_TopicSerializer()
            updated_sub_topic = serializer.update(a_sub_topic, request.data)
            return Response(Sub_TopicSerializer(updated_sub_topic).data)
        except Sub_Topic.DoesNotExist:
            return Response(
                data={
                    "message": "Sub_Topic with id: {} does not exist".format(kwargs["staticid"])
                },
                status=status.HTTP_404_NOT_FOUND
            )
Error:
get() returned more than one Sub_Topic -- it returned 3!
How do I overcome this?
If you have a main topic (say, "donuts"), and many subtopics within that ("plain donuts", "chocolate donuts", "vanilla donuts", ...), you cannot reference a subtopic by just saying "donuts", you have to be more specific.
Your sub-topic views should accept a sub-topic ID, not the main topic ID. Try changing this:
a_sub_topic = self.queryset.get(staticid=kwargs["staticid"])
# 'staticid' is the foreign key of the main topic: it is
# the same for many sub-topics!
to this:
a_sub_topic = self.queryset.get(id=kwargs["id"])
# 'id' is the primary key field generated automatically by Django:
# it's unique for every sub-topic
If instead you want to display all sub-topics for a given topic, then you should use filter() instead of get():
sub_topics = self.queryset.filter(staticid=kwargs["staticid"])
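If you go the filter() route, pair it with the serializer's many=True form, roughly like this (a sketch reusing the question's Sub_TopicSerializer):

    sub_topics = self.queryset.filter(staticid=kwargs["staticid"])
    return Response(Sub_TopicSerializer(sub_topics, many=True).data)

In DRF terms, listing all sub-topics for a topic really belongs in a list view rather than the RetrieveUpdateDestroyAPIView shown above.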