FastAPI Query with dataclass not working with alias - python-3.x

The following FastAPI code is producing unexpected behaviour for me:
import uvicorn
from fastapi import FastAPI, Depends, Query
from typing import Optional
from pydantic.dataclasses import dataclass

app = FastAPI()


@dataclass
class Catz:
    qqq: Optional[str] = Query(None, alias="q")


@app.get("/productz/")
def search_products(query: Catz = Depends(Catz)):
    products = [{"name": "Computer"}, {"name": "HDD"}]
    if not query.qqq:
        query.qqq = ""
    return {"query": query, "results": [product for product in products if query.qqq in product["name"]]}


@dataclass
class Cats:
    qqq: Optional[str] = Query(None)


@app.get("/products/")
def search_products(query: Cats = Depends(Cats)):
    products = [{"name": "Computer"}, {"name": "HDD"}]
    if not query.qqq:
        query.qqq = ""
    return {"query": query, "results": [product for product in products if query.qqq in product["name"]]}


if __name__ == "__main__":
    uvicorn.run("main:app", port=11978, log_level="info", reload=True)
When I use the service via curl, I get the following outputs.
The expected behaviour with the endpoint /products/ that has no alias:
>> curl -X 'GET' 'http://localhost:11978/products/?qqq=H' -H 'accept: application/json' -H 'api-version: 1.0' ; echo
{"query":{"qqq":"H"},"results":[{"name":"HDD"}]}
Not the expected behaviour with the endpoint /productz/ that has the alias (regardless of whether I use the query parameter's own name or the alias I have in the code):
>> curl -X 'GET' 'http://localhost:11978/productz/?qqq=H' -H 'accept: application/json' -H 'api-version: 1.0' ; echo
{"query":{"qqq":""},"results":[{"name":"Computer"},{"name":"HDD"}]}
>> curl -X 'GET' 'http://localhost:11978/productz/?q=H' -H 'accept: application/json' -H 'api-version: 1.0' ; echo
{"query":{"qqq":""},"results":[{"name":"Computer"},{"name":"HDD"}]}
Any idea why that would be?

Do not import dataclass from pydantic.dataclasses - it should be imported from Python's own built-in dataclasses module:
from fastapi import FastAPI, Depends, Query
from fastapi.exceptions import RequestValidationError
from dataclasses import dataclass
from typing import Optional

app = FastAPI()


@dataclass
class Catz:
    qqq: Optional[str] = Query(None, alias="q")


@app.get("/productz/")
def search_products(query: Catz = Depends()):
    products = [{"name": "Computer"}, {"name": "HDD"}]
    if not query.qqq:
        query.qqq = ""
    return {"query": query, "results": [product for product in products if query]}
Output for /productz/?q=123:
{"query":{"qqq":"123"},"results":[{"name":"Computer"},{"name":"HDD"}]}

Related

Why does the error occur when I try to send a POST request with QWebEngineHttpRequest (PyQt5)?

I am trying to get a JSON object from a source, but I am only getting errors.
Now I'm trying to understand what I'm doing wrong.
This code works well with other resources.
Maybe it's not a problem with the code, but with the web source.
import sys
import json

from PyQt5.QtCore import QByteArray, QUrl
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWebEngineCore import QWebEngineHttpRequest
from PyQt5.QtWebEngineWidgets import QWebEnginePage, QWebEngineSettings
from pprint import pprint


def on_load_finished():
    """Handle response"""
    engine.toPlainText(handle_to_html)  # get json


def handle_to_html(html):
    """Handle html"""
    print(html)
    QApplication.quit()


if __name__ == "__main__":
    app = QApplication(sys.argv)
    engine = QWebEnginePage()
    url = "https://sportsapi.betway.com/api/Events/V2/GetEvents"
    request = QWebEngineHttpRequest(url=QUrl(url), method=QWebEngineHttpRequest.Post)
    request.setHeader(QByteArray(b"Content-Type"), QByteArray(b"application/json"))
    payload = {
        "LanguageId": 1,
        "ClientTypeId": 2,
        "BrandId": 3,
        "JurisdictionId": 1,
        "ClientIntegratorId": 1,
        "ExternalIds": [9032262, 9038528, 9037778],
        "MarketCName": "win-draw-win",
        "ScoreboardRequest": {"ScoreboardType": 3, "IncidentRequest": {}},
        "BrowserId": 3,
        "OsId": 3,
        "ApplicationVersion": "",
        "BrowserVersion": "97.0.4692.99",
        "OsVersion": "NT 10.0",
        "SessionId": "null",
        "TerritoryId": 227,
        "CorrelationId": "06779075-21e2-4ba8-8e91-d71a981621fe",
        "VisitId": "d1088cdf-13a8-42d0-be90-b34fd1332c36",
        "ViewName": "sports",
        "JourneyId": "833a4c0c-3354-499f-9d52-949df6d159f9",
    }
    request.setPostData(bytes(json.dumps(payload), "utf-8"))
    engine.load(request)
    engine.loadFinished.connect(on_load_finished)
    app.exec_()
The errors look like:
This XML file does not appear to have any style information associated with it. The document tree is shown below.
<Error>
<Message>An error has occurred.</Message>
</Error>

Adding Python logging to FastAPI endpoints hosted on Docker doesn't display the endpoints' logs

I have a FastAPI app to which I want to add Python logging. I followed the basic tutorial and added this; however, it only produces gunicorn logging, not the API endpoints' logs.
I have a local server built with docker build, run with docker-compose up, and I test my endpoints using an API client (Insomnia, similar to Postman).
Below is the code; no log file is created and hence no log statements appear.
My project structure is as follows:
project/
    src/
        api/
            models/
                users.py
            routers/
                users.py
        main.py
        logging.conf
"""
main.py Main is the starting point for the app.
"""
import logging
import logging.config
from fastapi import FastAPI
from msgpack_asgi import MessagePackMiddleware
import uvicorn
from api.routers import users
logger = logging.getLogger(__name__)
app = FastAPI(debug=True)
app.include_router(users.router)
#app.get("/check")
async def check():
"""Simple health check endpoint."""
logger.info("logging from the root logger")
return {"success": True}
Also, I am using gunicorn.conf that looks like this:
[program:gunicorn]
command=poetry run gunicorn -c /etc/gunicorn/gunicorn.conf.py foodgame_api.main:app
directory=/var/www/
autostart=true
autorestart=true
redirect_stderr=true
And gunicorn.conf.py as:
import multiprocessing
bind = "unix:/tmp/gunicorn.sock"
workers = multiprocessing.cpu_count() * 2 + 1
worker_class = "uvicorn.workers.UvicornWorker"
loglevel = "debug"
errorlog = "-"
capture_output = True
chdir = "/var/www"
reload = True
reload_engine = "auto"
accesslog = "-"
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
This is my output terminal for the above API endpoint on docker:
Could anyone please guide me here? I am new to FastAPI, so any help will be appreciated.
Inspired by @JPG's answer, but using a pydantic model looked cleaner.
You might want to expose more variables. This config worked well for me.
from pydantic import BaseModel


class LogConfig(BaseModel):
    """Logging configuration to be set for the server"""

    LOGGER_NAME: str = "mycoolapp"
    LOG_FORMAT: str = "%(levelprefix)s | %(asctime)s | %(message)s"
    LOG_LEVEL: str = "DEBUG"

    # Logging config
    version = 1
    disable_existing_loggers = False
    formatters = {
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": LOG_FORMAT,
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    }
    handlers = {
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
    }
    loggers = {
        LOGGER_NAME: {"handlers": ["default"], "level": LOG_LEVEL},
    }
Then import it into your main.py file as:
from logging.config import dictConfig
import logging
from .config import LogConfig
dictConfig(LogConfig().dict())
logger = logging.getLogger("mycoolapp")
logger.info("Dummy Info")
logger.error("Dummy Error")
logger.debug("Dummy Debug")
logger.warning("Dummy Warning")
Which gives:
I would use a dict log config.
Create a logger config as below,
# my_log_conf.py

log_config = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": "%(levelprefix)s %(asctime)s %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    },
    "handlers": {
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
    },
    "loggers": {
        "foo-logger": {"handlers": ["default"], "level": "DEBUG"},
    },
}
Then, load the config using the dictConfig function as,
from logging.config import dictConfig
from fastapi import FastAPI
from some.where.my_log_conf import log_config
dictConfig(log_config)
app = FastAPI(debug=True)
Note: It is recommended to call the dictConfig(...) function before the FastAPI initialization.
After the initialization, you can use the logger named foo-logger anywhere in your code as,
import logging
logger = logging.getLogger('foo-logger')
logger.debug('This is test')
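Bringing this back to the original question, here is a minimal sketch of using that logger inside an endpoint (reusing the /check route from the question's main.py and the placeholder import path from above):

import logging
from logging.config import dictConfig

from fastapi import FastAPI

from some.where.my_log_conf import log_config

dictConfig(log_config)  # configure logging before creating the app

app = FastAPI(debug=True)
logger = logging.getLogger('foo-logger')


@app.get("/check")
async def check():
    """Simple health check endpoint."""
    logger.debug("check endpoint was called")  # this should now show up in the container logs
    return {"success": True}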

Implementing Typescript interfaces in Python

I'm looking for some advice on the best way of implementing a set of data-value-only 'interfaces' in Python that are equivalent to their TypeScript counterparts (we've got a project where we use both, and want to enforce a consistent interface for their communication, which would be via serialising the Python into JSON to pull into the TS component).
The interfaces will be compositions to keep things modular and simple.
Given a set of TS interfaces defined as:
interface TestOutput {
    phantom: string
    testDateTime: datetime
    author: string
    result: boolean
    report_summaryFile?: string // the '?' means this field is optional
    // ... more values
    series: Array<Series>
    soloImages: Array<Images>
}

interface Series {
    number: number
    filter: string
    kernel: string
    // ... more values
    images: Array<TestImage>
}
I was thinking of using dataclasses and doing the following:
from dataclasses import dataclass
from typing import List
import datetime


@dataclass
class TestSeries:
    seriesNum: int
    modality: str
    description: str = ''


@dataclass
class TestOutput:
    phantom: str
    testDateTime: datetime.datetime
    author: str
    result: bool
    series: List[TestSeries]
    soloImages: List[Images]
    report_summaryFile: str = ''
Is dataclasses the best approach for this?
pydantic is a good library.
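For example, the interfaces from the question could be expressed as pydantic models; a rough sketch (assuming pydantic v1-style parse_raw/json APIs, with field names taken from the question):

from datetime import datetime
from typing import List

from pydantic import BaseModel


class TestSeries(BaseModel):
    seriesNum: int
    modality: str
    description: str = ''


class TestOutput(BaseModel):
    phantom: str
    testDateTime: datetime
    author: str
    result: bool
    series: List[TestSeries]
    report_summaryFile: str = ''


# Validation and JSON (de)serialisation come built in:
raw = '{"phantom": "p1", "testDateTime": "2020-01-01T00:00:00", "author": "me", "result": true, "series": []}'
output = TestOutput.parse_raw(raw)
print(output.json())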
I did something similar, but only for dataclasses - ValidatedDC:
from dataclasses import dataclass
from typing import List

from validated_dc import ValidatedDC

import json


@dataclass
class Series(ValidatedDC):
    series_num: int
    modality: str
    description: str = ''


@dataclass
class Output(ValidatedDC):
    phantom: str
    date_time: str
    author: str
    result: bool
    series: List[Series]
    report_summary_file: str = ''


# For example, by API we got a JSON string:
input_json_string = '''
    {
        "phantom": "test_phantom",
        "date_time": "2020.01.01",
        "author": "Peter",
        "result": true,
        "series": [{
            "series_num": 1,
            "modality": "test_modality"
        }]
    }
'''

# Load the string into the dictionary:
input_data = json.loads(input_json_string)

# Then create a dataclass to check the types of values and for the
# convenience of further work with data:
output = Output(**input_data)

# Since valid data were obtained, there are no errors
assert output.get_errors() is None

# Let's say we got data with an error:
input_data['series'][0]['series_num'] = '1'  # The string is not an integer!

output = Output(**input_data)
assert output.get_errors()
print(output.get_errors())

# {
#     'series': [
#         InstanceValidationError(
#             value_repr="{'series_num': '1', 'modal...}",
#             value_type=<class 'dict'>,
#             annotation=<class '__main__.Series'>, exception=None,
#             errors={
#                 'series_num': [
#                     BasicValidationError(
#                         value_repr='1', value_type=<class 'str'>,
#                         annotation=<class 'int'>, exception=None
#                     )
#                 ]
#             }
#         ),
#         ListValidationError(
#             item_index=0, item_repr="{'series_num': '1', 'modal...}",
#             item_type=<class 'dict'>, annotation=<class '__main__.Series'>
#         )
#     ]
# }
See here for more details:
https://github.com/EvgeniyBurdin/validated_dc

Convert a key-value representing string to a list

I have a string, say something like:
'Content-Type: application/json' \
'Postman-Token: a47537e5-b4b0-4915-93c8-92acf4b21e70' \
'cache-control: no-cache' \
I want it in a format like
['Content-Type' : 'application/json','Postman-Token' : 'a47537e5-b4b0-4915-93c8-92acf4b21e70','cache-control' : 'no-cache']
Here is some code for you:
def tokens = 'Content-Type: application/json Postman-Token: a47537e5-b4b0-4915-93c8-92acf4b21e70 cache-control: no-cache'.minus(":").replaceAll(":", "").split(" ")
def map2 = [ : ]
def i = 0
0.step(tokens.length, 2) {
    map2.put(tokens[i], tokens[i+1])
    i = i + 2
}
println JsonOutput.toJson(map2)
The result:
{"Content-Type":"application/json","Postman-Token":"a47537e5-b4b0-4915-93c8-92acf4b21e70","cache-control":"no-cache"}

How to programmatically enable the issue tracker of a given repository I own through the GitHub API?

I have a bunch of repository forks, and I would like to enable all their issue trackers. I am not sure why, but GitHub disables them by default, and I forgot to enable them when forking.
It would be too much work to enable their issue trackers one by one, so I thought I could write a program to do this. For now, I managed to get a list of all repositories I own with the following code:
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import os
import shlex
import json
import subprocess

current_directory = os.path.dirname( os.path.realpath(__file__) )
print( 'directory walk %s', current_directory )

token = "Authorization: token mynicetoken102312312541230240021470300250230"
user_name = "myusername"

def run_command(absolute_path, command_name):
    command = shlex.split( command_name )
    print( 'command: %s' % command )
    command_line_interface = subprocess.Popen( command, stdout=subprocess.PIPE, cwd=absolute_path )
    output = command_line_interface.communicate()[0]
    print( "\n%s" % output.decode('utf-8') )
    return output

def main():
    result = run_command( current_directory, "curl -H '%s' https://api.github.com/users/%s/repos" % ( token, user_name ) )
    result_json = json.loads( result.decode('utf-8') )
    for repository_data in result_json:
        repository_full_name = repository_data['full_name']
        print( "Processing{:s}".format( repository_full_name ) )
        # Now, what do?
        run_command( current_directory, "curl -H '%s' https://api.github.com/%s/misterX" % ( token, repository_full_name ) )

if __name__ == "__main__": main()
I think the only thing missing is to complete the last lines:
# Now, what do?
run_command( current_directory, "curl -H '%s' https://api.github.com/%s/misterX" % ( token, repository_full_name ) )
After finding How do I rename a GitHub repository via their API?, I managed to build the following code:
# Now, what do?
full_command = \
r"""
curl
-H "Authorization: Token %s"
-H "Content-Type: application/json"
-H "Accept: application/json"
-X PATCH
--data '{ "has_issues": true }'
https://api.github.com/repos/:%s
""" % ( token, repository_full_name )
print( 'full_command: %s' % full_command )
run_command( current_directory, full_command )
But GitHub says:
{
    "message": "Not Found",
    "documentation_url": "https://developer.github.com/v3/repos/#edit"
}
Their API page does not help much: https://developer.github.com/v3/repos/#edit
References:
How to retrieve the list of all github repositories of a person?
https://github.com/settings/tokens GitHub token with full repository access
The answer I used on How do I rename a GitHub repository via their API? was wrong. It was using https://api.github.com/repos/:owner/repo, but it should be https://api.github.com/repos/owner/repo. After fixing that, GitHub kept saying:
{
    "message": "Validation Failed",
    "errors": [
        {
            "resource": "Repository",
            "code": "custom",
            "field": "name",
            "message": "name is too short (minimum is 1 character)"
        }
    ],
    "documentation_url": "https://developer.github.com/v3/repos/#edit"
}
Then, I added "name": "repository_name" to the JSON, and it worked. This is the new code:
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import os
import shlex
import json
import subprocess
import shutil

"""
Iterates through all repositories from a user and enables the issue tracker.
"""

# GitHub token with full repository access
# https://github.com/settings/tokens
token = "8217398127859182039802175098213389019766"
user_name = "username"

current_directory = os.path.dirname( os.path.realpath(__file__) )
print( 'directory walk %s' % current_directory )

# The maximum count of repositories to process when calling this batch script.
maximum_process_limit = 1000

def run_command(absolute_path, command_name):
    command = shlex.split( command_name )
    print( 'command: %s' % command )
    command_line_interface = subprocess.Popen(
        command, stdout=subprocess.PIPE, cwd=absolute_path )
    output = command_line_interface.communicate()[0]
    # print( "%s" % output )
    # print( "\n%s" % output.decode('utf-8') )
    return output

def main():
    page_index = 1
    while process_repositories_page( page_index ):
        page_index += 1

def process_repositories_page(page_index):
    global maximum_process_limit
    items_per_page = 100

    repositories_text = run_command( current_directory,
        "curl -H '%s' https://api.github.com/users/%s/repos?per_page=%s&page=%s" % (
        token, user_name, items_per_page, page_index ) )
    repositories_json = json.loads( repositories_text.decode('utf-8') )

    for repository_data in repositories_json:
        print( "Processing repository: %s" % repository_data['full_name'] )
        if maximum_process_limit <= 0: return
        maximum_process_limit -= 1

        full_command = \
        r"""
        curl
        -H "Authorization: Token {token}"
        -H "Content-Type: application/json"
        -H "Accept: application/json"
        -X PATCH
        --data '{data}'
        https://api.github.com/repos/{full_name}
        """.format(
            token=token,
            data=json.dumps(
                {
                    "name": repository_data['name'],
                    "has_issues": True
                }
            ),
            full_name=repository_data['full_name']
        )

        print( 'full_command: %s' % full_command )
        result = run_command( current_directory, full_command )
        print( 'result: %s' % result.decode('utf-8') )

    return len( repositories_json ) == items_per_page

if __name__ == "__main__":
    main()
New references:
Programmatically enable Github Pages for a repository
Escape double quotes for JSON in Python
Github API v3 doesn't show all user repositories
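For anyone who would rather not shell out to curl, the same PATCH call can be made with the requests library; a sketch under the assumption that requests is installed (the endpoint and fields are the ones used above):

import requests

def enable_issues(token, repository_full_name, repository_name):
    """Enable the issue tracker for one repository via PATCH /repos/{owner}/{repo}."""
    response = requests.patch(
        "https://api.github.com/repos/%s" % repository_full_name,
        headers={
            "Authorization": "token %s" % token,
            "Accept": "application/json",
        },
        json={"name": repository_name, "has_issues": True},
    )
    response.raise_for_status()
    return response.json()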
