I am creating API endpoints using Flask-SQLAlchemy and Marshmallow in Python. For example, I have two collections/tables: one is items and the other is stores. Items will have attributes like item_id, item_name, item_price and an available store_list. Store will have attributes like store_id, store_name, store_location and an available items_list. I require the following JSON response when I request the list of items.
[
  {
    "item_id": 1,
    "item_name": "Laptop",
    "item_price": "20",
    "store_list": [
      { "store_id": 1, "store_name": "ABC", "store_location": "USA" },
      { "store_id": 2, "store_name": "BBC", "store_location": "USA" },
      { "store_id": 3, "store_name": "CBC", "store_location": "USA" }
    ]
  },
  {
    "item_id": 2,
    "item_name": "Laptop",
    "item_price": "20",
    "store_list": [
      { "store_id": 1, "store_name": "ABC", "store_location": "USA" },
      { "store_id": 2, "store_name": "BBC", "store_location": "USA" },
      { "store_id": 3, "store_name": "CBC", "store_location": "USA" }
    ]
  },
  ... and so on
]
I require the following JSON response when I request the list of stores.
[
  {
    "store_id": 1,
    "store_name": "ABC",
    "store_location": "USA",
    "items_list": [
      { "item_id": 1, "item_name": "Laptop", "item_price": "65" },
      { "item_id": 2, "item_name": "Keyboard", "item_price": "56" },
      { "item_id": 3, "item_name": "Mouse", "item_price": "56" }
    ]
  },
  {
    "store_id": 2,
    "store_name": "BBC",
    "store_location": "UK",
    "items_list": [
      { "item_id": 1, "item_name": "Laptop", "item_price": "23" },
      { "item_id": 2, "item_name": "Speaker", "item_price": "56" },
      { "item_id": 3, "item_name": "Mouse", "item_price": "24" }
    ]
  },
  ... and so on
]
So far I have tried the following
# ITEMS MODEL
from typing import List

from db import db


class Items(db.Model):
    __tablename__ = "items"

    item_id = db.Column(db.Integer, primary_key=True)
    item_name = db.Column(db.String(100), nullable=False)
    item_price = db.Column(db.String(10), nullable=False)
    store_list = db.relationship('Stores', backref=db.backref('items'))

    @classmethod
    def find_by_name(cls, name: str) -> "Items":
        return cls.query.filter_by(item_name=name).first()

    @classmethod
    def find_by_id(cls, _id: int) -> "Items":
        return cls.query.filter_by(item_id=_id).first()

    @classmethod
    def find_all(cls) -> List["Items"]:
        return cls.query.all()

    def save_to_db(self) -> None:
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self) -> None:
        db.session.delete(self)
        db.session.commit()
STORE MODEL

from typing import List

from db import db


class Stores(db.Model):
    __tablename__ = "stores"

    store_id = db.Column(db.Integer, primary_key=True)
    store_name = db.Column(db.String(100), nullable=False)
    store_location = db.Column(db.String(10), nullable=False)
    items_list = db.relationship('Items', backref=db.backref('stores'))

    @classmethod
    def find_by_name(cls, name: str) -> "Stores":
        return cls.query.filter_by(store_name=name).first()

    @classmethod
    def find_by_id(cls, _id: int) -> "Stores":
        return cls.query.filter_by(store_id=_id).first()

    @classmethod
    def find_all(cls) -> List["Stores"]:
        return cls.query.all()

    def save_to_db(self) -> None:
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self) -> None:
        db.session.delete(self)
        db.session.commit()
SCHEMAS

# Items Schema
from ma import ma
from models.item import Items


class ItemsSchema(ma.ModelSchema):
    class Meta:
        model = Items


# Store Schema
from ma import ma
from models.store import Stores


class StoresSchema(ma.ModelSchema):
    class Meta:
        model = Stores
Resources
# Store Resource
from flask_restful import Resource

from models.store import Stores
from schemas.store import StoresSchema

store_list_schema = StoresSchema(many=True)


class StoreList(Resource):
    @classmethod
    def get(cls):
        return {"stores": store_list_schema.dump(Stores.find_all())}, 200


# Items Resource
from flask_restful import Resource

from models.item import Items
from schemas.item import ItemsSchema

item_list_schema = ItemsSchema(many=True)


class ItemList(Resource):
    @classmethod
    def get(cls):
        return {"items": item_list_schema.dump(Items.find_all())}, 200
The following is the code for starting the app:

from flask import Flask, jsonify
from flask_jwt_extended import JWTManager
from flask_restful import Api
from marshmallow import ValidationError

from db import db
from ma import ma
from resources.item import Item, ItemList
from resources.store import Store, StoreList

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///data.db"
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
api = Api(app)


@app.before_first_request
def create_tables():
    db.create_all()


@app.errorhandler(ValidationError)
def handle_marshmallow_validation(err):
    return jsonify(err.messages), 400


jwt = JWTManager(app)

api.add_resource(StoreList, "/stores")
api.add_resource(ItemList, "/items")

if __name__ == "__main__":
    db.init_app(app)
    ma.init_app(app)
    app.run(port=5000, debug=True)
Looks like jsonify is your friend...
https://www.google.com/search?q=flask+sqlalchemy+to+json
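Concretely, the nested lists in the desired responses usually come from marshmallow's Nested fields rather than from jsonify alone. A minimal sketch of how the schemas above could be adjusted, assuming the Items/Stores link is made a proper many-to-many through an association table (called item_store here; it is not in the question) and keeping the ma.ModelSchema base already in use; the exclude arguments stop the Items -> Stores -> Items nesting from recursing forever:

# Hypothetical association table for the many-to-many Items <-> Stores link.
item_store = db.Table(
    "item_store",
    db.Column("item_id", db.Integer, db.ForeignKey("items.item_id")),
    db.Column("store_id", db.Integer, db.ForeignKey("stores.store_id")),
)

# The Items model would then declare the relationship as:
#     store_list = db.relationship("Stores", secondary=item_store, backref="items_list")


class StoresSchema(ma.ModelSchema):
    class Meta:
        model = Stores


class ItemsSchema(ma.ModelSchema):
    # Dump each related store inline, but leave out its items_list to avoid recursion.
    store_list = ma.Nested(StoresSchema, many=True, exclude=("items_list",))

    class Meta:
        model = Items


class StoresWithItemsSchema(ma.ModelSchema):
    # Same idea in the other direction, for the /stores endpoint.
    items_list = ma.Nested(ItemsSchema, many=True, exclude=("store_list",))

    class Meta:
        model = Stores

With these, ItemsSchema(many=True).dump(Items.find_all()) produces each item with its nested store_list, and StoresWithItemsSchema(many=True).dump(Stores.find_all()) produces each store with its nested items_list, which flask-restful (or jsonify) can then return as JSON.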
Related
Views.py
from django.shortcuts import render
from django.http import HttpResponse

# Create your views here.
from rest_framework import viewsets
import requests
import gdown
from pydub import AudioSegment
import speech_recognition as sr
from .serializers import *
from .models import *
import time
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from urllib.request import urlopen


class VoiceViewSet(viewsets.ModelViewSet):
    queryset = Voiceapi.objects.all()
    serializer_class = VoiceSerializer

    datas = Voiceapi.objects.values()
    print(datas)

    def post(self, request):
        vc_api = Voiceapi.objects.all()
        serializer = VoiceSerializer(vc_api, many=True)
        for i in datas:
            try:
                print("Audio File-->", i['name'])
                audio_url = i['name']
                audio_id = i['id']
                output = '/home/venpep/voicetotext/messages/media/sample2.ogg'
                gdown.download(audio_url, output, quiet=False)
                src = "/home/venpep/voicetotext/messages/media/sample2.ogg"
                # time.sleep(15)
                dst = "/home/venpep/voicetotext/messages/media/test.wav"
                sound = AudioSegment.from_ogg(src)
                sound.export(dst, format="wav")
                # time.sleep(15)

                def VoiceRecognizer(audio, audio_id):
                    r = sr.Recognizer()
                    with sr.AudioFile(audio) as source:
                        audio_text = r.listen(source)
                        try:
                            text = r.recognize_google(audio_text)
                            print(text)
                        except:
                            print('Audio Not Clear')

                audio = "/home/venpep/voicetotext/messages/media/test.wav"
                VoiceRecognizer(audio, audio_id)
            except:
                print("Not audio file")
Models.py
from django.db import models


class Voiceapi(models.Model):
    id = models.IntegerField(primary_key=True)
    name = models.CharField(max_length=200)
    voice_text = models.CharField(max_length=200, default="voice_data")
Serializer.py
from rest_framework import serializers

from .models import *


class VoiceSerializer(serializers.HyperlinkedModelSerializer):
    # specify model and fields
    class Meta:
        model = Voiceapi
        fields = ('id', 'name', 'voice_text')
When I post my data, the "text" from views.py should be inserted into the voice_text field of the model in models.py, where I have given a default value. Is there a way to insert the transcribed text into the voice_text field when the POST request is made?
Right now my output is:
{
"id": 8,
"name": "https://storage.googleapis.com/wtleu/assets/1166882/inbox/919944470015/5505314129517179.ogg",
"voice_text": "voice_data"
}
Required output:
{
"id": 8,
"name": "https://storage.googleapis.com/wtleu/assets/1166882/inbox/919944470015/5505314129517179.ogg",
"voice_text": "This is the text of the voice message"
}
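One way this could be wired up is sketched below, under some assumptions: the download/convert/recognize steps from views.py are factored into a hypothetical transcribe() helper that returns the recognized text instead of printing it, and the write-back happens inside the viewset's create() right after the row is saved.

from rest_framework import status, viewsets
from rest_framework.response import Response

from .models import Voiceapi
from .serializers import VoiceSerializer


class VoiceViewSet(viewsets.ModelViewSet):
    queryset = Voiceapi.objects.all()
    serializer_class = VoiceSerializer

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        instance = serializer.save()  # row is created with the default voice_text

        text = transcribe(instance.name)  # hypothetical helper: gdown + pydub + speech_recognition
        if text:
            instance.voice_text = text  # replace the default with the recognized text
            instance.save(update_fields=["voice_text"])

        return Response(self.get_serializer(instance).data, status=status.HTTP_201_CREATED)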
I've tried everything but I cannot figure out why my images for an Avatar won't save to the media folder in Django.
I am happy the front end is passing form data to the AvatarAPIView and I get the following when I print out the data being passed to the view.
<QueryDict: {'myFile': [<InMemoryUploadedFile: 1965.jpg (image/jpeg)>]}>
[07/Feb/2021 10:48:54] "PUT /api/profile/avatar/ HTTP/1.1" 200 31
view.py
from profiles.api.serializers import (UserDisplaySerializer,
                                      SubscriptionSerializer,
                                      AvatarSerializer)
from profiles.models import CustomUser
from rest_framework import status, viewsets
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.generics import UpdateAPIView, GenericAPIView
from rest_framework import mixins
from rest_framework.parsers import MultiPartParser, FileUploadParser, FormParser
from django.http import HttpResponse


class CurrentUserAPIView(APIView):
    def get(self, request):
        serializer = UserDisplaySerializer(request.user)
        return Response(serializer.data)

    def patch(self, request):
        serializer = UserDisplaySerializer(request.user, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


class UserUpdateAPIView(UpdateAPIView):
    queryset = CustomUser.objects.all()
    serializer_class = UserDisplaySerializer


class AvatarAPIView(APIView):
    parser_classes = (MultiPartParser, FormParser)

    def get(self, request):
        serializer = AvatarSerializer(request.user)
        return Response(serializer.data)

    def put(self, request, format=None):
        serializer = AvatarSerializer(request.user, data=request.data)
        print(request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        else:
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
serializer.py
from rest_framework import serializers
from profiles.models import CustomUser, Subscription


class SubscriptionSerializer(serializers.ModelSerializer):
    class Meta:
        model = Subscription
        exclude = ('id', 'user', )


class UserDisplaySerializer(serializers.ModelSerializer):
    subscription = SubscriptionSerializer(read_only=True, many=False)

    class Meta:
        model = CustomUser
        exclude = ('password',)


class AvatarSerializer(serializers.ModelSerializer):
    class Meta:
        model = CustomUser
        fields = ('avatar',)
urls.py
from django.urls import path
from django.conf.urls import include
from profiles.api.views import (CurrentUserAPIView,
                                UserUpdateAPIView,
                                AvatarAPIView,
                                )
from rest_framework import routers

urlpatterns = [
    path("user/", CurrentUserAPIView.as_view(), name="current-user"),
    path("user/update/<int:pk>", UserUpdateAPIView.as_view()),
    path("avatar/", AvatarAPIView.as_view(), name='user-avatar'),
]
models.py
from django.db import models
from django.contrib.auth.models import AbstractUser


class CustomUser(AbstractUser):
    bio = models.CharField(max_length=240, blank=True)
    city = models.CharField(max_length=30, blank=True)
    avatar = models.ImageField(null=True, blank=True)
I can upload files from the Django admin with no problems. It's just when running through the serializer that I have issues, particularly because the data passes is_valid() and a 200 code is issued.
I'm half a day deep and I've got nothing.
Not sure if your user is logged in or not; as your form data shows:
<QueryDict: {'myFile': [<InMemoryUploadedFile: 1965.jpg (image/jpeg)>]}> [07/Feb/2021 10:48:54] "PUT /api/profile/avatar/ HTTP/1.1" 200 31
In your form data, the input name of the image is "myFile", while the name of the ImageField in your model and serializer is "avatar".
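So either change the front end to send the file under the name "avatar", or remap it in the view before it reaches the serializer. A rough sketch of the second option, reusing the AvatarAPIView from the question (the remapping line is the only real change):

    # Inside AvatarAPIView:
    def put(self, request, format=None):
        # The front end posts the file as "myFile"; the serializer expects "avatar".
        data = {"avatar": request.data.get("myFile")}
        serializer = AvatarSerializer(request.user, data=data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)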
I am trying to read data from a Kafka topic which has been serialized using Google's protobuf.
I compiled the proto files using protoc, which generated pb2 files.
Now I am trying to use Faust and create a stream processor, but I can't find the correct way to use the pb2 files as key_serializer and value_serializer.
Here is what I have tried:
import faust

from proto.topic_pb2 import topic

app = faust.App(
    'faust-consumer',
    broker='kafka://',
    store="memory://",
    cache="memory://",
)

schema = faust.Schema(
    ## key_type=topic.PK,
    ## value_type=topic,
    key_serializer=topic.PK,
    value_serializer=topic,
)

topic = app.topic(
    'topic',
    schema=schema
)


@app.agent(topic)
async def consume(topic):
    async for event in topic:
        print(event)


if __name__ == "__main__":
    app.main()
Does anybody have any idea how to use the pb2 files in the serializers?
Man, I was trying to do the same thing this past week. After struggling, I finally got something working - not the best way, but it works well enough.
So initially I used this Python compiler, https://github.com/danielgtaylor/python-betterproto, to generate the *.py files with dataclasses / type hinting.
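For context, python-betterproto emits plain dataclasses whose type annotations can be walked at runtime, which is what the helper below relies on; a generated message looks roughly like this (the message and field names here are only illustrative):

import betterproto
from dataclasses import dataclass


@dataclass
class YourModel(betterproto.Message):
    # Roughly what python-betterproto generates from a .proto message definition.
    name: str = betterproto.string_field(1)
    count: int = betterproto.int32_field(2)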
Then, I was able to create Faust.Record classes dynamically by using a helper:
import abc
import inspect
from typing import Type

import betterproto
import faust

GENERATED_SUFFIX = "__FaustRecord_Auto"


def _import_relative_class(module: str, klass_name: str):
    resolved_import = __import__(module, fromlist=[klass_name])
    klass = getattr(resolved_import, klass_name)
    return klass


def _is_record(attype: Type):
    return (
        inspect.isclass(attype)
        and isinstance(attype, betterproto.Message)
        or isinstance(attype, abc.ABCMeta)
    )


def _build_record_annotations(klass: Type):
    annotations = {}
    for atname, attype in klass.__annotations__.items():
        if _is_record(attype):
            annotations[atname] = make_faust_record(attype)
        elif isinstance(attype, str):
            subklass = _import_relative_class(klass.__module__, attype)
            annotations[atname] = make_faust_record(subklass)
        else:
            annotations[atname] = attype
    return annotations


def make_faust_record(klass: Type):
    type_name = f"{klass.__name__}{GENERATED_SUFFIX}"
    record_type = type(type_name, (faust.Record, klass), {})
    record_type.__annotations__ = _build_record_annotations(klass)
    record_type._init_subclass()
    return record_type
Now you can use it like:
import faust

from proto.your_models import YourModel  # Import your generated proto here
from faust_converter import make_faust_record

app = faust.App(
    'faust-consumer',
    broker='kafka://',
    store="memory://",
    cache="memory://",
)

model_record = make_faust_record(YourModel)

topic = app.topic(
    'topic',
    value_type=model_record
)


@app.agent(topic)
async def consume(topic):
    async for event in topic:
        print(event)


if __name__ == "__main__":
    app.main()
I was also experimenting with using Protobuf with Faust.
Mentioned below is a solution using Faust serializer codecs.
faust-protobuf: https://github.com/hemantkashniyal/faust-protobuf
proto_serializer.py
from faust.serializers import codecs
from typing import Any
from google.protobuf import json_format
from google.protobuf.json_format import MessageToJson
from google.protobuf.json_format import MessageToDict
from google.protobuf import text_format
from google.protobuf.text_format import MessageToString
from google.protobuf.text_format import MessageToBytes


class ProtobufSerializer(codecs.Codec):
    def __init__(self, pb_type: Any):
        self.pb_type = pb_type
        super(self.__class__, self).__init__()

    def _dumps(self, pb: Any) -> bytes:
        return pb.SerializeToString()

    def _loads(self, s: bytes) -> Any:
        pb = self.pb_type()
        pb.ParseFromString(s)
        return pb
app.py
import faust
from google.protobuf.json_format import MessageToJson

from .proto.greetings_pb2 import Greeting
from .proto_serializer import ProtobufSerializer

app = faust.App(
    'faust-consumer',
    broker='kafka://',  # TODO: update kafka endpoint
    store="memory://",
    cache="memory://",
)

greetings_schema = faust.Schema(
    key_serializer=ProtobufSerializer(pb_type=Greeting),
    value_serializer=ProtobufSerializer(pb_type=Greeting),
)

topic = app.topic(
    'greetings',
    schema=greetings_schema
)


@app.agent(topic)
async def consume(topic):
    async for event in topic:
        print(MessageToJson(event))


@app.timer(5)
async def produce():
    for i in range(10):
        data = Greeting(hello="world", message=i)
        await consume.send(value=data)


if __name__ == "__main__":
    app.main()
I was able to do it by creating a Serializer class like so:
import faust
from abc import ABCMeta, abstractmethod
from google.protobuf.json_format import MessageToDict
from faust.serializers.codecs import Codec
from importlib import import_module
import logging

logger = logging.getLogger(__name__)


def get_proto(topic_name, only_pk=False):
    if not hasattr(get_proto, "topics"):
        setattr(get_proto, "topics", dict())
    get_proto.topics[topic_name] = import_module(
        "protodef.{}_pb2".format(topic_name)
    ).__getattribute__(topic_name.split(".")[-1])
    if only_pk:
        return getattr(get_proto, "topics").get(topic_name).PK
    else:
        return getattr(get_proto, "topics").get(topic_name)


class ProtoSerializer(Codec, metaclass=ABCMeta):
    @abstractmethod
    def only_key(self):
        ...

    def as_proto(self, topic_name):
        self._proto = get_proto(topic_name, self.only_key())
        return self

    def _loads(self, b):
        data = MessageToDict(
            self._proto.FromString(b),
            preserving_proto_field_name=True,
            including_default_value_fields=True,
        )
        # remove the key object from the unserialized message
        data.pop("key", None)
        return data

    def _dumps(self, o):
        # for deletes
        if not o:
            return None
        obj = self._proto()
        # add the key object to the message before serializing
        if hasattr(obj, "PK"):
            for k in obj.PK.DESCRIPTOR.fields_by_name.keys():
                if k not in o:
                    raise Exception(
                        "Invalid object `{}` for proto `{}`".format(o, self._proto)
                    )
                setattr(obj.key, k, o[k])
        for k, v in o.items():
            if hasattr(obj, k):
                setattr(obj, k, v)
            else:
                # log and skip fields the proto does not define
                logger.debug(
                    "Invalid value-attribute `%s` for proto `%s`", k, self._proto
                )
        return obj.SerializeToString()


class ProtoValue(ProtoSerializer):
    def only_key(self):
        return False


class ProtoKey(ProtoSerializer):
    def only_key(self):
        return True
and then use it as follows:
import faust

from utils.serializer import ProtoKey, ProtoValue

app = faust.App(
    'faust-consumer',
    broker='kafka://',
    store="memory://",
    cache="memory://",
)

topic = app.topic(
    'topic',
    key_serializer=ProtoKey().as_proto('topic'),
    value_serializer=ProtoValue().as_proto('topic')
)


@app.agent(topic)
async def consume(topic):
    async for event in topic:
        print(event)


if __name__ == "__main__":
    app.main()
I am trying to do the following:
@patch('uuid.uuid4', autospec=True)
def test_generate_adid(self, patched_uuid, app_api):
    patched_uuid.return_value = "9e51ab81-6d65-4b81-af3b-8f7f49d69ba7"
    adid = app_api.generate_adid()
    assert adid == "9e51ab81-6d65-4b81-af3b-8f7f49d69ba7"
Where app_api is a fixture of the class under test.
However, in my app_api class, uuid4() is not getting patched and keeps returning a uuid other than the one I am trying to force. Here is what the generate_adid() instance method looks like:
from uuid import uuid4

def generate_adid(self):
    adid = str(uuid4())
    return adid
The failing unit test error:
AssertionError: assert '90b29e86-e3b0-40aa-8971-f868f90cb009' == '9e51ab81-6d65-4b81-af3b-8f7f49d69ba7'
I have consulted this post: How to mock uuid generation in a test case? but I am still having no luck.
What am I doing wrong? Thanks in advance to everyone who replies.
EDIT: Here is the full code:
from requests import Session
from random import uniform
from hashlib import md5
from hmac import new
from uuid import uuid4
from json import dumps


class AppApi:
    def __init__(self, account):
        self.account = account
        self.session = Session()

    def generate_adid(self):
        adid = str(uuid4())
        return adid
Test Case:
from src import AppApi
from pytest import fixture
from unittest.mock import patch
from json import loads

ACCOUNT = {
    "email": "user@email.com",
    "username": "user",
    "password": "s3cr3t"
}


@fixture
def app_api():
    app_api = AppApi(ACCOUNT)
    yield app_api


class TestAppApi:
    @patch('uuid.uuid4')
    def test_generate_adid(self, patched_uuid, app_api):
        patched_uuid.return_value = "9e51ab81-6d65-4b81-af3b-8f7f49d69ba7"
        adid = app_api.generate_adid()
        assert adid == "9e51ab81-6d65-4b81-af3b-8f7f49d69ba7"
In your example you're patching the uuid4() function in the uuid module rather than the uuid4 name in the module you're trying to test. Take a look at the "where to patch" section of the Python unittest.mock docs.
Using your example above, you need to patch the uuid4 imported into the src module, i.e. use @patch("src.uuid4"). (If generate_adid actually lives in a submodule such as src/app_api.py, the target would be that module's dotted name instead, because patch has to replace the name where it is looked up.)
from src import AppApi
from pytest import fixture
from unittest.mock import patch
from json import loads

ACCOUNT = {
    "email": "user@email.com",
    "username": "user",
    "password": "s3cr3t"
}


@fixture
def app_api():
    app_api = AppApi(ACCOUNT)
    yield app_api


class TestAppApi:
    @patch('src.uuid4')
    def test_generate_adid(self, patched_uuid, app_api):
        patched_uuid.return_value = "9e51ab81-6d65-4b81-af3b-8f7f49d69ba7"
        adid = app_api.generate_adid()
        assert adid == "9e51ab81-6d65-4b81-af3b-8f7f49d69ba7"
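To make the "where to patch" point concrete, the patch target follows where the name is looked up at call time, not where it is defined. A small sketch reusing the names and imports from the test above:

# src does `from uuid import uuid4`, so generate_adid() resolves `uuid4`
# through the src module's namespace; that is the name patch must replace.
with patch("src.uuid4", return_value="9e51ab81-6d65-4b81-af3b-8f7f49d69ba7"):
    assert AppApi(ACCOUNT).generate_adid() == "9e51ab81-6d65-4b81-af3b-8f7f49d69ba7"

# If src instead did `import uuid` and called `uuid.uuid4()`,
# then "uuid.uuid4" would be the target that takes effect.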
Hope this helps!
I am having trouble creating a customizable swagger schema in the Django Rest Framework. I have read pages of documentation, but have not found a clear-cut example of how to generate swagger annotations in Python.
I am aware that swagger/schema documentation is readily generated when using ViewSets in Django. However, I am solely using APIViews and want to write a customized schema. I have tried creating a CoreAPI schema but am unsure of how to implement it. I am enclosing some of my sample code and some screenshots as well. The screenshots go from what I have to what I want.
Sample code:
urls.py
from django.conf.urls import url, include
from rest_framework.urlpatterns import format_suffix_patterns
from Views import SampleView as sv
from rest_framework_swagger.views import get_swagger_view
from rest_framework.documentation import include_docs_urls
from rest_framework.renderers import CoreJSONRenderer
from rest_framework.schemas import get_schema_view

schema_view = get_swagger_view(title='Sample API')

urlpatterns = [
    url(r'^sample/$', sv.SampleList.as_view()),
    url(r'^sample/(?P<id>[a-f\d]{24})/$', sv.SampleDetail.as_view()),
    url('^schema/$', schema_view),
]

urlpatterns = format_suffix_patterns(urlpatterns)
views.py
from rest_framework.views import APIView
from Manager.SampleManager import SampleManager as sm

_sampleManager = sm()


class SampleList(APIView):
    """
    get:
    Return a list of all the existing samples.

    post:
    Create a new sample.
    """

    def get(self, request, format=None):
        return _sampleManager.getAll()

    def post(self, request, format=None):
        return _sampleManager.create(request)


class SampleDetail(APIView):
    """
    get:
    Get a sample.

    put:
    Update a sample.

    delete:
    Delete a sample.
    """

    def get(self, request, id, format=None):
        return _sampleManager.getById(id)

    def put(self, request, id, format=None):
        return _sampleManager.update(request, id)

    def delete(self, request, id, format=None):
        return _sampleManager.deleteById(id)
Serializers.py
from rest_framework_mongoengine.serializers import DocumentSerializer
from .models import Sample, SampleInner
from Serializers.SampleInnerSerializer import SampleInnerSerializer


class SampleSerializer(DocumentSerializer):
    other = SampleInnerSerializer(many=True)

    class Meta:
        model = Sample
        fields = '__all__'

    def create(self, validated_data):
        samples = validated_data.pop('other')
        created_instance = super(SampleSerializer, self).create(validated_data)
        for sample_data in samples:
            created_instance.other.append(SampleInner(**sample_data))
        created_instance.save()
        return created_instance

    def update(self, instance, validated_data):
        samples = validated_data.pop('other')
        updated_instance = super(SampleSerializer, self).update(instance, validated_data)
        for sample_data in samples:
            updated_instance.other.append(SampleInner(**sample_data))
        updated_instance.save()
        return updated_instance
Schema.py
import coreapi
from rest_framework.decorators import api_view, renderer_classes
from rest_framework import renderers, response

schema = coreapi.Document(
    title='Sample API',
    content={
        'sample': coreapi.Link(
            url='/sample/',
            action='post',
            fields=[
                coreapi.Field(
                    name='from',
                    required=True,
                    location='query',
                    description='City name or airport code.'
                ),
                coreapi.Field(
                    name='to',
                    required=True,
                    location='query',
                    description='City name or airport code.'
                ),
                coreapi.Field(
                    name='date',
                    required=True,
                    location='query',
                    description='Flight date in "YYYY-MM-DD" format.'
                )
            ],
            description='Create partner'
        )
    }
)


@api_view()
@renderer_classes([renderers.CoreJSONRenderer])
def schema_view(request):
    return response.Response(schema)
There are two solutions for you in this scenario: one, go with the GenericAPIView; two, create a custom raw schema.
Let's go with solution one.
urls.py
schema_view = get_swagger_view(title='Test All API')

urlpatterns = [
    path('swagger2/', schema_view),
]
view.py
class LoginAPIView(generics.GenericAPIView):
    serializer_class = LoginSerializer
    permission_classes = [permissions.AllowAny]

    def post(self, request):
        serializer = self.serializer_class(data=request.data)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
Result
Solution two:
urls.py
The configuration is the same as before.
views.py
import coreapi
import coreschema
from rest_framework import permissions, status
from rest_framework.response import Response
from rest_framework.schemas import ManualSchema
from rest_framework.views import APIView
from rest_framework_simplejwt.tokens import RefreshToken


class BlacklistTokenAdding(APIView):
    permission_classes = [permissions.AllowAny]
    schema = ManualSchema(fields=[
        coreapi.Field(
            "first_field",
            required=True,
            location="body",
            schema=coreschema.String()
        ),
        coreapi.Field(
            "second_field",
            required=True,
            location="body",
            schema=coreschema.String()
        ),
    ])

    def post(self, request, format='json'):
        try:
            refresh_token = request.data["refresh_token"]
            token = RefreshToken(refresh_token)
            token.blacklist()
            return Response(status=status.HTTP_200_OK)
        except Exception as e:
            return Response(status=status.HTTP_400_BAD_REQUEST)
Note: first_field and second_field are just for demonstration; you can add as many fields here as you need.
Result