Download a database in Excel format - excel

I am creating an application in which there is a button to download the database. On the backend I am working with FastAPI and MongoDB.
filtered_db = db.collection.find(base_query)
docs = []
async for doc in docs:
    docs.append(Document(**doc).dict())
df = pd.DataFrame(docs)
return Response(content=df, media_type="text/csv")
Here is a sample of my code: it filters the database in MongoDB, then uses a model and transforms the result into a DataFrame. But this is not working, could you help me?
I get the error: "AttributeError: 'DataFrame' object has no attribute 'encode'"

First, you are trying to append the retrieved filtered_db data to a docs list, but you are iterating over the empty docs list instead of filtered_db.
It should be
filtered_db = db.collection.find(base_query)
docs = []
async for doc in filtered_db:
    docs.append(Document(**doc).dict())
If you are trying to export the data to a .csv file on disk, you can use the DataFrame.to_csv method (with a file path it writes the file and returns None, so there is nothing useful to return):
df.to_csv('output.csv')
If you want your API to show the .csv file in the Response, you can do it like this
return Response(content=df.to_csv(), media_type="text/csv")
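If the goal is for the browser to download the file (as the question title suggests) rather than just display the CSV text, a small variant of the line above adds a Content-Disposition header; the filename here is only an example:
return Response(
    content=df.to_csv(index=False),
    media_type="text/csv",
    headers={"Content-Disposition": "attachment; filename=export.csv"},
)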
By the way, here is a complete FastAPI example:
from fastapi import FastAPI, Response
from pydantic import BaseModel
import pandas as pd

app = FastAPI()


class Document(BaseModel):
    id: int
    column1: str
    column2: str


filtered_db = [
    {"id": "1", "column1": "c1-value1", "column2": "c2-value1"},
    {"id": "2", "column1": "c1-value2", "column2": "c2-value2"}
]


async def read_data(data: list):
    docs = []
    for doc in data:
        docs.append(Document(**doc).dict())
    df = pd.DataFrame(data=docs)
    return df


@app.get("/xlsx")
async def get_excel():
    df = await read_data(filtered_db)
    # export data to csv
    # df.to_csv('output.csv')
    return Response(content=df.to_csv(), media_type="text/csv")
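Since the title asks for Excel specifically, here is a hedged variant of the /xlsx handler that returns a real .xlsx workbook instead of CSV. It assumes the openpyxl package is installed (pandas needs an engine for DataFrame.to_excel) and is meant as a sketch replacing get_excel above, not as an additional route:
from io import BytesIO

@app.get("/xlsx")
async def get_excel():
    df = await read_data(filtered_db)
    buffer = BytesIO()
    # write the workbook into an in-memory buffer (requires openpyxl)
    df.to_excel(buffer, index=False)
    return Response(
        content=buffer.getvalue(),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={"Content-Disposition": "attachment; filename=export.xlsx"},
    )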

How to upload multiple images and multiple data fields in django rest framework

I have read "How to upload multiple files in django rest framework" and "django rest framework for multiple image upload", but I am still unable to follow them, as a proper description is not given.
Hi,
I am creating a REST API and am new to Django REST framework. I need help with a few points mentioned below:
1. How to create a foreign key correctly. I have two tables, one where images will be uploaded and one where data will be saved. I want the ids of these two tables to be linked by a foreign key, so that when I select data from table 1 I can query the images table and get the images related to that table 1 id.
2. I have created serializers, but when I run my code and check for errors using serializer.errors, I get:
{'image': ['Incorrect type. Expected pk value, received bytes.'], 'img_id': ['Invalid pk "1" - object does not exist.']}
3. I am writing a POST API, so I need to save the data and the images first.
How can I achieve this?
My code:
models:
from django.db import models

# Create your models here.
class ImageModels(models.Model):
    media = models.ImageField(upload_to="images")


class categoryRegistration(models.Model):
    name = models.CharField(max_length=100)
    image = models.ManyToManyField(ImageModels, related_name="file_content", blank=True, null=True)
    description = models.TextField()
    price = models.IntegerField()
    img_id = models.ForeignKey(ImageModels, on_delete=models.CASCADE)
serializer:
from rest_framework import serializers
from apps.categorymanagement.models import categoryRegistration


class CategorySerializer(serializers.ModelSerializer):
    class Meta:
        model = categoryRegistration
        fields = ("id", "name", "image", "description", "price", "img_id")
        extra_kwargs = {
            "image": {
                "required": False,
            },
        }
urls:
from django.conf.urls import url
from apps.categorymanagement import views

urlpatterns = [
    url(r'^object/categories$', views.category_list, name="categoryList"),
    url(r'^categories/data/(?P<pk>[0-9]+)$', views.category_detail, name="categoryDetails"),
    url(r'^categories/released$', views.category_released, name="categoryReleased")
]
views:
from django.http.response import JsonResponse
from rest_framework.parsers import JSONParser
from rest_framework.response import Response
from rest_framework import status
from apps.categorymanagement.models import ImageModels, categoryRegistration
from apps.categorymanagement.serializers import CategorySerializer
from rest_framework.decorators import api_view


# all the views written here are function based
@api_view(['GET', 'POST', 'DELETE'])
def category_list(request):
    # get list of all categories
    if request.method == "POST":
        files = request.FILES.getlist("file_content")
        form_data = {}
        form_data["name"] = request.data["name"]
        form_data["description"] = request.data["description"]
        form_data["price"] = request.data["price"]
        form_data["img_id"] = request.data["img_id"]
        for images in files:
            form_data["image"] = images
        serializer = CategorySerializer(data=form_data)
        print(serializer.is_valid())
        print(serializer.errors)
    return Response('ok')
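The "Expected pk value, received bytes" error appears to come from handing the raw uploaded files to fields that the ModelSerializer treats as primary-key relations (image is a ManyToManyField and img_id a ForeignKey). A minimal, untested sketch of one way around it, reusing the imports and models above: save each upload as its own ImageModels row first, then pass the resulting ids to the serializer. The view name category_create is made up for illustration:
@api_view(['POST'])
def category_create(request):
    files = request.FILES.getlist("file_content")

    # save each uploaded file as its own ImageModels instance
    image_ids = [ImageModels.objects.create(media=f).id for f in files]

    form_data = {
        "name": request.data["name"],
        "description": request.data["description"],
        "price": request.data["price"],
        "image": image_ids,                              # M2M field takes a list of pks
        "img_id": image_ids[0] if image_ids else None,   # FK takes a single pk
    }

    serializer = CategorySerializer(data=form_data)
    if serializer.is_valid():
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)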

Modern APIs with FastAPI - Redis Caching

I'm trying to implement Redis caching in my API for the first time. It is a simple FastAPI application that uses the OpenWeather API to query some weather information, and my intention is to cache each JSON response in the Redis server. I have made it work with a single key (the city), but with this approach validating the query parameters does not work with the error handling I put in place.
caching.py
import sys
from datetime import timedelta
import json

import redis

from services.openweather_service import get_report


def redis_connect() -> redis.client.Redis:
    try:
        client = redis.Redis(
            host="localhost",
            port=6379,
            db=0,
        )
        ping = client.ping()
        if ping is True:
            return client
    except redis.ConnectionError:
        print("Connection Error!")
        sys.exit(1)


client = redis_connect()


def get_routes_from_cache(key: str) -> str:
    """Data from redis."""
    val = client.get(key)
    return val


def set_routes_to_cache(key: str, value: str) -> bool:
    """Data to redis."""
    state = client.setex(
        key,
        timedelta(hours=24),
        value=value,
    )
    return state
async def route_optima(city: str, state: str, country: str, units=None) -> dict:
    location = {"city": city, "state": state, "country": country, "units": units}

    # First it looks for the data in redis cache
    data = get_routes_from_cache(key=json.dumps(location))
    # print(data)
    # print(type(data))

    # If cache is found then serves the data from cache
    if data is not None:
        data = data.decode("UTF-8")
        data_dict = json.loads(data)
        print(data_dict)
        print(type(data_dict))
        data["cache"] = True
        return data
    else:
        # If cache is not found then sends request to the OpenWeather API
        data = await get_report(city, state, country, units)

        # This block saves the response to redis and serves it directly
        data["cache"] = False
        data = json.dumps(data)
        state = set_routes_to_cache(key=json.dumps(location), value=json.dumps(data))
        if state is True:
            return json.dumps(data)
        return data
Then I took a different approach, making the query params dict location = {"city": city, "state": state, "country": country, "units": units} the key and the JSON response the value. But when the app tries to get the response from the cache, it gets weird: right after passing the query params dict into json.dumps, Redis gives me back a bytes object, so I decode it with decode("utf-8"), but instead of coming back as a dict it gives me a <class 'str'> object. There I am lost... can anyone help me out here?
weather_api.py
from typing import Optional

import fastapi
from fastapi import Depends

from models.location import Location
from infrastructure.caching import route_optima
from models.validation_error import ValidationError

router = fastapi.APIRouter()


@router.get("/api/weather/{city}")
async def weather(loc: Location = Depends(), units: Optional[str] = "metric"):
    return await route_optima(loc.city, loc.state, loc.country, units)
And if I am using the wrong approach here, please point me to a better one.
As you are using FastAPI, it is better to use aioredis to leverage its async functionality.
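A minimal sketch of what that could look like with aioredis 2.x, assuming a local Redis and the same 24-hour TTL as above (the function names mirror the ones in caching.py, but this is an untested outline, not a drop-in replacement):
from datetime import timedelta

import aioredis

redis = aioredis.from_url("redis://localhost:6379/0")


async def get_routes_from_cache(key: str):
    # returns bytes or None, just like the sync client
    return await redis.get(key)


async def set_routes_to_cache(key: str, value: str) -> bool:
    # setex(name, time, value); a timedelta works for the TTL
    return await redis.setex(key, timedelta(hours=24), value)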
It turns out I was returning the wrong data; it should be like this
# If cache is found then serves the data from cache
if data is not None:
    data = data.decode("UTF-8")
    data_dict = json.loads(data)
    print(data_dict)
    print(type(data_dict))
    data_dict["cache"] = True
    return data_dict
and everything works fine.

FastAPI: how to read nested JSON as a dictionary?

I am trying to receive the following JSON:
{
    "va": "{1: 5, 2:1, 3:5}"
}
in my main.py I have the following:
from typing import Optional, Dict

from fastapi import FastAPI
from pydantic import BaseModel


class rq(BaseModel):
    va: Dict[str, str]


app = FastAPI(debug=True)


@app.post("/hello")
async def create_item(rq: rq):
    return 1
but I get
"msg": "value is not a valid dict",
"type": "type_error.dict"
How can I receive va as a dict so that I can iterate over it?
When you create a model, every field is actually a key-value pair, so with your example it expects something like this:
{
    "va": {"some": "value"}
}
But what you send is
"va": str
So I don't know how you send the value, but you are definitely sending a str instead of a Dict[str, str].
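A minimal sketch of one way to make this work, assuming you control the payload: send va as a real JSON object instead of a string, and type the model to match. Dict[int, int] is used here because the sample data has integer keys and values; JSON object keys always arrive as strings, and pydantic coerces them:
from typing import Dict

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class Rq(BaseModel):
    va: Dict[int, int]  # keys arrive as "1", "2", ... and are coerced to int


@app.post("/hello")
async def create_item(rq: Rq):
    # example payload: {"va": {"1": 5, "2": 1, "3": 5}}
    return {"total": sum(rq.va.values())}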

Getting Error While Inserting JSON data to DynamoDB using Python

Hi,
I am trying to put JSON data into an AWS DynamoDB table using AWS Lambda, however I am getting an error like the one below. My JSON file is uploaded to an S3 bucket.
Parameter validation failed:
Invalid type for parameter Item, value:
{
    "IPList": [
        "10.1.0.36",
        "10.1.0.27"
    ],
    "TimeStamp": "2020-04-22 11:43:13",
    "IPCount": 2,
    "LoadBalancerName": "internal-ALB-1447121364.us-west-2.elb.amazonaws.com"
}
, type: <class 'str'>, valid types: <class 'dict'>: ParamValidationError
Below is my Python script:
import boto3
import json

s3_client = boto3.client('s3')
dynamodb = boto3.resource('dynamodb')


def lambda_handler(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    json_file_name = event['Records'][0]['s3']['object']['key']
    json_object = s3_client.get_object(Bucket=bucket, Key=json_file_name)
    jsonFileReader = json_object['Body'].read()
    jsonDict = json.loads(jsonFileReader)
    table = dynamodb.Table('test')
    table.put_item(Item=jsonDict)
    return 'Hello'
Below is my JSON content:
"{\"IPList\": [\"10.1.0.36\", \"10.1.0.27\"], \"TimeStamp\": \"2020-04-22 11:43:13\",
\"IPCount\": 2, \"LoadBalancerName\": \"internal-ALB-1447121364.us-west-2.elb.amazonaws.com\"}"
Can someone help me? How can I insert this data into DynamoDB?
json.loads(jsonFileReader) returns a string, but table.put_item() expects a dict. Use json.load() instead.
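The sample content shown above is itself a JSON-encoded string (the whole document is wrapped in quotes), so another possibility is that the body simply needs to be parsed twice. A minimal sketch of the handler under that assumption:
import json

import boto3

s3_client = boto3.client('s3')
dynamodb = boto3.resource('dynamodb')


def lambda_handler(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = event['Records'][0]['s3']['object']['key']
    body = s3_client.get_object(Bucket=bucket, Key=key)['Body'].read()

    item = json.loads(body)
    if isinstance(item, str):
        # the file held a JSON string that itself contains JSON, so parse once more
        item = json.loads(item)

    dynamodb.Table('test').put_item(Item=item)
    return 'Hello'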

How can I get data from a MongoDB collection using pymongo in Django with a GET method?

I have one MongoDB database and I have connected that DB with pymongo in Django. I am new to Django. I am trying to check whether the entered data is present in the collection or not and, if it is present, return that record using a GET method.
import pymongo
from pymongo import MongoClient

db_name = 'student_db'
client = MongoClient('localhost', 27017)
db_obj = client[db_name]
collection = db_obj['mongo_app_student']


@api_view(['GET'])
def test(request):
    data = request.data
    for x in collection.find():
        if data in x:
            print('entered a right value')
    return Response(data)
TypeError at /test
unhashable type: 'dict'
I am getting this error when I try to get the output in Postman. Please help.
First, you should use a POST request for that. Also, since find() returns a cursor, you're iterating over the cursor directly, and I'm not sure that's a good idea. Assuming request.data is a dict, try using == to compare it with x.
Also try casting what you get from Mongo to a list, like this:
import pymongo
from pymongo import MongoClient
from rest_framework.decorators import api_view
from rest_framework.response import Response

db_name = 'student_db'
client = MongoClient('localhost', 27017)
db_obj = client[db_name]
collection = db_obj['mongo_app_student']


@api_view(['GET', 'POST'])
def test(request):
    response_data = None
    if request.method == 'POST':
        data = request.data
        for x in list(collection.find()):
            if data == x:
                print('entered a right value')
                response_data = data
    return Response(response_data)
Let me know how it goes.
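A leaner variant, assuming you only need the matching document: let MongoDB do the filtering with find_one instead of scanning the whole collection in Python. This sketch reuses collection, api_view and Response from the code above; the view name find_student is made up for illustration:
@api_view(['POST'])
def find_student(request):
    # use the posted fields as the filter, e.g. {"name": "alice"}
    query = dict(request.data)
    doc = collection.find_one(query, {"_id": 0})  # drop the non-serializable ObjectId
    if doc is None:
        return Response({"detail": "not found"}, status=404)
    return Response(doc)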
