django except matching query does not exist - python-3.x

I'm trying to use try and except in Django; everything works perfectly if the user exists, but if the user doesn't exist, instead of returning NULL my function keeps raising:
Userprofile matching query does not exist.
I know the user does not exist in the table; I just want to return nothing instead of showing an error page.
from django import template
from album.models import Album
from django.shortcuts import get_object_or_404, render

register = template.Library()

@register.inclusion_tag('album/user_album.html')
def userAlbumFunction(id):
    try:
        albums = Album.objects.filter(user_id=id)
    except Album.DoesNotExist:
        albums = None
    return {'albums': albums}

try:
    albums = Album.objects.get(user_id=id)
except Album.DoesNotExist:
    albums = None
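Note that filter() never raises DoesNotExist; it simply returns an empty queryset, so only get() triggers that exception. As a minimal alternative sketch (not from the thread above, assuming the same Album model), first() returns None when nothing matches and needs no exception handling:
from album.models import Album

def user_album(id):
    # .first() returns the first matching Album or None instead of raising DoesNotExist
    album = Album.objects.filter(user_id=id).first()
    return {'albums': album}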

Related

How to consume url parameters in Django views

I always get an error:
The current path, api/det/1, didn’t match any of these.
My urls.py
url(r'^api/det/<int:id>',views.DetailsAPI.as_view(),name='DetailsAPI')
My views.py
class DetailsAPI(APIView):
    def get(self, id):
        filter_list = Details.objects.all()
        #filter_list = Details.objects.get(id=id)
        envid = self.kwargs['id']
        df = read_frame(filter_list)
        df_det = df.loc[df['Id'] == int(id)]
        df_final = df_det.to_json(orient='records')
        return HttpResponse(df_final, content_type='application/json')
I'm sure there is something simple that I'm missing, and I can't get it to work with whatever syntax I try. Any suggestions?
Changing the URL to the one below worked.
url(r'^api/det/(?P<id>\d+)',views.DetailsAPI.as_view(),name='DetailsAPI')
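For completeness (my addition, not part of the original answer): the <int:id> converter syntax only works with path(), not with the regex-based url()/re_path(), so an equivalent fix is:
from django.urls import path

urlpatterns = [
    # path() understands the <int:id> converter and passes id as a keyword argument
    path('api/det/<int:id>', views.DetailsAPI.as_view(), name='DetailsAPI'),
]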

Sync to Async Django ORM queryset foreign key property

Seemingly simple situation:
Django model has foreign key:
class Invite(models.Model):
    inviter = models.ForeignKey(User, on_delete=models.CASCADE)
    ...
In async context, I do:
# get invite with sync_to_async decorator, then
print(invite.inviter)
Get async's favorite error:
You cannot call this from an async context - use a thread or sync_to_async
print(sync_to_async(invite.inviter)) # -> throws the same error
Sure, I can do:
@sync_to_async
def get_inviter(self, invite):
    return invite.inviter
But this is tedious if I have to do this for every queryset property call.
Is there a sane way to handle this?
Perhaps, there is a way to do this for all calls like that at once?
Yes, resolve the extra fields using select_related:
# Good: pick the foreign key fields using select_related
user = (await Invite.objects.select_related('user').aget(key=key)).user
Your other non-foreign-key attributes, such as strings and ints, should already
exist on the model.
These won't work (although they feel like they should):
# Error django.core.exceptions.SynchronousOnlyOperation ... use sync_to_async
user = await Model.objects.aget(key=key).user
# Error (the field is actually missing from the `_state` fields cache)
user = await sync_to_async(Invite.objects.get)(key=key).user
Other examples for research
A standard aget, followed by a foreign key inspection yields a SynchronousOnlyOperation error.
I have a string key, and a ForeignKey user to the standard user model.
class Invite(models.Model):
    user = fields.user_fk()
    key = fields.str_uuid()
An example with alternatives that mostly don't work:
Invite = get_model('invites.Invite')
User = get_user_model()

def _get_invite(key):
    return Invite.objects.get(key=key)

async def invite_get(self, key):
    # (a) works, the related field is populated on the response.
    user = (await Invite.objects.select_related('user').aget(key=key)).user

async def intermediate_examples(self, key):
    # works, but is clunky.
    user_id = (await Invite.objects.aget(key=key)).user_id
    # the `user_id` (any `_id` key) exists for a FK
    user = await User.objects.aget(id=user_id)

async def failure_examples(self, key):
    # (b) does not work.
    user = await sync_to_async(Invite.objects.get)(key=key).user
    invite = await sync_to_async(Invite.objects.get)(key=key)
    # (c) these are not valid, although the error may say so.
    user = await invite.user
    user = await sync_to_async(invite.user)
    # same as example (b)
    get_invite = sync_to_async(_get_invite, thread_sensitive=True)
    invite = get_invite(key)
    user = invite.user  # Error
    # (d) does not populate the additional model
    user = await Invite.objects.aget(key=key).user  # Error
print(sync_to_async(invite.inviter)) # -> throws the same error
That's because it's equivalent to:
i = invite.inviter # -> throws the error here
af = sync_to_async(i)
print(af)
The correct usage is:
f = lambda: invite.inviter
af = sync_to_async(f)
i = await af()
print(i)
# As a one-liner
print(await sync_to_async(lambda: invite.inviter)())
Is there a sane way to handle this?
Perhaps, there is a way to do this for all calls like that at once?
(Disclaimer: Not tested in production.)
With nest_asyncio, you could do this:
import asyncio
from asgiref.sync import sync_to_async

def do(f):
    import nest_asyncio
    nest_asyncio.apply()
    return asyncio.run(sync_to_async(f)())

print(do(lambda: invite.inviter))
Or take it even further:
class SynchronousOnlyAttributeHandler:
    def __getattribute__(self, item):
        from django.core.exceptions import SynchronousOnlyOperation
        try:
            return super().__getattribute__(item)
        except SynchronousOnlyOperation:
            from asgiref.sync import sync_to_async
            import asyncio
            import nest_asyncio
            nest_asyncio.apply()
            return asyncio.run(sync_to_async(lambda: self.__getattribute__(item))())

class Invite(models.Model, SynchronousOnlyAttributeHandler):
    inviter = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
    ...

# Do this even in an async context
print(invite.inviter)
Does something like this work? Instead of invite.inviter you do await async_resolve_attributes(invite, "inviter")
@sync_to_async
def async_resolve_attributes(instance, *attributes):
    current_instance = instance
    for attribute in attributes:
        current_instance = getattr(current_instance, attribute)
    resolved_attribute = current_instance
    return resolved_attribute
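A usage sketch (my assumption of how it would be called, following the description above):
# resolves invite.inviter from an async context; chained attributes also work,
# e.g. await async_resolve_attributes(invite, "inviter", "email")
user = await async_resolve_attributes(invite, "inviter")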

No CustomerOrder matches the query

I am trying to generate PDFs for customers who successfully pay for an item, but I keep getting this error.
No CustomerOrder matches the query
Below is my code.
views.py
@staff_member_required
def admin_order_pdf(request, order_id):
    order = get_object_or_404(CustomerOrder, id=order_id)
    html = render_to_string('orders/pdf.html', {'order': order})
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = f'filename=order_{order_id}.pdf'
    weasyprint.HTML(string=html).write_pdf(response, stylesheets=[weasyprint.CSS(settings.STATIC_ROOT + 'css/pdf.css')])
    return response
urls.py
urlpatterns = [
    path('', views.order_payout, name='order_payout'),
    path('admin/order/<int:order_id>/pdf', views.admin_order_pdf, name='admin_order_pdf'),
    path('confirm/', views.confirm_order, name='confirm_order'),
]
The get_object_or_404 function raises a 404 with the message 'No object matches the given query' when it cannot find any object in the specified model with the given filter, which is id=order_id in this case.
Check this section from the documentation for more information on the function.
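For reference, get_object_or_404(CustomerOrder, id=order_id) is roughly equivalent to this sketch, which is where the 404 comes from when the order id is not in the database:
from django.http import Http404

try:
    order = CustomerOrder.objects.get(id=order_id)
except CustomerOrder.DoesNotExist:
    raise Http404('No CustomerOrder matches the given query.')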

How do I access user in Django Async view?

I'm trying to access the user, but I get an error when the view is async.
Code:
from django.http import JsonResponse
async def archive(request):
    user = request.user
    return JsonResponse({'msg': 'success'})
error message:
django.core.exceptions.SynchronousOnlyOperation: You cannot call this from an async context - use a thread or sync_to_async.
What I tried:
from django.http import JsonResponse
from asgiref.sync import sync_to_async
async def archive(request):
    # user = sync_to_async(request.user)
    # user = sync_to_async(request.user)()
    # user = await sync_to_async(request.user)
    user = await sync_to_async(request.user)()
    return JsonResponse({'msg': 'success'})
Still getting the same error.
I want to access the user to check he/she has permission to archive a file.
EDIT:
I eventually figured out that I had to move it into a temporary method and run that as sync_to_async. I did this below:
def _check_user(request):
    user = request.user
    ''' Logic here '''
    return

async def archive(request):
    await sync_to_async(_check_user, thread_sensitive=True)(request=request)
    ''' Logic here '''
And this seems to work, but I'm not sure if this is the correct way of doing it?
Try this:
from django.http import JsonResponse
from asgiref.sync import async_to_sync, sync_to_async
@sync_to_async
def archive(request):
    user = request.user
    return JsonResponse({'msg': 'success'})
I don't know if it's really async; I'm trying to fix this problem too.
I've found something: https://www.valentinog.com/blog/django-q/
If the first option doesn't work, see this link.
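Another sketch that avoids the helper function (my addition, not from the answers above): wrap only the lazy request.user lookup in sync_to_async via a lambda, and keep the view itself async:
from asgiref.sync import sync_to_async
from django.http import JsonResponse

async def archive(request):
    # request.user triggers a synchronous DB query, so resolve it in a worker thread
    user = await sync_to_async(lambda: request.user)()
    if not user.is_authenticated:
        return JsonResponse({'msg': 'forbidden'}, status=403)
    return JsonResponse({'msg': 'success'})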

Attempting login with Scrapy-Splash

Since I am not able to log in to https://www.duif.nl/login, I tried many different methods like Selenium, with which I successfully logged in, but I didn't manage to start crawling.
Now I tried my luck with scrapy-splash, but I can't log in :(
If I render the login page with Splash, I see the following picture:
Well, there should be a login form, with username and password, but Scrapy can't see it?
I've been sitting in front of that login form for a week now and am losing my will to live..
My last question didn't even get one answer, so now I'm trying again.
Here is the HTML code of the login form:
When I log in manually, I get redirected to "/login?returnUrl=", where I only have this form data:
My Code
# -*- coding: utf-8 -*-
import scrapy
from scrapy_splash import SplashRequest
from scrapy.spiders import CrawlSpider, Rule
from ..items import ScrapysplashItem
from scrapy.http import FormRequest, Request
import csv

class DuifSplash(CrawlSpider):
    name = "duifsplash"
    allowed_domains = ['duif.nl']
    login_page = 'https://www.duif.nl/login'
    with open('duifonlylinks.csv', 'r') as f:
        reader = csv.DictReader(f)
        start_urls = [items['Link'] for items in reader]

    def start_requests(self):
        yield SplashRequest(
            url=self.login_page,
            callback=self.parse,
            dont_filter=True
        )

    def parse(self, response):
        return FormRequest.from_response(
            response,
            formdata={
                'username': 'not real',
                'password': 'login data',
            }, callback=self.after_login)

    def after_login(self, response):
        accview = response.xpath('//div[@class="c-accountbox clearfix js-match-height"]/h3')
        if accview:
            print('success')
        else:
            print(':(')
        for url in self.start_urls:
            yield response.follow(url=url, callback=self.parse_page)

    def parse_page(self, response):
        productpage = response.xpath('//div[@class="product-details col-md-12"]')
        if not productpage:
            print('No productlink', response.url)
        for a in productpage:
            items = ScrapysplashItem()
            items['SKU'] = response.xpath('//p[@class="desc"]/text()').get()
            items['Title'] = response.xpath('//h1[@class="product-title"]/text()').get()
            items['Link'] = response.url
            items['Images'] = response.xpath('//div[@class="inner"]/img/@src').getall()
            items['Stock'] = response.xpath('//div[@class="desc"]/ul/li/em/text()').getall()
            items['Desc'] = response.xpath('//div[@class="item"]/p/text()').getall()
            items['Title_small'] = response.xpath('//div[@class="left"]/p/text()').get()
            items['Price'] = response.xpath('//div[@class="price"]/span/text()').get()
            yield items
In my "prework", i crawled every internal link and saved it to a .csv-File, where i analyse which of the links are product links and which are not.
Now i wonder, if i open a link of my csv, it opens an authenticated session or not?
I cant find no cookies, this is also strange to me
UPDATE
I managed to login successfully :-) now i only need to know where the cookies are stored
Lua Script
LUA_SCRIPT = """
function main(splash, args)
    splash:init_cookies(splash.args.cookies)
    splash:go("https://www.duif.nl/login")
    splash:wait(0.5)
    local title = splash:evaljs("document.title")
    return {
        title = title,
        cookies = splash:get_cookies(),
    }
end
"""
I don't think using Splash here is the way to go, as even with a normal Request the form is there: response.xpath('//form[@id="login-form"]')
There are multiple forms available on the page, so you have to specify which form you want to base yourself on to make a FormRequest.from_response. It's best to specify the clickdata as well (so it goes to 'Login', not to 'forgot password'). In summary it would look something like this:
req = FormRequest.from_response(
    response,
    formid='login-form',
    formdata={
        'username': 'not real',
        'password': 'login data'},
    clickdata={'type': 'submit'}
)
If you don't use Splash, you don't have to worry about passing cookies - this is taken care of by Scrapy. Just make sure you don't put COOKIES_ENABLED=False in your settings.py
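Putting that together, a minimal sketch of the login flow without Splash (the CSV file name, form fields, and XPaths are assumptions carried over from the question):
import csv
import scrapy
from scrapy.http import FormRequest

class DuifLoginSpider(scrapy.Spider):
    name = 'duiflogin'
    allowed_domains = ['duif.nl']
    login_page = 'https://www.duif.nl/login'

    def start_requests(self):
        # a plain Request is enough, since the login form is present in the static HTML
        yield scrapy.Request(self.login_page, callback=self.login, dont_filter=True)

    def login(self, response):
        return FormRequest.from_response(
            response,
            formid='login-form',
            formdata={'username': 'not real', 'password': 'login data'},
            clickdata={'type': 'submit'},
            callback=self.after_login,
        )

    def after_login(self, response):
        # Scrapy carries the session cookies forward automatically from here on
        with open('duifonlylinks.csv') as f:
            for row in csv.DictReader(f):
                yield response.follow(row['Link'], callback=self.parse_page)

    def parse_page(self, response):
        ...  # same extraction logic as in the question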
