I am trying to understand aiohttp a little better. Can someone explain why my code does not print the response of the request and instead just prints the coroutine?
import asyncio
import aiohttp
import requests

async def get_event_1(session):
    url = "https://stackoverflow.com/"
    headers = {
        'content-Type': 'application/json'
    }
    response = await session.request('GET', url)
    return response.json()

async def get_event_2(session):
    url = "https://google.com"
    headers = {
        'content-Type': 'application/json'
    }
    response = await session.request('GET', url)
    return response.json()

async def main():
    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(
            get_event_1(session),
            get_event_2(session)
        )

loop = asyncio.get_event_loop()
x = loop.run_until_complete(main())
loop.close()
print(x)
Output:
$ python async.py
[<coroutine object ClientResponse.json at 0x10567ae60>, <coroutine object ClientResponse.json at 0x10567aef0>]
sys:1: RuntimeWarning: coroutine 'ClientResponse.json' was never awaited
How do I print the responses instead?
The error message you received is informing you that a coroutine was never awaited.
You can see from the aiohttp documentation that response.json() is also a coroutine and therefore must be awaited. https://docs.aiohttp.org/en/stable/client_quickstart.html#json-response-content
return await response.json()
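Applied to the question's first function, the fix would look roughly like this (a sketch; note that these particular URLs return HTML, so a real call to a non-JSON endpoint would need await response.text() instead):

async def get_event_1(session):
    url = "https://stackoverflow.com/"
    response = await session.request('GET', url)
    # response.json() is itself a coroutine, so it must be awaited as well
    return await response.json()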
I am using asyncio with nest_asyncio, but I keep getting a "coroutine was never awaited" warning.
import asyncio
import tracemalloc
import aiohttp
import nest_asyncio
import json

tracemalloc.start()

async def request_call(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            assert response.status == 200
            data = await response.read()
            data = json.loads(data.decode("utf-8"))
            return data

def get_json(url):
    loop = asyncio.get_event_loop()
    nest_asyncio.apply(loop)
    result = loop.run_until_complete(request_call(url))
    return result

async def get2():
    url = "https://reqres.in/api/users?page=2"
    return get_json(url)

async def get1():
    return get2()

async def snap():
    return get1()

def data():
    result = asyncio.run(snap())
    print(result)

data()
Output:
<coroutine object get1 at 0x0471AD28>
async.py:37: RuntimeWarning: coroutine 'get1' was never awaited
  Data()
Object allocated at (most recent call last):
  File "async.py", lineno 31
    return get1()
I cannot understand what the problem is or how to fix it.
Python=3.8.6 aiohttp=3.7.3 nest-asyncio=1.4.3
You just need to add await to certain function calls:
import asyncio
import tracemalloc
import aiohttp
import nest_asyncio
import json

tracemalloc.start()

async def request_call(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            assert response.status == 200
            data = await response.read()
            data = json.loads(data.decode("utf-8"))
            return data

def get_json(url):
    loop = asyncio.get_event_loop()
    nest_asyncio.apply(loop)
    result = loop.run_until_complete(request_call(url))
    return result

async def get2():
    url = "https://reqres.in/api/users?page=2"
    return get_json(url)

async def get1():
    return await get2()

async def snap():
    return await get1()

def data():
    result = asyncio.run(snap())
    print("AAAA")
    print(result)

data()
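As a side note (not part of the original answer): since get2 ultimately just runs the request_call coroutine, the nested event loop and nest_asyncio can usually be avoided by awaiting it directly. A minimal sketch:

async def get2():
    url = "https://reqres.in/api/users?page=2"
    # await the coroutine directly instead of spinning up a nested loop
    return await request_call(url)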
I'm trying to handle asynchronous HTTP requests. I call the async_provider() function from another module and perform subsequent tasks with the resulting response.text().
It only works if all requests succeed, but I can't handle exceptions for failed requests (whatever the reason for the exception). Thank you for your help.
Here is the relevant part of the code:
import asyncio
import aiohttp

# i call this function from another module
def async_provider():
    list_a, list_b = asyncio.run(main())
    return list_a, list_b

async def fetch(session, url):
    # session.post request cases
    if url == "http://...1":
        referer = "http://...referer"
        user_agent = (
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) "
            "AppleWebKit/605.1.15 (KHTML, like Gecko) "
            "Version/12.1 Safari/605.1.15"
        )
        payload = {'key1': 'value1', 'key2': 'value2'}
        async with session.post(
            url, data=payload, headers={"Referer": referer, "User-Agent": user_agent}
        ) as response:
            if response.status != 200:
                response.raise_for_status()
            return await response.text()
    # session.get request cases
    else:
        async with session.get(url) as response:
            if response.status != 200:
                response.raise_for_status()
            return await response.text()

async def fetch_all(session, urls):
    results = await asyncio.gather(
        *[asyncio.create_task(fetch(session, url)) for url in urls]
    )
    return results

async def main():
    urls = ["http://...1", "http://...2", "http://...3"]
    async with aiohttp.ClientSession() as session:
        response_text_1, response_text_2, response_text_3 = await fetch_all(
            session, urls
        )
        # some task with response text
Any exception breaks all requests
Check "return_exceptions" flag on gather.
results = await asyncio.gather(
    *[asyncio.create_task(fetch(session, url)) for url in urls],
    return_exceptions=True
)
With return_exceptions=True, gather returns a list in which failed calls appear as exception objects instead of propagating, so you can check each entry and re-raise or handle it as needed.
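A minimal sketch of how the gathered results could then be inspected (the variable names mirror the question's code; the handling itself is only illustrative):

results = await asyncio.gather(
    *[asyncio.create_task(fetch(session, url)) for url in urls],
    return_exceptions=True
)
for url, result in zip(urls, results):
    if isinstance(result, Exception):
        # this request failed; log it, retry it, or re-raise
        print(f"request to {url} failed: {result!r}")
    else:
        # result is the response text returned by fetch()
        print(f"request to {url} returned {len(result)} characters")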
I am trying to receive data from two endpoints at the same time, but if the websocket stops sending messages I also stop receiving data from the request to "https://www.blabla.com". What is the best way to solve this problem?
import asyncio
import aiohttp

URL = 'wss://www.some_web_socket.io'

async def get_some_data(session):
    url = "https://www.blabla.com"
    async with session.get(url) as response:
        data = await response.text()
        return data

async def ws_handler(url):
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect(url) as ws:
            msg = await ws.receive()
            while True:
                some_data_from_get_request = await get_some_data(session)
                msg_from_websocket = await ws.receive()
                if msg.type == aiohttp.WSMsgType.TEXT:
                    print(stream_data)
                    print(some_data_from_get_request)

def _main():
    asyncio.run(ws_handler(URL))

if __name__ == "__main__":
    _main()
This code serializes the return values of HTTP and websocket communication:
while True:
    some_data_from_get_request = await get_some_data(session)
    msg_from_websocket = await ws.receive()
To be able to detect either of the two coroutines returning, you can use asyncio.wait(..., return_when=asyncio.FIRST_COMPLETED):
http_fut = asyncio.ensure_future(get_some_data(session))
ws_fut = asyncio.ensure_future(ws.receive())
pending = {http_fut, ws_fut}
while pending:
    _done, pending = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
    if http_fut.done():
        some_data_from_get_request = http_fut.result()
        ...
    if ws_fut.done():
        msg_from_websocket = ws_fut.result()
        ...
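If both streams need to be consumed continuously, one way to extend this pattern (a sketch using the question's names, not part of the original answer) is to re-create each future as soon as it completes:

pending = {
    asyncio.ensure_future(get_some_data(session)),
    asyncio.ensure_future(ws.receive()),
}
while pending:
    done, pending = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
    for fut in done:
        result = fut.result()
        if isinstance(result, aiohttp.WSMessage):
            print("websocket message:", result.data)
            pending.add(asyncio.ensure_future(ws.receive()))             # keep listening
        else:
            print("http response:", result)
            pending.add(asyncio.ensure_future(get_some_data(session)))  # poll again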
import random
import asyncio
import json
import aiohttp
import sys
import urllib
from lxml.html.soupparser import parse
from aiohttp import ClientSession
from threading import Thread

def ttest():
    async def fetch(url, session):
        headers = {
            'Host': 'example.com'
        }
        cookies2 = {
            'test': 'test'
        }
        data = '{"test":"test"}'
        async with session.post(url, data=data, headers=headers, cookies=cookies2) as response:
            return await response.read()

    async def bound_fetch(sem, url, session):
        async with sem:
            html = await fetch(url, session)
            print(html)

    async def run(r):
        url = "https://test.com"
        tasks = []
        sem = asyncio.Semaphore(1000)
        async with aiohttp.ClientSession() as session:
            for i in range(r):
                task = asyncio.ensure_future(bound_fetch(sem, url, session))
                tasks.append(task)
            responses = asyncio.gather(*tasks)
            await responses

    number = 1
    loop = asyncio.get_event_loop()
    future = asyncio.ensure_future(run(number))
    loop.run_until_complete(future)

ttest()
This is the error: TypeError: _request() got an unexpected keyword argument 'cookies'
I want to use cookies as you can see in the code, but I cannot. Can anyone help me?
The feature was added on aiohttp GitHub master but not released yet.
Please either install aiohttp from GitHub or wait a little while for the aiohttp 3.5 release.
I hope to publish it in a few days.
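In the meantime, a possible workaround (not from the original answer) is to attach the cookies to the session instead of the individual request, since ClientSession itself accepts a cookies argument. A rough sketch using the question's variables:

async with aiohttp.ClientSession(cookies={'test': 'test'}) as session:
    # same cookies for every request made through this session
    async with session.post(url, data=data, headers=headers) as response:
        html = await response.read()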
I have an issue with asyncio that I can't really get my head around.
Take this working example (Python 3.6+ because of the f-string interpolation):
import asyncio
import aiohttp
import async_timeout
import json

async def fetch(session, url):
    async with async_timeout.timeout(10):
        async with session.get(url) as response:
            return await response.text()

async def get_bittrex_marketsummary(currency_pair):
    url = f'https://bittrex.com/api/v1.1/public/getmarketsummary?market={currency_pair}'
    async with aiohttp.ClientSession() as session:
        response = await fetch(session, url)
        return json.loads(response)

class MyCryptoCurrency:
    def __init__(self):
        self.currency = "BTC-ETH"
        self.last_price = None
        asyncio.ensure_future(self.get_last_price())

    async def get_last_price(self):
        self.last_price = await get_bittrex_marketsummary(self.currency)

async def main():
    eth = MyCryptoCurrency()
    print(eth.last_price)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
While this runs and doesn't throw any exceptions, it doesn't get the result of the API request and so ... doesn't work :P
If I try to use e.g. loop.run_until_complete(get_bittrex_marketsummary()) instead, I get an "event loop is already running" error, which kind of makes sense.
Any hints how to solve this properly?
Thx in advance!
OK, after talking about this in the #python channel on freenode I got the answer "don't do async I/O in __init__", so here is the working version:
import asyncio
import aiohttp
import async_timeout
import json

async def fetch(session, url):
    async with async_timeout.timeout(10):
        async with session.get(url) as response:
            return await response.text()

async def get_bittrex_marketsummary(currency_pair):
    url = f'https://bittrex.com/api/v1.1/public/getmarketsummary?market={currency_pair}'
    async with aiohttp.ClientSession() as session:
        response = await fetch(session, url)
        return json.loads(response)

class MyCryptoCurrency:
    def __init__(self):
        self.currency = "BTC-ETH"
        self.last_price = None

    async def get_last_price(self):
        self.last_price = await get_bittrex_marketsummary(self.currency)

async def main():
    eth = MyCryptoCurrency()
    await eth.get_last_price()
    print(eth.last_price)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
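A common variation on the "don't do async I/O in __init__" advice (a sketch, not part of the original answer) is an async factory classmethod, so the object is fully populated by the time the caller receives it:

class MyCryptoCurrency:
    def __init__(self):
        self.currency = "BTC-ETH"
        self.last_price = None

    @classmethod
    async def create(cls):
        # do the async work here, after the plain synchronous __init__
        self = cls()
        self.last_price = await get_bittrex_marketsummary(self.currency)
        return self

async def main():
    eth = await MyCryptoCurrency.create()
    print(eth.last_price)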