asyncio.create_task with function decorator - python-3.x

I want these code blocks to work as expected; are there any suggestions?
def task(func: Any):
    @wraps(func)
    async def wrapped(*args: Any) -> Task[Any]:
        return create_task(func(*args))
    return wrapped

# Create a task without using the create_task method directly
@task
async def printer_task():
    await sleep(1)
    print("Hello")

async def printer():
    await sleep(1)
    print("Hello")

task = create_task(printer())
await task

# This should behave the same as above!
await printer_task()  # <--------------

Oops, I just needed to make the wrapper sync instead of async 🤦‍♂️
def task(func: Any):
    @wraps(func)
    def wrapped(*args: Any) -> Task[Any]:  # <--- async keyword removed!
        return create_task(func(*args))
    return wrapped

# Create a task without using the create_task method directly
@task
async def printer_task():
    await sleep(1)
    print("Hello")

async def printer():
    await sleep(1)
    print("Hello")

task = create_task(printer())
await task

# This should behave the same as above!
await printer_task()  # <--------------
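For completeness, here is a minimal, self-contained sketch of the corrected decorator; the imports and the surrounding main() coroutine are my additions, since the snippet above uses top-level await and omits them:

from asyncio import Task, create_task, run, sleep
from functools import wraps
from typing import Any

def task(func: Any):
    @wraps(func)
    def wrapped(*args: Any) -> "Task[Any]":
        # Schedule the coroutine on the running loop and hand back the Task.
        return create_task(func(*args))
    return wrapped

@task
async def printer_task():
    await sleep(1)
    print("Hello")

async def main():
    await printer_task()  # the wrapper returns a Task, which can be awaited

run(main())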

Related

Best way to avoid warnings about coroutines that never run because their task was cancelled?

In the following, the coroutine runIt() is created and passed as a parameter to delegate(...), which is turned into a Task that is cancelled before runIt executes:
import asyncio

async def cancelTaskTest():
    async def runIt():
        print("RunIt ran")

    async def delegate(coro):
        await coro

    task = asyncio.create_task(delegate(runIt()))
    task.cancel()

if __name__ == '__main__':
    asyncio.run(cancelTaskTest())
Produces the unwanted warning:
/usr/lib/python3.10/asyncio/base_events.py:1881: RuntimeWarning: coroutine 'cancelTaskTest.<locals>.runIt' was never awaited
handle = self._ready.popleft()
RuntimeWarning: Enable tracemalloc to get the object allocation traceback
I'm aware that runIt did not run. I don't want a warning about it - what's the best way to avoid this?
The simplest method would be to remove the () from runIt and call it inside delegate():
import asyncio

async def cancelTaskTest():
    async def runIt():
        print("RunIt ran")

    async def delegate(asyncFunc):
        coro = asyncFunc()  # <-- put () here
        await coro

    task = asyncio.create_task(delegate(runIt))  # <-- removed () in runIt
    task.cancel()

if __name__ == "__main__":
    asyncio.run(cancelTaskTest())
EDIT: To add parameters to runIt, just create a plain lambda:
import asyncio

async def cancelTaskTest():
    async def runIt(p1, p2):
        print(f"RunIt({p1}, {p2})")

    async def delegate(coro):
        await coro()

    task = asyncio.create_task(delegate(lambda: runIt(1, 2)))
    task.cancel()

if __name__ == "__main__":
    asyncio.run(cancelTaskTest())
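As an aside (my own variation, not part of the answer above), functools.partial can bind the parameters just as well as a lambda:

import asyncio
from functools import partial

async def cancelTaskTest():
    async def runIt(p1, p2):
        print(f"RunIt({p1}, {p2})")

    async def delegate(async_func):
        await async_func()  # the coroutine object is only created here

    task = asyncio.create_task(delegate(partial(runIt, 1, 2)))
    task.cancel()

if __name__ == "__main__":
    asyncio.run(cancelTaskTest())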

Concurrency Loop in python

My scenario: start, wait, start, stop or kill.
Start the first event and wait for some time.
If the waiting time is reached, start the second event and return both events' results.
But if the first event completes before the waiting time, there is no need to start the second event; just return the first event's result.
Example:
import asyncio

async def some_task():
    print('io start')
    await asyncio.sleep(2)
    print('io end')
    return "hello"

async def callback(loop):
    await asyncio.sleep(4)
    if loop.is_running():
        print('doing other things')

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop2 = asyncio.get_event_loop()
    a = loop.create_task(some_task())
    b = loop2.create_task(callback(loop))
    result = loop.run_until_complete(a)
    loop2.run_until_complete(b)
    loop.close()
    loop2.close()
The variables loop and loop2 will get the same (main) event loop, which is why it will always still be running. Here is my approach:
import asyncio

async def async_main():
    result = None

    async def some_task():
        nonlocal result
        print('io start')
        await asyncio.sleep(6)
        print('io end')
        result = "hello"
        return result

    async def callback():
        await asyncio.sleep(4)
        if result is None:
            print('doing other things')

    awaited = await asyncio.gather(
        asyncio.create_task(some_task()),
        asyncio.create_task(callback()),
    )
    return awaited

asyncio.run(async_main())
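If the requirement is strictly "only start the second event when the first one has not finished within the waiting time", a variation using asyncio.wait with a timeout (my own sketch, with invented names, not part of the answer above) could look like this:

import asyncio

async def some_task():
    print('io start')
    await asyncio.sleep(2)
    print('io end')
    return "hello"

async def second_event():
    print('doing other things')
    return "other"

async def async_main():
    first = asyncio.create_task(some_task())
    # Wait up to 4 seconds for the first event.
    done, pending = await asyncio.wait({first}, timeout=4)
    if first in done:
        return first.result()  # finished in time: skip the second event
    # Waiting time reached: start the second event and return both results.
    second = asyncio.create_task(second_event())
    return await asyncio.gather(first, second)

print(asyncio.run(async_main()))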
Asyncio can be very confusing and I do not recommend it for non-experts, so here is an alternative using threading instead of asyncio:
import threading
import time

def do_tasks():
    result = None

    def some_task():
        nonlocal result
        print('io start')
        time.sleep(2)
        print('io end')
        result = "hello"

    def callback():
        time.sleep(4)
        if result is None:
            print('doing other things')

    threading.Thread(target=some_task).start()
    threading.Thread(target=callback).start()

do_tasks()

Why does asyncio.create_task not run the method?

Code example:
async def download_page(session, url):
    print(True)

async def downloader_init(session):
    while True:
        url = await download_queue.get()
        task = asyncio.create_task(download_page(session, url))
        print(task)
        print(f"url: {url}")

async def get_urls(url):
    while True:
        try:
            url = find_somewhere_url
            await download_queue.put(url)
        except NoSuchElementException:
            break
    return True

async def main():
    async with aiohttp.ClientSession(headers=headers) as session:
        get_urls_task = asyncio.create_task(get_urls(url))
        downloader_init_task = asyncio.create_task(downloader_init(session))
        asyncio.gather(get_urls_task, downloader_init_task)

if __name__ == "__main__":
    asyncio.get_event_loop().run_until_complete(main())
Output:
<Task pending coro=<download_page() running at main.py:69>>
url: https://someurl.com/example
<Task pending coro=<download_page() running at main.py:69>>
url: https://someurl.com/example
<Task pending coro=<download_page() running at main.py:69>>
url: https://someurl.com/example
Why is the method download_page not executed?
The strange thing is that the script simply ends without any errors.
downloader_init should run endlessly, but it does not.
get_urls adds links to download_queue as it finds them, and then stops.
downloader_init should run as soon as a new link appears in the queue, but it only starts once get_urls has finished its work.
Try this instead:
Note: Your problem wasn't with the task creation, it was because
there wasn't an await at the asyncio.gather part.
import asyncio
import aiohttp

async def download_page(session, url):
    # Dummy function.
    print(f"session={session}, url={url}")

async def downloader_init(session):
    while True:
        url = await download_queue.get()
        task = asyncio.create_task(download_page(session, url))
        print(f"task={task}, url={url}")

async def get_urls(url):
    while True:
        try:
            url = find_somewhere_url()
            await download_queue.put(url)
        except NoSuchElementException:
            break

async def main():
    async with aiohttp.ClientSession(headers=headers) as session:
        get_urls_task = asyncio.create_task(get_urls(url))
        downloader_init_task = asyncio.create_task(downloader_init(session))
        # Use await here to make it finish the tasks.
        await asyncio.gather(get_urls_task, downloader_init_task)

if __name__ == "__main__":
    # Use this as it deals with the loop creation, shutdown,
    # and other stuff for you.
    asyncio.run(main())  # This is new in Python 3.7
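To isolate the point made in the note above, a minimal sketch (the names are mine, for illustration only) showing why the await on asyncio.gather matters:

import asyncio

async def worker():
    await asyncio.sleep(0.1)
    print("worker finished")

async def main():
    task = asyncio.create_task(worker())
    # asyncio.gather(task)      # without await, main() would return immediately
    #                           # and the loop would shut down before the task runs
    await asyncio.gather(task)  # with await, main() waits for the task to finish

asyncio.run(main())  # prints "worker finished"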

RuntimeError when running coroutine from __init__

Here's a sample code.
import asyncio

class Foo:
    def __init__(self):
        self._run_coro()

    def _run_coro(self):
        async def init():
            bar = ...  # some I/O op
            self.bar = bar
        loop = asyncio.get_event_loop()
        loop.run_until_complete(init())

    async def spam(self):
        return await ...  # I/O op

async def main():
    foo = Foo()
    await foo.spam()

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
When I run this code, I get following exception:
RuntimeError: This event loop is already running
If I initialize Foo outside main, the code runs without any exception. I want to initialize Foo such that during initialization it runs a coroutine which creates a class attribute bar.
I am unable to figure out how to do it correctly. How can I run a coroutine from __init__?
Any help would be highly appreciated.
class Foo:
    def __init__(self):
        self.session = requests.Session()
        self.async_session = None
        # I guess this can be done to initialize it.
        s = self.init_async_session()
        try:
            s.send(None)
        except StopIteration:
            pass
        finally:
            s.close()

    async def init_async_session(self):
        # ClientSession should be created inside a coroutine.
        self.async_session = aiohttp.ClientSession()
What would be the right way to initialize self.async_session?
If some method uses something asynchronous, it should be explicitly defined as asynchronous as well. This is a core idea behind asyncio: it makes you write code in such a way that you always know whether some arbitrary method may do something asynchronous.
In your snippet you want to do an async thing (the bar I/O) inside the sync method __init__, and asyncio prohibits that. You should make _run_coro async and initialize Foo asynchronously, for example, using the __await__ method:
import asyncio

class Foo:
    def __await__(self):
        return self._run_coro().__await__()

    async def _run_coro(self):  # real async initializer
        async def init():
            await asyncio.sleep(1)  # bar I/O
            self.bar = 123
        await init()
        return self

    async def spam(self):
        return await asyncio.sleep(1)  # I/O op

async def main():
    foo = await Foo()
    await foo.spam()

asyncio.run(main())  # instead of two lines in Python 3.7+
You may be interested in reading this answer to understand better how asyncio works and how to handle it.
Upd:
s = self.init_async_session()
try:
    s.send(None)
Don't do such things: a generator's methods are only an implementation detail with regard to coroutines. You can't predict how a coroutine will react to having .send() called on it directly, and you shouldn't rely on this behavior.
If you want to execute a coroutine, use await; if you want to start it "in the background", use a task or the other functions from the asyncio doc.
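A quick illustration of that distinction (a sketch of mine, not from the answer):

import asyncio

async def job():
    await asyncio.sleep(1)
    return 42

async def main():
    result = await job()               # run the coroutine and wait for its result
    task = asyncio.create_task(job())  # start it "in the background" as a task
    # ... other work could happen here while job() runs ...
    print(result, await task)          # later, await the task to get its result

asyncio.run(main())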
What would be the right way to initialize self.async_session?
When it comes to aiohttp.ClientSession, it should not only be created but also properly closed. The best way to do that is to use an async context manager, as shown in the aiohttp doc.
If you want to hide this operation inside Foo, you can make Foo an async context manager as well. Complete example:
import asyncio
import aiohttp

class Foo:
    async def __aenter__(self):
        self._session = aiohttp.ClientSession()
        await self._session.__aenter__()
        return self

    async def __aexit__(self, *args):
        await self._session.__aexit__(*args)

    async def spam(self):
        url = 'http://httpbin.org/delay/1'
        resp = await self._session.get(url)
        text = await resp.text()
        print(text)

async def main():
    async with Foo() as foo:
        await foo.spam()

asyncio.run(main())
Upd2:
You can combine the init/close approaches above to achieve the result you like. As long as you keep in mind that both operations are asynchronous and thus should be awaited, everything should be fine.
One more possible way:
import asyncio
import aiohttp

class Foo:
    def __await__(self):
        return self._init().__await__()

    async def _init(self):
        self._session = aiohttp.ClientSession()
        await self._session.__aenter__()
        return self

    async def close(self):
        await self._session.__aexit__(None, None, None)

    async def spam(self):
        url = 'http://httpbin.org/delay/1'
        resp = await self._session.get(url)
        text = await resp.text()
        print(text)

async def main():
    foo = await Foo()
    try:
        await foo.spam()
    finally:
        await foo.close()

asyncio.run(main())
Here's my solution.
import asyncio
import aiohttp
import requests

class Session:
    def __init__(self, headers):
        self._headers = headers
        self._session = requests.Session()
        self._async_session = None

    async def _init(self):
        self._async_session = aiohttp.ClientSession(headers=self._headers)

    async def async_request(self, url):
        while True:
            try:
                async with self._async_session.get(url) as resp:
                    resp.raise_for_status()
                    return await resp.text()
            except aiohttp.client_exceptions.ClientError:
                raise  # retry or raise
            except AttributeError:
                if isinstance(self._async_session, aiohttp.ClientSession):
                    raise
                await self._init()

    def request(self, url):
        return self._session.get(url).text

    async def close(self):
        if isinstance(self._async_session, aiohttp.ClientSession):
            await self._async_session.close()

async def main():
    session = Session({})
    print(await session.async_request('https://httpstat.us/200'))
    await session.close()

asyncio.run(main())
I can initialize the Session class and make synchronous as well as asynchronous requests. I do not have to explicitly call await session._init() to initialize self._async_session: when session.async_request is called and self._async_session is None, await self._init() is called and the request is retried.

Check if asyncio shielded coroutine ran

If I have the following code sample
async def coro():
    # CancelledError could be raised here
    await asyncio.sleep(1)
    # Or here
    await asyncio.shield(
        another_coro()
    )
    # Or here

async def wait_on_it(loop):
    f = loop.create_task(coro())
    # Pretend f may or may not happen; I just sleep in this example
    await asyncio.sleep(1)
    if not f.done():
        f.cancel()  # Will raise CancelledError when some await finishes in coro()
How can I determine whether or not the shielded task actually ran? I have important logic that must be run iff the shielded task did run. Maybe shielding that function is not the correct method?
coro() can transfer the information to the caller by modifying a mutable object it receives from the caller:
class Ref:
    def __init__(self, **kwargs):
        self.__dict__.update(**kwargs)

async def coro(run_ref):
    await asyncio.sleep(1)
    run_ref.ran_another_coro = True
    await asyncio.shield(another_coro())

async def wait_on_it(loop):
    run_ref = Ref(ran_another_coro=False)
    f = loop.create_task(coro(run_ref))
    await asyncio.sleep(1)
    if not f.done():
        f.cancel()
    if run_ref.ran_another_coro:
        ...  # another_coro() was started
Since asyncio.shield can't suspend, if wait_on_it observes a true value of run_ref.ran_another_coro, then another_coro() is guaranteed to have been started.
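Another option (my own suggestion, not part of the answer above) is to create the inner task yourself and shield that task; asyncio.shield accepts any awaitable, so you keep a handle you can inspect afterwards:

import asyncio

async def another_coro():
    await asyncio.sleep(1)

async def coro(inner_tasks):
    await asyncio.sleep(1)
    inner = asyncio.ensure_future(another_coro())  # keep a handle to the inner task
    inner_tasks.append(inner)
    await asyncio.shield(inner)

async def wait_on_it():
    inner_tasks = []
    f = asyncio.ensure_future(coro(inner_tasks))
    await asyncio.sleep(1)
    if not f.done():
        f.cancel()
    if inner_tasks:
        # another_coro() was started; let the shielded work finish anyway
        await inner_tasks[0]

asyncio.run(wait_on_it())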
