I have a function called get_active_allowed_systems_by_poll that I want to call in the background 10 times per minute, refreshing the systems received in the last 10 seconds.
import asyncio
from threading import Thread
async def create_state_machine_at_init(app):
    worker_loop = asyncio.new_event_loop()
    worker = Thread(target=start_db_worker, args=(worker_loop,))
    worker.start()
    worker_loop.call_soon_threadsafe(get_active_allowed_systems_by_poll, app, 30)
async def get_active_allowed_systems_by_poll(app, interval=10):
    params = {
        param: key
        for param, key
        in app.config.get_active_allowed_systems_by_poll_params.items()
    }
    params['interval'] = interval
    operation = prepare_exec(
        app.config.get_active_allowed_systems_by_poll,
        **params
    )
    global ACTIVE_ALLOWED_SYSTEMS
    ACTIVE_ALLOWED_SYSTEMS = (await app['database'].execute(operation)).all()
    return ACTIVE_ALLOWED_SYSTEMS
def start_db_worker(loop):
    """Switch to new event loop and run forever"""
    asyncio.set_event_loop(loop)
    loop.run_forever()
/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/events.py:145: RuntimeWarning: coroutine 'get_active_allowed_systems_by_poll' was never awaited
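The warning appears because call_soon_threadsafe expects a plain callable, so the coroutine object is created but never awaited. A minimal sketch of one way to schedule the coroutine on the worker loop instead, reusing get_active_allowed_systems_by_poll and start_db_worker from the question (the poll_forever wrapper is a hypothetical helper, not from the original code):

import asyncio
from threading import Thread

async def poll_forever(app, interval=10):
    # hypothetical wrapper: re-run the poll every `interval` seconds
    while True:
        await get_active_allowed_systems_by_poll(app, interval)
        await asyncio.sleep(interval)

def create_state_machine_at_init(app):
    worker_loop = asyncio.new_event_loop()
    Thread(target=start_db_worker, args=(worker_loop,), daemon=True).start()
    # run_coroutine_threadsafe takes a coroutine object and actually awaits it
    # on the worker loop, unlike call_soon_threadsafe
    asyncio.run_coroutine_threadsafe(poll_forever(app, 10), worker_loop)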
I'm running an async function (downloading files) in a separate process. The data between processes is
passed using multiprocessing queues.
in main file/module:
to_download_mpq = Queue()
downloaded_mpq = Queue()
...................
pd = Process(target=async_download_items,
             args=(to_download_mpq, downloaded_mpq))
pd.start()
pd.join()
print("end_process")
in a separate file:
# transfer data between async queues and multiprocessing queues when ready
async def download_piping(to_download_q, downloaded_q, to_download_mpq, downloaded_mpq, sentinel):
    to_download_sentinel = False
    downloaded_sentinel = False
    while not (to_download_sentinel and downloaded_sentinel):
        if not to_download_sentinel:
            to_download_item = to_download_mpq.get()
            await to_download_q.put(to_download_item)
            await asyncio.sleep(0.1)
            if to_download_item == sentinel:
                to_download_sentinel = True
        if not downloaded_sentinel:
            if downloaded_q.empty():
                await asyncio.sleep(2)
            else:
                downloaded_item = await downloaded_q.get()
                downloaded_mpq.put(downloaded_item)
                await asyncio.sleep(0)
                if downloaded_item == sentinel:
                    downloaded_sentinel = True
async def download_tasks(to_download_mpq, downloaded_mpq, workers, sentinel=END_QUEUE_SENTINEL, queue_size=50):
    downloader = download.AsyncDownloader()  # async downloader class using aiohttp
    to_download_lq = asyncio.Queue(queue_size)
    downloaded_lq = asyncio.Queue(queue_size)
    task_download = asyncio.create_task(downloader.download_files(to_download_q=to_download_lq,
                                                                  downloaded_q=downloaded_lq,
                                                                  download_workers=workers,
                                                                  sentinel=sentinel))
    task_piping = asyncio.create_task(download_piping(to_download_lq, downloaded_lq,
                                                      to_download_mpq, downloaded_mpq,
                                                      sentinel=sentinel))
    await asyncio.gather(task_download, task_piping)
def async_download_items(to_download_mpq, downloaded_mpq, workers=50):
    loop = asyncio.get_event_loop()
    print(loop)
    loop.run_until_complete(download_tasks(to_download_mpq, downloaded_mpq, workers=workers))
    loop.close()
    print("end async")
The async tasks and the loop finish, "end async" is printed, but the process hangs. The data from "downloaded_mpq" is used in another process.
Are the queues keeping the process from closing, or could it be something else?
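One likely cause, based on the documented behaviour of multiprocessing.Queue (so treat this as a hypothesis about the code above): a process that has put items on a queue will not terminate until the queue's background feeder thread has flushed all buffered items, so pd.join() can hang if downloaded_mpq is never drained in the parent. A minimal sketch of the pattern, not the original code:

from multiprocessing import Process, Queue

def worker(q):
    for i in range(1000):
        q.put(i)
    # q.cancel_join_thread()  # alternative: let the process exit without flushing (buffered data may be lost)

if __name__ == "__main__":
    q = Queue()
    p = Process(target=worker, args=(q,))
    p.start()
    items = [q.get() for _ in range(1000)]  # drain the queue before join() to avoid the hang
    p.join()
    print("end_process", len(items))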
I want to run a coroutine in a different thread and get the result that the coroutine returns.
class Main:
    def __init__(self, result_from_io_task=None):
        self._io_task_result = result_from_io_task

    async def io_task(self):
        await asyncio.sleep(2)
        return "slept of 2s"

    def non_async_func(self):
        # This can't be made async.
        if not self._io_task_result:
            # run io_task and get its result
            # event loop will be running in the main thread so I can fire the task
            task = asyncio.create_task(self.io_task())
            # can't await task since I am in a non-async func and I cannot
            # return from non_async_func until and unless I know what
            # self.io_task has returned. Tried the following but my app hangs forever.
            while not task.done():
                pass
I also tried the following, but it doesn't work:
def run_in_thread(coro, loop):
    output = []
    def run():
        fut = asyncio.run_coroutine_threadsafe(coro, loop)
        output.append(fut)
    thr = Thread(target=run)
    thr.start()
    return output
async def main():
    main_obj = Main(result_from_io_task=None)
    v = main_obj.non_async_func()
How can I spawn a new thread and run the given coroutine using the event loop running in the main thread?
Unfortunately, my codebase depends on Python < 3.8, and asyncio.to_thread is not available in Python 3.7.
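One workable pattern on Python 3.7 (a sketch, not the only answer) is to run the coroutine on a brand-new event loop in a worker thread and block the calling thread for its result; run_coro_in_new_thread below is a hypothetical helper. Note that if non_async_func itself runs on the main event loop's thread, this still blocks that loop while waiting, but it avoids the deadlock of busy-waiting on a task that the blocked loop can never run:

import asyncio
from concurrent.futures import ThreadPoolExecutor

def run_coro_in_new_thread(coro):
    # run the coroutine to completion on a fresh event loop in a worker thread
    # (asyncio.run creates and closes that loop) and block for the result
    with ThreadPoolExecutor(max_workers=1) as pool:
        return pool.submit(asyncio.run, coro).result()

# hypothetical usage inside non_async_func:
#     self._io_task_result = run_coro_in_new_thread(self.io_task())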
Based on the example in my other answer, here is another implementation of the asynchronous decorator that does not use the asyncio.to_thread() function but uses a ThreadPoolExecutor instead.
import asyncio
import requests
import concurrent.futures

def asynchronous(func):
    async def wrapper(*args, **kwargs):
        with concurrent.futures.ThreadPoolExecutor() as executor:
            future = executor.submit(func, *args, **kwargs)
            # wrap the concurrent.futures.Future so it can be awaited
            # without blocking the event loop
            return await asyncio.wrap_future(future)
    return wrapper

@asynchronous
def request(url):
    with requests.Session() as session:
        response = session.get(url)
        try:
            return response.json()
        except requests.JSONDecodeError:
            return response.text

async def main():
    task = asyncio.create_task(request("https://google.com/"))
    print("waiting for response...")
    result = await task
    print(result)

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
    loop.close()
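For reference, a sketch of the equivalent without a custom decorator, using the event loop's built-in default executor (available well before Python 3.8); the fetch function here is just an illustrative blocking call, not part of the original answer:

import asyncio
import requests

def fetch(url):
    # plain blocking function, no decorator needed
    with requests.Session() as session:
        return session.get(url).text

async def main():
    loop = asyncio.get_event_loop()
    # run the blocking call in the loop's default ThreadPoolExecutor
    result = await loop.run_in_executor(None, fetch, "https://google.com/")
    print(result[:80])

asyncio.run(main())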
I am new to asynchronous functions and threads, and I am trying to return a series of values obtained from a WebSocket to pass to another thread where synchronous code is executing. In the code, I also use a multi-WebSocket approach. Below I show you the code:
"""
This code is designed to run an asynchronous loop
with asyncio in a separate thread. This allows mixing
a synchronous code with an asynchronous one.
"""
import asyncio
from datetime import datetime
from threading import Thread
import websockets
from typing import Tuple, List, Iterable
import json
import time
URLS = [
"wss://stream.binance.com:9443/ws/xrpusdt#kline_1m",
"wss://stream.binance.com:9443/ws/btcusdt#kline_1m",
]
def start_background_loop(loop: asyncio.AbstractEventLoop):
    asyncio.set_event_loop(loop)
    loop.run_forever()
async def IndividualSubscription(url: str):
    """An individual subscription to each WebSocket is created"""
    async with websockets.connect(url) as websocket:
        data = await websocket.recv()
        data = json.loads(data)
        print('\n', data)
        return data
async def Subscriptions(URLS: Iterable[str]):
    """All concurrent tickets are subscribed and all are combined
    in a single coroutine."""
    while True:
        tasks = [asyncio.create_task(IndividualSubscription(url)) for url in URLS]
        # All tasks are run in parallel
        await asyncio.gather(*tasks)
        #return tasks
def main():
    loop = asyncio.new_event_loop()
    t = Thread(target=start_background_loop, args=(loop,), daemon=True)
    t.start()
    task = asyncio.run_coroutine_threadsafe(Subscriptions(URLS), loop)
    for i in task.result():
        print(f"{i}")
    #return tasks
def function():
    for i in range(100):
        print("This is out of asynchronous ", i)
        time.sleep(1)
if __name__ == "__main__":
    main()
    T2 = Thread(target=function,)
    T2.start()
I tried just adding a return to the async code, but by doing this the async loop only runs once and not continuously as I would expect. I've also tried calling .result() on the task created with .create_task(). Is it possible to return values from an asynchronous function?
If you want interoperability between synchronous and asynchronous code, you need to design a communication mechanism that won't block the thread running the async code. Queues are commonly used for communication between threads; the janus library implements queues compatible with threads running async code by exposing a sync queue interface to sync code and an async queue interface to async code.
Your code is a little chaotic, so I cleaned it up just to show communication between the sync thread (main thread) and the async thread (background thread running the asyncio loop).
import asyncio
from datetime import datetime
from threading import Thread
import websockets
from typing import Tuple, List, Iterable
import json
import time
import janus # pip install janus
URLS = [
"wss://stream.binance.com:9443/ws/xrpusdt#kline_1m",
"wss://stream.binance.com:9443/ws/btcusdt#kline_1m",
]
def start_background_loop(loop: asyncio.AbstractEventLoop):
    asyncio.set_event_loop(loop)
    loop.run_forever()
async def IndividualSubscription(url: str):
    """An individual subscription to each WebSocket is created"""
    async with websockets.connect(url) as websocket:
        return json.loads(await websocket.recv())
async def Subscriptions(URLS: Iterable[str], results: asyncio.Queue):
    """All concurrent tickets are subscribed and all are combined
    in a single coroutine."""
    while True:
        tasks = [asyncio.create_task(IndividualSubscription(url)) for url in URLS]
        # gather() returns the coroutines' results directly
        for result in await asyncio.gather(*tasks):
            await results.put(result)
def async_main(results: asyncio.Queue):
    asyncio.run(Subscriptions(URLS, results))
if __name__ == "__main__":
    results = janus.Queue(100)  # max size of 100
    async_thread = Thread(target=async_main, args=(results.async_q,))
    async_thread.daemon = True  # exit if main thread exits
    async_thread.start()
    while True:
        print(f"[sync thread] got result from async thread: {results.sync_q.get()}")
I have a problem at work where I have to wait for 10 seconds when an InstrInstallSucceeded event comes in, without blocking the main thread, and then wait for InstrInstallFailed to appear; in other words, 'ToolOn', 'ToolOn', 'ToolOn' should appear without any wait.
import asyncio
from threading import Thread
import time
FLAG = True
async def sleep_loop(t, event):
    global FLAG
    print(event)
    if event == 'InstrInstallSucceeded':
        # spawn a separate thread here such that
        # ToolOn events are not blocked by the sleep
        await asyncio.sleep(t)
        FLAG = True
    if event == 'InstrInstallFailed':
        # and I want to update the FLAG whenever I see event == 'InstrInstallFailed'
        FLAG = False

async def keep_print():
    print(f'Beginning FLAG:: {FLAG}')
    while FLAG:
        pass
    print(f'End FLAG:: {FLAG}')
def start_loop(loop, t):
    print("in start loop")
    asyncio.set_event_loop(loop)
    for i in ['InstrInstallSucceeded', 'ToolOn', 'ToolOn', 'ToolOn', 'InstrInstallFailed']:
        loop.run_until_complete(asyncio.sleep(1))
        loop.run_until_complete(sleep_loop(t, i))
loop = asyncio.get_event_loop()
new_loop = asyncio.new_event_loop()
t = Thread(target=start_loop, args=(new_loop,10))
t.start()
coro = keep_print()
loop.run_until_complete(coro)
Output:
in start loop
Beginning FLAG:: True
Executing <Task pending coro=<sleep() running at /Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/tasks.py:482> wait_for=<Future pending cb=[<TaskWakeupMethWrapper object at 0x1043f2be8>()] created at /Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/base_events.py:284> cb=[_run_until_complete_cb() at /Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/base_events.py:185] created at /Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/base_events.py:452> took 0.118 seconds
InstrInstallSucceeded
ToolOn
ToolOn
ToolOn
InstrInstallFailed
End FLAG:: False
Executing <Task finished coro=<keep_print() done, defined at fut.py:21> result=None created at /Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/base_events.py:452> took 15.756 seconds
EDIT: using python 3.6.7
import asyncio

async def dispatch_event(event, alert):
    print(event)
    if event == 'InstrInstallSucceeded':
        # spawn a coroutine if you need something done in parallel
        #asyncio.create_task(xxx())
        await asyncio.sleep(10)
    if event == 'InstrInstallFailed':
        await asyncio.sleep(.5)
    # alert the watcher(s) of the event that was dispatched
    alert.last_event = event
    alert.set()

async def keep_print(alert):
    while True:
        print(f'Beginning FLAG:: {alert.last_event}')
        await alert.wait()
        alert.clear()
        print(f'End FLAG:: {alert.last_event}')

async def main():
    alert = asyncio.Event()
    alert.last_event = None
    # spawn keep_print in the "background"
    loop = asyncio.get_event_loop()
    t = loop.create_task(keep_print(alert))
    for i in ['InstrInstallSucceeded', 'ToolOn', 'ToolOn', 'ToolOn', 'InstrInstallFailed']:
        await asyncio.sleep(1)
        await dispatch_event(i, alert)
    await asyncio.sleep(1)
    t.cancel()

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
EDIT, as suggested by @user418.....:
async def dispatch_event(event, alert):
    alert.last_event = event
    alert.set()
    print(event)
    if event == 'InstrInstallSucceeded':
        # spawn a coroutine if you need something done in parallel
        #asyncio.create_task(xxx())
        await asyncio.sleep(10)
    if event == 'InstrInstallFailed':
        await asyncio.sleep(.5)
    # alert the watcher(s) of the event that was dispatched
Threads and asyncio don't go together, except in specific circumstances (e.g. the implementation of run_in_executor). Instead of spawning new threads, spawn new coroutines.
For example:
import asyncio

async def dispatch_event(event, alert):
    print(event)
    if event == 'InstrInstallSucceeded':
        # spawn a coroutine if you need something done in parallel
        #asyncio.create_task(xxx())
        await asyncio.sleep(1)
    if event == 'InstrInstallFailed':
        await asyncio.sleep(.5)
    # alert the watcher(s) of the event that was dispatched
    alert.last_event = event
    alert.set()

async def keep_print(alert):
    while True:
        print(f'Beginning FLAG:: {alert.last_event}')
        await alert.wait()
        alert.clear()
        print(f'End FLAG:: {alert.last_event}')

async def main():
    alert = asyncio.Event()
    alert.last_event = None
    # spawn keep_print in the "background"
    t = asyncio.create_task(keep_print(alert))
    for i in ['InstrInstallSucceeded', 'ToolOn', 'ToolOn', 'ToolOn', 'InstrInstallFailed']:
        await asyncio.sleep(1)
        await dispatch_event(i, alert)
    await asyncio.sleep(1)
    t.cancel()

asyncio.run(main())
An asyncio program has two tasks that produce messages which are put on a queue, and another task that consumes the queue.
One producer produces items periodically.
The other producer has to be synced with the consumer: it has to wait until its own message has been consumed.
import asyncio
import logging
import sys
logging.basicConfig( stream=sys.stdout,format='%(asctime)-5s: %(funcName)-15s: %(message)s',datefmt='%I:%M:%S',level=logging.INFO)
logger = logging.getLogger()
async def sync_producer(queue):
    for x in range(5):
        item = f"sync producer{x}"
        logger.info(f"{item} ")
        await queue.put(item)  # <= at this point I want to wait until the message has been consumed
    logger.info(f"sync producer finish")
async def periodic_producer(queue):
    x = 0
    while True:
        await asyncio.sleep(1)
        item = f"periodic producer {x}"
        logger.info(f"{item} ")
        queue.put_nowait(item)
        x += 1
async def consumer(queue):
    while True:
        item = await queue.get()
        logger.info(f"{item}")
        queue.task_done()
        await asyncio.sleep(1)
async def main():
    queue = asyncio.Queue()
    consumer_task = asyncio.create_task(consumer(queue))
    periodic_producer_task = asyncio.create_task(periodic_producer(queue))
    producer_task = asyncio.create_task(sync_producer(queue))
    await producer_task
    periodic_producer_task.cancel()
    await queue.join()
    consumer_task.cancel()
asyncio.run(main())
The example does not work as I want because await queue.put(item) does not wait for queue.task_done().
A possible workaround could be to put (event, item) on the queue, where event = asyncio.Event(), and then await the event. Is that a "good" workaround?
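A sketch of that workaround, assuming the queue carries (event, item) pairs and the periodic producer puts (None, item); this is one possible shape of the idea, not a definitive answer:

import asyncio

async def sync_producer(queue):
    for x in range(5):
        item = f"sync producer {x}"
        done = asyncio.Event()
        await queue.put((done, item))
        await done.wait()          # resumes only after the consumer has processed `item`

async def consumer(queue):
    while True:
        done, item = await queue.get()
        print(item)                # process the item
        queue.task_done()
        if done is not None:
            done.set()             # unblock the producer waiting for this item
        await asyncio.sleep(1)

An alternative with the same effect is to put an asyncio.Future alongside the item and have the consumer call set_result() on it once the item has been handled.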