asyncio get result from coroutine - python-3.x

I have a task: make two coroutines communicate using asyncio and Python 3.
Please tell me how to do it: one coroutine, in a while True loop, returns a value at varying intervals, and the other coroutine receives this data.
import asyncio

@asyncio.coroutine
def write(future):
    i = 0
    while True:
        yield from asyncio.sleep(1)
        future.set_result('data: {}'.format(i))
        i += 1

def got_result(future):
    print(future.result())

loop = asyncio.get_event_loop()
future = asyncio.Future()
asyncio.ensure_future(write(future))
future.add_done_callback(got_result)
try:
    loop.run_forever()
finally:
    loop.close()

The solution was found with the help of asyncio.Queue(). A Future can only deliver a single value (calling set_result() on an already completed Future raises InvalidStateError), while a queue can carry a stream of values:
import asyncio

@asyncio.coroutine
def get_work(task, work_queue):
    while not work_queue.empty():
        print(task)
        queue_item = yield from work_queue.get()
        print('{0} grabbed item: {1}'.format(task, queue_item))
        yield from asyncio.sleep(0.5)
    # reschedule this coroutine so it keeps polling the queue
    asyncio.ensure_future(get_work(task, work_queue))

i = 0

async def do_work(task, work_queue):
    global i
    print(task)
    while work_queue.empty():
        work_queue.put_nowait(i)
        i += 1
        await asyncio.sleep(2)
        break
    # reschedule this coroutine so it keeps producing items
    asyncio.ensure_future(do_work(task, work_queue))

if __name__ == "__main__":
    queue_obj = asyncio.Queue()
    loop = asyncio.get_event_loop()
    tasks = [
        asyncio.ensure_future(do_work('Run do_work', queue_obj)),
        asyncio.ensure_future(get_work('Run get_work', queue_obj))]
    loop.run_until_complete(asyncio.wait(tasks))
    loop.run_forever()
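
For comparison, here is a minimal modern sketch of the same producer/consumer pattern using only async/await and asyncio.run() (Python 3.7+); the names are illustrative, not from the original code:

import asyncio

async def produce(queue):
    for i in range(5):
        await asyncio.sleep(1)              # values arrive at intervals
        await queue.put('data: {}'.format(i))
    await queue.put(None)                   # sentinel: no more data

async def consume(queue):
    while True:
        item = await queue.get()
        if item is None:                    # producer is finished
            break
        print('received', item)

async def main():
    queue = asyncio.Queue()
    await asyncio.gather(produce(queue), consume(queue))

asyncio.run(main())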

Related

Multiprocessing pool executing synchronously

I need an asynchronous parent process to hand over function calls to a process pool.
The imports are too time-consuming to spawn a new worker/process every time, so I thought I would put tasks in an asyncio.Queue and have a consumer listen to it and hand them off to the workers. (Sort of like how Gunicorn works, but I don't want to run a webserver in order to make the calls.)
However, the function call only seems to be executed if I call res.get() on the response of pool.apply_async(), and then it just runs as if it were a normal synchronous for-loop.
This is my code:
#!/usr/bin/env python
import os
import time
import multiprocessing as mp
import asyncio

def f(x: list) -> int:
    print(f'the pid of this process is: {os.getpid()}')
    time.sleep(1)
    return len(x)

def callback_func(x):
    print(f'this is the callback function')
    print(x)

async def consumer(queue):
    with mp.Pool(processes=4) as pool:
        while True:
            x = await queue.get()
            if x == 'stop':
                break
            # this makes it seem to run synchronous:
            res = pool.apply_async(f, (x,))
            print(res.get(), x, os.getpid())
            # if I run this instead, both f() and callback_func
            # are not executed.
            #res = pool.apply_async(f, (x,), callback_func)
            #print(x, os.getpid())
            queue.task_done()
    print(f'consumed')

async def producer(queue):
    for i in range(20):
        await queue.put([i, i+1, i+2])
        # await asyncio.sleep(0.5)
    await queue.put('stop')

async def main():
    queue = asyncio.Queue()
    input_coroutines = [consumer(queue), producer(queue)]
    for f in asyncio.as_completed(input_coroutines):
        try:
            result = await f
            print(result)
        except Exception as e:
            print('caught exception')
            print(e)

if __name__ == "__main__":
    asyncio.run(main())
What am I doing wrong?
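
One likely culprit (my reading, not an answer from the thread): res.get() blocks the event loop thread until the worker finishes, so items are processed strictly one at a time. A hedged sketch of an alternative using loop.run_in_executor with a concurrent.futures.ProcessPoolExecutor (Python 3.7+), which yields to the loop instead of blocking; the names are illustrative:

import asyncio
import os
import time
from concurrent.futures import ProcessPoolExecutor

def f(x: list) -> int:
    print(f'the pid of this process is: {os.getpid()}')
    time.sleep(1)
    return len(x)

async def consumer(queue, executor):
    loop = asyncio.get_running_loop()
    pending = []
    while True:
        x = await queue.get()
        if x == 'stop':
            break
        # schedule the pool call without blocking the loop; awaiting the
        # returned futures later lets several workers run in parallel
        pending.append(loop.run_in_executor(executor, f, x))
        queue.task_done()
    results = await asyncio.gather(*pending)
    print(results)

async def producer(queue):
    for i in range(20):
        await queue.put([i, i + 1, i + 2])
    await queue.put('stop')

async def main():
    queue = asyncio.Queue()
    with ProcessPoolExecutor(max_workers=4) as executor:
        await asyncio.gather(producer(queue), consumer(queue, executor))

if __name__ == "__main__":
    asyncio.run(main())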

Trying to understand asyncio with Python

I am trying to run some concurrent tasks with asyncio. Currently, I have the following example:
import asyncio
from time import sleep
from signal import SIGINT, SIGTERM, signal

async def f():
    print("Got in F")
    await asyncio.sleep(10)
    print("Finished Sleep in F")
    return "f"

async def g():
    print("Got in G")
    await asyncio.sleep(20)
    print("Finished Sleep in G")
    return "g"

async def count_timer():
    for i in range(20):
        print(i)
        sleep(1)

async def main():
    task_g = asyncio.create_task(g())
    task_f = asyncio.create_task(f())
    await task_g
    await task_f
    task_counter = asyncio.create_task(count_timer())
    await task_counter
    return

if __name__ == "__main__":
    import time
    s = time.perf_counter()
    asyncio.run(main())
    elapsed = time.perf_counter() - s
    print(f"{__file__} executed in {elapsed:0.2f} seconds.")
What I'm trying to do is call the count_timer function after f and g have been started, but still run all three concurrently.
Thank you in advance,
Lucas Delfino Nogueira.
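
A minimal sketch of one way to get that behavior (my illustration, not an answer from the thread): start f and g as tasks, then run an async version of the counter alongside them. The key changes are awaiting the tasks only after the counter has run, and using asyncio.sleep instead of the blocking time.sleep, which would stall the whole loop:

import asyncio

async def f():
    await asyncio.sleep(10)
    return "f"

async def g():
    await asyncio.sleep(20)
    return "g"

async def count_timer():
    for i in range(20):
        print(i)
        await asyncio.sleep(1)   # non-blocking, unlike time.sleep(1)

async def main():
    # create_task schedules f and g immediately...
    task_f = asyncio.create_task(f())
    task_g = asyncio.create_task(g())
    # ...so the counter runs concurrently with both
    await count_timer()
    print(await task_f, await task_g)

asyncio.run(main())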

asyncio, multiprocessing and websockets not returning a result

I am trying to get websockets, asyncio and multiprocessing to work together. I have been stuck on this for two days and would appreciate some help.
I have searched for websockets, asyncio and multiprocessing on Stack Overflow as well as in general internet searches. I have found threading examples, which I can make work.
import asyncio
import websockets
import threading

class Connection():
    def __init__(self):
        self.loop = asyncio.new_event_loop()
        sock_thread = threading.Thread(target=self.new_loop)
        sock_thread.start()
        self.x = 0

    async def connect_to_socket(self):
        self.websocket = await websockets.connect('ws://demos.kaazing.com/echo')
        await self.websocket.send("hello")
        response = await self.websocket.recv()
        print(response)

    async def listen_to_socket(self):
        while True:
            await asyncio.sleep(0)
            print('Listening for a message...')
            while self.x < 5:
                message = await self.websocket.recv()
                print("< {}".format(message))
                print('\n\n')
                print(self.x)
                self.x += 1
            self.task.cancel()
            self.loop.close()

    def stop(self):
        print('canceling task\n\n')
        self.x = 0
        self.task.cancel()

    def new_loop(self):
        self.task = self.loop.create_task(self.connect_to_socket())
        self.loop.run_forever()

    def make_task(self):
        self.task = self.loop.create_task(self.listen_to_socket())

if __name__ == '__main__':
    conn = Connection()
This works with no issues. I have seen examples where multiprocessing opens a process inside an event loop, but that is not what I want. I want to open a new process and run an event loop in the new process. Inside that event loop, I want to run my sockets: the goal is to free my main process from listening to sockets and use a child process to listen to them while I do computationally expensive work on the main process.
When I try the following code, I get nothing.
import asyncio
import websockets
import multiprocessing

class Connection(multiprocessing.Process):
    def __init__(self, tasks, results):
        super().__init__()
        self.tasks = tasks
        self.results = results
        self.loop = asyncio.new_event_loop()
        print('create event loop')
        self.x = 0
        self.task = self.loop.create_task(self.test())
        print('done with connecting')

    # connect to socket and get response
    async def test(self):
        self.ws = await websockets.connect('ws://demos.kaazing.com/echo')
        await self.websocket.send("hello")
        response = await self.websocket.recv()
        print(response)

    # listen to socket long term after connection
    async def listen_to_socket(self):
        while True:
            await asyncio.sleep(0)
            print('Listening for a message...')
            while self.x < 5:
                await self.websocket.send("hello")
                message = await self.websocket.recv()
                print("< {}".format(message))
                print('\n\n')
                print(self.x)
                self.x += 1
                self.results.put(message)
            self.task.cancel()
            self.loop.close()

    # stop task
    def stop(self):
        print('canceling task\n\n')
        self.x = 0
        self.task.cancel()

    # listen to socket long term
    # I have not called this as I can't even get a response from test()
    def make_task(self):
        self.task = self.loop.create_task(self.listen_to_socket())

if __name__ == '__main__':
    tasks = multiprocessing.JoinableQueue()
    results = multiprocessing.Queue()
    process = Connection(tasks, results)
    if tasks.empty():
        print('empty')
    else:
        print(tasks.get())
I expect to connect to the socket and receive a response. However, I get nothing: no error messages, no printout from the connection, just an empty queue and that's all. How do I get the return values from my websocket?
I am still new enough to this that I am not sure what I am doing wrong. Any advice would help me out.
Thank you
For anyone interested, I got this to work. It is very much a work in progress and I am still adding to it, and since this is for me and relatively simple, I didn't comment it.
I started with the code from this answer and built on it:
Python3 Websockets / Multithreading issue
import asyncio
import websockets
import sys
import time
import multiprocessing

class connect():
    def __init__(self, results, tasks):
        self.x = 0
        self.results = results
        self.tasks = tasks
        self.loop = asyncio.new_event_loop()

    async def commander_start(self):
        while not self.tasks.empty():
            self.uri = self.tasks.get()
            self.tasks.task_done()
            self.ws = await websockets.connect(self.uri)
            while True:
                await asyncio.sleep(0.1)
                print('Listening for a message...')
                while self.x < 5:
                    await self.ws.send("hello")
                    message = await self.ws.recv()
                    message = message + str(self.x)
                    print("< {}".format(message))
                    print('\n\n')
                    print(self.x)
                    self.x += 1
                    self.results.put(message)
                self.ws.close()
                self.x = 0
                print('ws closed')
                self.task.cancel()
                await asyncio.sleep(1)
        self.loop.close()

    def run_commander(self):
        self.task = self.loop.create_task(self.commander_start())
        self.loop.run_forever()

    def main(self):
        self.commander = multiprocessing.Process(target=self.run_commander)
        self.commander.start()
        time.sleep(3)
        self.commander.kill()
        print('is alive:', self.commander, self.commander.is_alive())

if __name__ == "__main__":
    size_q = 10
    tasks = multiprocessing.JoinableQueue(maxsize=size_q)
    results = multiprocessing.Queue(maxsize=size_q)
    conn = connect(results, tasks)
    tasks.put('ws://demos.kaazing.com/echo')
    conn.main()
    print('tasks2 put')
    tasks.put('wss://echo.websocket.org')
    conn.main()
    if not results.empty():
        for x in range(size_q):
            print(results.get())
There is a bunch I am going to change and improve; I just wanted the base system to work so I could build from there, so anyone who uses this will need to modify it to suit their needs. For instance, I spawn a new process and kill it instead of running a continuous process and giving it work to do, and I am still trying to figure out the specifics of the joinable queue and how to use it to add jobs after the process and event loop have been created; a sketch of that variant follows.
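
A hedged sketch of that long-lived variant (my own illustration, not from the original poster): the worker subclasses multiprocessing.Process and creates its event loop inside run(), so the loop lives entirely in the child process, and it pulls URIs from the joinable queue via a thread executor so the blocking get() call does not stall the loop. A None sentinel shuts it down. Requires Python 3.7+ for asyncio.run():

import asyncio
import multiprocessing
import websockets

class SocketWorker(multiprocessing.Process):
    def __init__(self, tasks, results):
        super().__init__()
        self.tasks = tasks        # multiprocessing.JoinableQueue of URIs
        self.results = results    # multiprocessing.Queue of messages

    def run(self):
        # runs in the child process, so the loop belongs to the child
        asyncio.run(self.worker())

    async def worker(self):
        loop = asyncio.get_running_loop()
        while True:
            # tasks.get() blocks, so run it in a thread executor
            uri = await loop.run_in_executor(None, self.tasks.get)
            self.tasks.task_done()
            if uri is None:       # sentinel: shut down
                break
            # connect, exchange one message, report back
            async with websockets.connect(uri) as ws:
                await ws.send("hello")
                self.results.put(await ws.recv())

if __name__ == '__main__':
    tasks = multiprocessing.JoinableQueue()
    results = multiprocessing.Queue()
    worker = SocketWorker(tasks, results)
    worker.start()                               # loop starts in the child
    tasks.put('ws://demos.kaazing.com/echo')     # jobs can be added any time
    tasks.put(None)
    worker.join()
    while not results.empty():
        print(results.get())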

how to check if asyncio loop has any associated sockets

asyncio.Task.all_tasks() gives a list of all tasks for an event loop, but I can't find anything similar for sockets, and in particular for datagram sockets associated with a loop.
The absence of sockets and tasks could then signal "end of life" for the loop.
The question is: in the following example, what should loop_not_empty() contain so that it returns False when the task set is empty and there are no associated sockets (i.e. after two seconds)?
Example:
import asyncio
import socket
import threading

class Handler(asyncio.Protocol):
    def connection_made(self, transport):
        self.transport = transport
        print("connection made")

    def datagram_received(self, data, addr):
        if data == b'die':
            print("shutting down")
            self.transport.abort()

@asyncio.coroutine
def sometask():
    yield from asyncio.sleep(1)
    print("task done")

def loop_not_empty(l):
    # if asyncio.Task.all_tasks() == set() and WHAT_GOES_HERE
    #     return False
    return True

def main():
    a, b = socket.socketpair(socket.AF_UNIX, socket.SOCK_DGRAM)
    l = asyncio.get_event_loop()
    asyncio.ensure_future(sometask(), loop=l)
    asyncio.ensure_future(l.create_datagram_endpoint(Handler, sock=a), loop=l)
    threading.Timer(2, lambda: b.send(b'die')).start()
    while loop_not_empty(l):
        l.run_until_complete(asyncio.sleep(1, loop=l))

main()
Here is a solution that uses a simple class and asyncio.Event() to count the number of active jobs and signal the loop to stop when all jobs are done:
import asyncio
import random

class UseCounter:
    def __init__(self, loop=None):
        self.loop = loop
        self.event = asyncio.Event(loop=loop)
        self.n = 0  # The number of active jobs

    def __enter__(self):
        self.enter()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.exit()

    def enter(self):
        self.n += 1

    def exit(self):
        self.n -= 1
        if self.n == 0:
            self.event.set()

    async def wait(self):
        return await self.event.wait()

async def my_coroutine(counter, term):
    with counter:
        print("start", term)
        n = random.uniform(0.2, 1.5)
        await asyncio.sleep(n)
        print("end", term)

loop = asyncio.get_event_loop()
counter = UseCounter(loop)
terms = ["apple", "banana", "melon"]
for term in terms:
    asyncio.ensure_future(my_coroutine(counter, term))
loop.run_until_complete(counter.wait())
loop.close()
For your example above, add .enter() to connection_made() and .exit() to connection_lost(). A sketch of that wiring:
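
(A minimal sketch, assuming the Handler from the question receives the counter when the endpoint is created; the constructor argument is my addition:)

import asyncio

class Handler(asyncio.Protocol):
    def __init__(self, counter):
        self.counter = counter

    def connection_made(self, transport):
        self.transport = transport
        self.counter.enter()      # one more live socket

    def connection_lost(self, exc):
        self.counter.exit()       # socket gone; event fires at zero

# pass the counter in when creating the endpoint:
# loop.create_datagram_endpoint(lambda: Handler(counter), sock=a)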

How to use aiopg pool in multi-threaded application?

I have Python 3.4.3, PostgreSQL 9.4, and aiopg 0.7.0. An example of a multi-threaded application was taken from this site. How do I use the pool? The thread hangs on the select operation.
import time
import asyncio
import aiopg
import functools
from threading import Thread, current_thread, Event
from concurrent.futures import Future

class B(Thread):
    def __init__(self, start_event):
        Thread.__init__(self)
        self.loop = None
        self.tid = None
        self.event = start_event

    def run(self):
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)
        self.tid = current_thread()
        self.loop.call_soon(self.event.set)
        self.loop.run_forever()

    def stop(self):
        self.loop.call_soon_threadsafe(self.loop.stop)

    def add_task(self, coro):
        """this method should return a task object, that I
        can cancel, not a handle"""
        def _async_add(func, fut):
            try:
                ret = func()
                fut.set_result(ret)
            except Exception as e:
                fut.set_exception(e)

        f = functools.partial(asyncio.async, coro, loop=self.loop)
        if current_thread() == self.tid:
            return f()  # We can call directly if we're not going between threads.
        else:
            # We're in a non-event loop thread so we use a Future
            # to get the task from the event loop thread once
            # it's ready.
            fut = Future()
            self.loop.call_soon_threadsafe(_async_add, f, fut)
            return fut.result()

    def cancel_task(self, task):
        self.loop.call_soon_threadsafe(task.cancel)

@asyncio.coroutine
def test(pool, name_task):
    while True:
        print(name_task, 'running')
        with (yield from pool.cursor()) as cur:
            print(name_task, " select. ")
            yield from cur.execute("SELECT count(*) FROM test")
            count = yield from cur.fetchone()
            print(name_task, ' Result: ', count)
        yield from asyncio.sleep(3)

@asyncio.coroutine
def connect_db():
    dsn = 'dbname=%s user=%s password=%s host=%s' % ('testdb', 'user', 'passw', '127.0.0.1')
    pool = yield from aiopg.create_pool(dsn)
    print('create pool type =', type(pool))
    # future.set_result(pool)
    return pool

event = Event()
b = B(event)
b.start()
event.wait()  # Let the loop's thread signal us, rather than sleeping

loop_db = asyncio.get_event_loop()
pool = loop_db.run_until_complete(connect_db())
time.sleep(2)
t = b.add_task(test(pool, 'Task1'))  # This is a real task
t = b.add_task(test(pool, 'Task2'))
while True:
    time.sleep(10)
b.stop()
No result is returned from yield from cur.execute("SELECT count(*) FROM test").
Long story short: you cannot share an aiopg pool object between different event loops.
Every aiopg.Pool is coupled to an event loop. If you don't specify the loop parameter explicitly, it is taken from the asyncio.get_event_loop() call.
So in your example you have a pool coupled to the event loop of the main thread.
When you execute a DB query from a separate thread, you are trying to run it on the thread's loop, not the main one. That doesn't work.
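
A hedged sketch of one way around this (my illustration, not part of the original answer): create the pool on the worker thread's loop and schedule the queries on that same loop, reusing B, connect_db, and test from the question. asyncio.run_coroutine_threadsafe requires Python 3.5.1+, so on the question's 3.4.3 setup the add_task helper would play the same role:

import asyncio

# after b.start() and event.wait() from the question's code:
# create the pool on b.loop, the loop that will actually run the queries
fut = asyncio.run_coroutine_threadsafe(connect_db(), b.loop)
pool = fut.result()   # block the main thread until the pool exists

# schedule the query coroutines on the same loop the pool is coupled to
asyncio.run_coroutine_threadsafe(test(pool, 'Task1'), b.loop)
asyncio.run_coroutine_threadsafe(test(pool, 'Task2'), b.loop)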
