How to define a timeout on a Python 3 process? - python-3.x

I'm trying to use a process in order to put a time limit on a function.
I successfully created a simple process and ran it:
from multiprocessing import Process
import time

stemp = list()

# simple function for testing purposes
def f(name):
    print('hello, ', name)
    stemp.append(name)

# define a process and run it
p = Process(target=f, args=('bob',))
p.run()
But when I try to use p.join to define a timeout (see code below), I get the error message "AttributeError: 'Process' object has no attribute '_target'":
if __name__ == '__main__':
    # We create a Process
    p = Process(target=f, args=('bob',))
    # We start the process and we block for 5 seconds.
    p.start()
    p.join(timeout=5)
    p.run()
    # We terminate the process.
    p.terminate()
Any idea what I'm doing wrong?
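A likely cause, for reference: in CPython's multiprocessing, start() deletes the process object's _target attribute (to avoid a reference cycle), so calling p.run() afterwards raises exactly this AttributeError. run() is the method that executes the target, and it is invoked for you inside the child process; it should not be called again after start(). A minimal sketch of the intended pattern, assuming the same f as above:

from multiprocessing import Process

if __name__ == '__main__':
    p = Process(target=f, args=('bob',))
    p.start()          # runs f in a child process
    p.join(timeout=5)  # block for at most 5 seconds
    if p.is_alive():   # f is still running after the timeout
        p.terminate()  # kill the child
        p.join()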

Related

stop an endless function with threading

how do you kill an endless function in Python?
I would like the function to execute for 5 seconds and then be stopped, but thread.terminate() doesn't seem to work; I get the following error:
AttributeError: 'Thread' object has no attribute 'terminate'
Here is the code:
import threading, time

def endless():
    while True:
        pass

p = threading.Thread(target=endless, name="endless")
p.start()
time.sleep(5)
if p.is_alive():
    p.terminate()
p.join()
As mentioned in the comments, use a Process if you want to forcibly terminate the function; Python threads cannot be killed from the outside.
from multiprocessing import Process, freeze_support
import time

def endless():
    while True:
        pass

if __name__ == '__main__':  # required on Windows
    freeze_support()
    p = Process(target=endless)
    p.start()
    time.sleep(5)
    if p.is_alive():
        p.terminate()
        p.join()
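A small variant, tying this back to the original question: p.join(timeout=5) returns as soon as the process finishes, instead of always sleeping the full 5 seconds (a sketch, assuming the same endless as above):

if __name__ == '__main__':
    p = Process(target=endless)
    p.start()
    p.join(timeout=5)  # wait at most 5 seconds
    if p.is_alive():   # still running after the timeout
        p.terminate()
        p.join()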

How to stop a function from outside it in Python

I want to know how to stop a running function from outside it. Here is how it should work:
def smth():
    time.sleep(5)  # Just an example

smth.stop()
Thanks for your help.
Here's an example using the multiprocessing library:
from multiprocessing import Process
import time

def foo():
    print('Starting...')
    time.sleep(5)
    print('Done')

p = Process(target=foo)  # make the process
p.start()                # start running foo
time.sleep(2)            # wait 2 seconds
p.terminate()            # kill it
print('Killed')
Output:
Starting...
Killed
Basically, what this code does is:
- Create a process p which runs the function foo when started
- Wait 2 seconds to simulate doing other stuff
- End the process p with p.terminate()
- Since p never gets past time.sleep(5) in foo, it doesn't print 'Done'
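If this pattern comes up often, it can be wrapped in a small helper. run_with_timeout below is a hypothetical name, and this is only a sketch of the idea, not a library API:

from multiprocessing import Process

def run_with_timeout(target, timeout, *args):
    # run target(*args) in a child process, killing it after timeout seconds
    p = Process(target=target, args=args)
    p.start()
    p.join(timeout=timeout)  # returns early if target finishes first
    if p.is_alive():         # timed out: force-kill the child
        p.terminate()
        p.join()
        return False         # did not finish in time
    return True              # finished within the timeout

With the foo above, run_with_timeout(foo, 2) prints 'Starting...' and returns False, just like the example.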

Python freezes when accessing string value in subprocess

I spent nearly the whole day on this and have come to the end of my knowledge:
I want to change a shared multiprocessing.Value string in the subprocess, but Python hangs as soon as the subprocess tries to change the shared value.
Below is example code:
from multiprocessing import Process, Value, freeze_support
from ctypes import c_wchar_p

def test(x):
    with x.get_lock():
        x.value = 'THE TEST WORKED'
    return

if __name__ == "__main__":
    freeze_support()
    value = Value(c_wchar_p, '')
    p = Process(target=test, args=(value,))
    p.start()
    print(p.pid)
    # this try block is to also allow p.run()
    try:
        p.join()
        p.terminate()
    except:
        pass
    print(value.value)
What I tried that does not work:
- ctypes c_wchar_p and c_char_p; both result in the same freeze
- leaving out x.get_lock()
- leaving out freeze_support()
What works (but does not help):
- using a float as the shared value (value = Value('d', 0) and x.value = 1)
- running the process without starting a subprocess (replacing p.start() with p.run())
I am using Windows 10 64 bit and Python 3.6.4 (Spyder, but also tried outside of Spyder).
Any help welcome!
A shared pointer won't work in another process because the pointer is only valid in the process in which it was created. Instead, use an array:
import multiprocessing as mp

def test(x):
    x.value = b'Test worked!'

if __name__ == "__main__":
    x = mp.Array('c', 15)
    p = mp.Process(target=test, args=(x,))
    p.start()
    p.join()
    print(x.value)
Output:
b'Test worked!'
Note that array type 'c' is specialized and returns a SynchronizedString, vs. other types that return a SynchronizedArray. Here's how to use type 'u', for example:
import multiprocessing as mp
from ctypes import *

def test(x):
    x.get_obj().value = 'Test worked!'

if __name__ == "__main__":
    x = mp.Array('u', 15)
    p = mp.Process(target=test, args=(x,))
    p.start()
    p.join()
    print(x.get_obj().value)
Output:
Test worked!
Note that operations on the wrapped value that are non-atomic, such as += (which does a read/modify/write), should be protected with a with x.get_lock(): context manager.
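For instance, a minimal sketch of a guarded counter, using an integer Value('i') rather than the arrays above:

import multiprocessing as mp

def bump(counter):
    for _ in range(1000):
        with counter.get_lock():  # guard the non-atomic read/modify/write
            counter.value += 1

if __name__ == "__main__":
    counter = mp.Value('i', 0)
    ps = [mp.Process(target=bump, args=(counter,)) for _ in range(4)]
    for p in ps:
        p.start()
    for p in ps:
        p.join()
    print(counter.value)  # reliably 4000 only because the lock is held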

Python multiprocessing script partial output

I am following the principles laid down in this post to safely output results which will eventually be written to a file. Unfortunately, the code only prints 1 and 2, and not 3 to 6.
import os
import argparse
import pandas as pd
import multiprocessing
from multiprocessing import Process, Queue
from time import sleep

def feed(queue, parlist):
    for par in parlist:
        queue.put(par)
    print("Queue size", queue.qsize())

def calc(queueIn, queueOut):
    while True:
        try:
            par = queueIn.get(block=False)
            res = doCalculation(par)
            queueOut.put((res))
            queueIn.task_done()
        except:
            break

def doCalculation(par):
    return par

def write(queue):
    while True:
        try:
            par = queue.get(block=False)
            print("response:", par)
        except:
            break

if __name__ == "__main__":
    nthreads = 2
    workerQueue = Queue()
    writerQueue = Queue()
    considerperiod = [1, 2, 3, 4, 5, 6]
    feedProc = Process(target=feed, args=(workerQueue, considerperiod))
    calcProc = [Process(target=calc, args=(workerQueue, writerQueue)) for i in range(nthreads)]
    writProc = Process(target=write, args=(writerQueue,))
    feedProc.start()
    feedProc.join()
    for p in calcProc:
        p.start()
    for p in calcProc:
        p.join()
    writProc.start()
    writProc.join()
On running the code it prints,
$ python3 tst.py
Queue size 6
response: 1
response: 2
Also, is it possible to ensure that the write function always outputs 1,2,3,4,5,6, i.e. in the same order in which the data is fed into the feed queue?
The problem is the task_done() call: a plain multiprocessing.Queue has no task_done() method (only JoinableQueue does), so the call raises AttributeError, which the bare except: swallows and which breaks the loop. That is why each worker processes exactly one item and you only see responses 1 and 2. If you remove that call, it works; the loop then ends because queueIn.get(block=False) raises an exception once the queue is empty. That might be just enough for your use case, but a better way is to use sentinels (as suggested in the multiprocessing docs, see the last example there). Here's a little rewrite so your program uses sentinels:
import os
import argparse
import multiprocessing
from multiprocessing import Process, Queue
from time import sleep

def feed(queue, parlist, nthreads):
    for par in parlist:
        queue.put(par)
    for i in range(nthreads):
        queue.put(None)
    print("Queue size", queue.qsize())

def calc(queueIn, queueOut):
    while True:
        par = queueIn.get()
        if par is None:
            break
        res = doCalculation(par)
        queueOut.put((res))

def doCalculation(par):
    return par

def write(queue):
    while not queue.empty():
        par = queue.get()
        print("response:", par)

if __name__ == "__main__":
    nthreads = 2
    workerQueue = Queue()
    writerQueue = Queue()
    considerperiod = [1, 2, 3, 4, 5, 6]
    feedProc = Process(target=feed, args=(workerQueue, considerperiod, nthreads))
    calcProc = [Process(target=calc, args=(workerQueue, writerQueue)) for i in range(nthreads)]
    writProc = Process(target=write, args=(writerQueue,))
    feedProc.start()
    feedProc.join()
    for p in calcProc:
        p.start()
    for p in calcProc:
        p.join()
    writProc.start()
    writProc.join()
A few things to note:
- The sentinel is a None put into the queue; note that you need one sentinel for every worker process.
- The write function doesn't need sentinel handling, as there is only one consumer and no concurrency to handle. (If you did the empty()-then-get() dance in your calc function, you would run into trouble: with only one item left in the queue, both workers could see empty() return False at the same time, both would call get(), and one of them would block forever.)
- You don't need to put feed and write into processes at all; just call them from your main function, since you don't want to run them in parallel anyway.
how can I have the same order in output as in input? [...] I guess multiprocessing.map can do this
Yes, map keeps the order. Here is your program rewritten into something simpler (you don't need the workerQueue and writerQueue), with random sleeps added to show that the output is still in order:
from multiprocessing import Pool
import time
import random

def calc(val):
    time.sleep(random.random())
    return val

if __name__ == "__main__":
    considerperiod = [1, 2, 3, 4, 5, 6]
    with Pool(processes=2) as pool:
        print(pool.map(calc, considerperiod))

Python 3 control multiprocessing

I am trying to write a function using Python multiprocessing that I can control, passing it a "command" to cleanly terminate the process.
I looked at a few examples and tried them out, but they didn't seem to work for me.
So basically I need to run, in a separate process, a function that does some while-loop action, and, when needed, stop it by somehow passing a command so it exits.
Please advise.
Thanks.
example 1
from multiprocessing import Process, Queue

def start_process(queue):
    while True:
        try:
            m = queue.get()
            if m == 'exit':
                print('cleaning up worker...')
                # add your cleanup code here
                break
            else:
                print(m)
        except KeyboardInterrupt:
            print('ignore CTRL-C from worker')

if __name__ == '__main__':
    queue = Queue()
    process = Process(target=start_process, args=(queue,))
    process.start()
    queue.put(12)
    try:
        process.join()
    except KeyboardInterrupt:
        print('wait for worker to cleanup...')
        queue.put('exit')
        process.join()
example 2
import multiprocessing
import time

class MyProcess(multiprocessing.Process):
    def __init__(self):
        multiprocessing.Process.__init__(self)
        self.exit = multiprocessing.Event()

    def run(self):
        while not self.exit.is_set():
            pass
        print("You exited!")

    def shutdown(self):
        print("Shutdown initiated")
        self.exit.set()

if __name__ == "__main__":
    process = MyProcess()
    process.start()
    print("Waiting for a while")
    time.sleep(3)
    process.shutdown()
    time.sleep(3)
    print("Child process state: %d" % process.is_alive())
Both examples work fine for me - perhaps you're misunderstanding how they should work?
In the first example, when the main thread runs, it starts the child and sends 12. Then it waits to join the child. At that point everything is stalled, because the child is waiting for 'exit'. But if you then hit Ctrl-C, the 'exit' is sent, the child exits, and the second join succeeds:
> python3.3 example1.py
12
^Cignore CTRL-C from worker
wait for worker to cleanup...
cleaning up worker...
>
if you just want the parent to send 'exit' and then for everything to end, use:
from multiprocessing import Process, Queue

def start_process(queue):
    while True:
        try:
            m = queue.get()
            if m == 'exit':
                print('cleaning up worker...')
                # add your cleanup code here
                break
            else:
                print(m)
        except KeyboardInterrupt:
            print('ignore CTRL-C from worker')
    print('goodbye cruel world')

if __name__ == '__main__':
    queue = Queue()
    process = Process(target=start_process, args=(queue,))
    process.start()
    queue.put(12)
    print('sending exit')
    queue.put('exit')
    process.join()
which gives:
> python3.3 my-example.py
sending exit
12
cleaning up worker...
goodbye cruel world
>
Your second example also works (with the indentation fixed):
> python3.3 example2.py
Waiting for a while
Shutdown initiated
You exited!
Child process state: 0
>
(just wait a little). Not sure what else you could have expected here; is_alive() returns False once the child has exited, which the %d format prints as 0.
