Why does the following program not end in IDLE? - python-3.x

I have the following Python program using threads. I am unable to understand why it does not terminate after execution. Please suggest possible reasons and how to overcome this problem. Here is the code:
import time
from threading import *
lock1 = Lock()
def func(string):
    for i in range(5):
        lock1.acquire()
        print(string)
        lock1.release()
        time.sleep(0.1)
t1 = Thread(target = func, args = ('Hello from t1',))
t2 = Thread(target = func, args = ('Hello from t2',))
t1.start()
t2.start()
print(t1.name)

The reason is simple: there is no clean exit because the main thread finishes while the two worker threads are still running, and the interpreter (or IDLE's shell) waits for those non-daemon threads before it returns to the prompt. The behaviour can also differ between IDLE and the command-line shell.
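One way to make the shutdown explicit (a sketch, not part of the original answer; the with lock1: form is just a tidier equivalent of acquire()/release()) is to join both worker threads at the end of the main thread:

import time
from threading import Thread, Lock

lock1 = Lock()

def func(string):
    for i in range(5):
        with lock1:            # acquire/release the lock via a context manager
            print(string)
        time.sleep(0.1)

t1 = Thread(target=func, args=('Hello from t1',))
t2 = Thread(target=func, args=('Hello from t2',))
t1.start()
t2.start()
print(t1.name)
t1.join()                      # wait for both workers before the main thread ends
t2.join()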

Related

Python 3 threading, queue

I'm facing a "little" problem with adding data to a queue.Queue from a thread.
Environment: Ubuntu 18.04 / Python 3.6.7 / Python 3.8.5
Below I will post my simplified code. Any help will be appreciated.
from threading import Thread,Lock
from queue import Queue
from random import randint
thread = None
thread_lock = Lock()
q = Queue()
def worker(number):
    random_number = [str(randint(1, 999)), number]
    q.put(random_number)

def first_function(iteration):
    global thread
    some_list = []
    with thread_lock:
        threads = []
        if thread is None:
            for iterator in range(iteration):
                thread = Thread(target=worker, args=(iterator,))
                threads.append(thread)
                thread.start()
            for thread_ in threads:
                thread_.join()
            thread = None
            while not q.empty():
                some_list.append(q.get())
    return (some_list)
print(first_function(10))
print(first_function(5))
My second call returns an empty list. Please give me an idea.
The problem is with the thread variable: after the function runs the first time, thread holds a <Thread ...> object, so on the second call, when you check "if thread is None", surprise: it is not None, and no new worker threads are started.

How to stop a specific Thread among others?

I'm using threads for a project, which looks like this:
thread1 = Thread(target=function, args=('x','y',1,2))
thread2 = Thread(target=function, args=('a','b',1,2))
thread1.start()
thread2.start()
Everything is working, but I wanted to add an option to my code. To kill my threads I'm currently using a while X == True loop in the target function, so when I want to kill a thread I have to make that condition False.
The issue is that doing so kills all the threads that use this function.
So how can I kill only thread1, without doing the same to thread2, if both are running together and using the same target function?
Thank you!
Below is a simplified example of what I'm actually doing:
def test_thread(freq):
    starttime = time.time()
    while RUN == True:
        try:
            if 1 == 1:
                print('1')
            sleep(freq - ((time.time() - starttime) % freq))
        except Exception as Ex:
            print(Ex)
            pass
RUN = True
run_test = Thread(target=test_thread, args=(20,))  # args must be a tuple, hence the trailing comma
run_test.start()
You could pass a different, mutable object as an argument to each of the two threads:
class Stopper:
    def __init__(self):
        self.flag = True

    def ok_to_keep_going(self):
        return self.flag

    def stop_now(self):
        self.flag = False

def test_thread(freq, stopper):
    ...
    while stopper.ok_to_keep_going():
        ...

if __name__ == '__main__':
    t1_stopper = Stopper()
    t2_stopper = Stopper()

    t1 = Thread(target=test_thread, args=(T1_FREQ, t1_stopper))
    t2 = Thread(target=test_thread, args=(T2_FREQ, t2_stopper))
    t1.start()
    t2.start()
Now you can stop thread 1 by calling t1_stopper.stop_now(), or stop thread 2 by calling t2_stopper.stop_now().
Or, for fewer lines of code:
def test_thread(freq, stopper):
    ...
    while stopper[0]:
        ...

if __name__ == '__main__':
    t1_stopper = [True]
    t2_stopper = [True]

    t1 = Thread(target=test_thread, args=(T1_FREQ, t1_stopper))
    t2 = Thread(target=test_thread, args=(T2_FREQ, t2_stopper))
    t1.start()
    t2.start()
Now you stop thread t1 by setting t1_stopper[0]=False.
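Putting it together, a minimal runnable sketch of the list-based stopper (the loop body and frequencies here are placeholders, not the asker's real code):

import time
from threading import Thread

def test_thread(name, freq, stopper):
    while stopper[0]:              # keep running until the flag is cleared
        print(name, 'tick')
        time.sleep(freq)

if __name__ == '__main__':
    t1_stopper = [True]
    t2_stopper = [True]
    t1 = Thread(target=test_thread, args=('t1', 0.5, t1_stopper))
    t2 = Thread(target=test_thread, args=('t2', 0.5, t2_stopper))
    t1.start()
    t2.start()

    time.sleep(2)
    t1_stopper[0] = False          # stops only t1; t2 keeps running
    t1.join()

    t2_stopper[0] = False
    t2.join()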

How to stop input() from the main thread in Python 3

What should be written in kill_input() instead of pass to stop input() and terminate the program?
#!/usr/bin/env python3
import threading, time

running = True

def kill_input():
    pass

def input_reader():
    while running:
        print(input())

t = threading.Thread(target=input_reader)
t.start()
time.sleep(2)
kill_input()
print('bye')
Solved by setting the thread to a daemon thread:
t.daemon = True
t.start()
If there are no other non-daemon threads still running, the program terminates automatically.
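For completeness, a sketch of the original script with the daemon fix applied (my rearrangement: kill_input() and the running flag are dropped, since a daemon thread simply dies with the main thread):

#!/usr/bin/env python3
import threading, time

def input_reader():
    while True:
        print(input())

t = threading.Thread(target=input_reader)
t.daemon = True        # a daemon thread does not keep the process alive
t.start()
time.sleep(2)
print('bye')           # the interpreter exits here even though input() is still blocking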

Python 3 threads performance issue

I created two threads and executed them in parallel, but astonishingly it took more time (33.5 secs) than sequential execution (29.4 secs). Please advise: what am I doing wrong?
def write_File(fName):
    start = timeit.default_timer()
    print('writing to {}!\n'.format(fName))
    with open(fName, 'a') as f:
        for i in range(0, 10000000):
            f.write("aadadadadadadadadadadadada" + str(i))
    end = timeit.default_timer()
    print(end - start)
    print('Fn exit!')

start = timeit.default_timer()
t1 = Thread(target=write_File, args=('test.txt',))
t1.start()
t2 = Thread(target=write_File, args=('test1.txt',))
t2.start()
t2.join()
end = timeit.default_timer()
print(end - start)
input('enter to exit')
You aren't doing anything wrong; you fell victim to Python's global interpreter lock (GIL). Only one thread can execute Python bytecode at a time, so under the hood of CPython multiple cores still have to share a single interpreter.
Python threads switch when one goes to sleep or is waiting on I/O, so you would see a performance benefit for tasks such as:
def do_connect():
    s = socket.socket()
    s.connect(('python.org', 80))  # drop the GIL

for i in range(2):
    t = threading.Thread(target=do_connect)
    t.start()
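If the goal is genuine parallelism for this kind of CPU-heavy work, a common alternative (not mentioned in the original answer, so treat this as an assumption) is multiprocessing, which sidesteps the GIL by using separate processes; a rough sketch:

import timeit
from multiprocessing import Process

def write_file(fname):
    # same kind of work as write_File above, but run in a separate process
    with open(fname, 'a') as f:
        for i in range(1000000):
            f.write("aadadadadadadadadadadadada" + str(i))

if __name__ == '__main__':
    start = timeit.default_timer()
    procs = [Process(target=write_file, args=(name,))
             for name in ('test.txt', 'test1.txt')]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    print(timeit.default_timer() - start)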

Two queues: the script doesn't exit

I wrote a script that uses 2 queues and 3 types of worker: producer, consumer (CPU-bound task), writer (I need to write the results sequentially).
This is the simplified version of my code:
from queue import Queue
from threading import Thread
def compute_single_score(data):
    # do lots of calculations
    return 0.0

def producer(out_q, data_to_compute):
    while stuff:
        data = data_to_compute.popitem()
        out_q.put(data)
    out_q.put(_sentinel)

def consumer(in_q, out_q):
    while True:
        data = in_q.get()
        if data is _sentinel:
            in_q.put(_sentinel)
            break
        out_q.put([data[0], compute_single_score(*data)])
        in_q.task_done()

def writer(in_q):
    while True:
        data = in_q.get()
        if data is _sentinel:
            in_q.put(_sentinel)
            break
        in_q.task_done()

if __name__ == '__main__':
    _sentinel = object()
    jobs_queue = Queue()
    scores_queue = Queue()

    t1 = Thread(target=producer, args=(jobs_queue, data_to_compute,))
    t2 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t3 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t4 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t5 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t6 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t7 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t8 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t9 = Thread(target=writer, args=(scores_queue,))
    t1.start(); t2.start(); t3.start(); t4.start(); t5.start(); t6.start(); t7.start(); t8.start(); t9.start()

    jobs_queue.join()
    scores_queue.join()
    print('File written')
It immediately prints out 'File written' instead of waiting for the queues to be empty. Consequently the script doesn't exit, although all the calculations are performed. Two threads seem to remain active.
Thanks a lot for your support.
It does wait for the queues to be empty. But since putting things into the queues happens in threads, the main thread reaches the .join() lines faster than the .put() calls happen, so when it reaches .join() the queues are still empty.
Now, I'm not sure what you are trying to achieve, simply because the producer has a while stuff loop. I assume that you want to continue processing while that condition is true. In particular, you have to wait until the t1 thread quits, i.e.:
t1.start(); t2.start(); t3.start(); t4.start(); t5.start(); t6.start(); t7.start(); t8.start(); t9.start()
t1.join() # <-- this is important
jobs_queue.join()
scores_queue.join()
print('File written')
Otherwise you won't be able to synchronize it.
Side note 1: due to the GIL there is no point in creating CPU-bound threads. If your threads are not doing any I/O (and they don't), a single-threaded version will perform better. At the very least, multiple consumer threads are pointless.
Side note 2: Do not chain statements with semicolons and number the threads t1 … t9 by hand; it's not Pythonic. Instead do this:
threads = []
threads.append(Thread(target=producer, args=(jobs_queue, data_to_compute,)))
threads.append(Thread(target=writer, args=(scores_queue,)))
for i in range(10):
    threads.append(Thread(target=consumer, args=(jobs_queue, scores_queue,)))
for t in threads:
    t.start()
threads[0].join()
Side note 3: You should handle the case when the queues are empty. data = in_q.get() will block forever, meaning that your script won't quit (unless the threads are marked as daemon). You should do, for example:
try:
    data = in_q.get(timeout=1)
except queue.Empty:
    # handle empty queue here, perhaps quit if t1 is not alive
    # otherwise just continue the loop
    if not t1.is_alive():  # <-- you have to pass t1 to the thread
        break
    else:
        continue
and then join all threads at the end of the main thread (see side note 2):
for t in threads:
    t.start()
for t in threads:
    t.join()
print('File written')
And now you don't even have to join queues.
This is the code I used in the end (according to the requirements illustrated before):
from multiprocessing import JoinableQueue
from multiprocessing import Process
def compute_single_score(data):
    # do lots of calculations
    return 0.0

def producer(out_q, data_to_compute):
    while stuff:
        data = data_to_compute.popitem()
        out_q.put(data)

def consumer(in_q, out_q):
    while True:
        try:
            data = in_q.get(timeout=5)
        except:
            break
        out_q.put([data[0], compute_single_score(*data)])
        in_q.task_done()

def writer(in_q):
    while True:
        try:
            data = in_q.get(timeout=5)
        except:
            break
        # write
        in_q.task_done()

if __name__ == '__main__':
    jobs_queue = JoinableQueue()
    scores_queue = JoinableQueue()

    processes = []
    processes.append(Process(target=producer, args=(jobs_queue, data_to_compute,)))
    processes.append(Process(target=writer, args=(scores_queue,)))
    for i in range(10):
        processes.append(Process(target=consumer, args=(jobs_queue, scores_queue,)))
    for p in processes:
        p.start()

    processes[1].join()
    scores_queue.join()
    print('File written')
I hope it will be of help for somebody else.
