I am trying to understand Lock from Python's threading module, but for some reason it does not seem to lock anything: the next thread runs without waiting for the lock to be released.
Here is the code:
from threading import Thread, Lock
import time
database_value = 0
def increase(lock):
    global database_value
    lock.acquire()
    local_copy = database_value
    local_copy += 1
    time.sleep(0.1)
    database_value = local_copy
    lock.release()
if __name__ == '__main__':
    lock = Lock()
    print('start value', database_value)
    thread1 = Thread(target=increase, args=(lock,))
    thread2 = Thread(target=increase, args=(lock,))
    print('start')
    #start
    thread1.start()
    thread2.start()
    #join
    print('join')
    thread1.join()
    thread1.join()
    print('end value', database_value)
The output I am expecting is:
start value 0
start
join
end value 2
But the output I get is:
start value 0
start
join
end value 1
At the join step, you wait on thread1 twice instead of waiting on thread2.
#join
print('join')
thread1.join()
thread1.join() # Should be thread2
If you change it as shown below, it will work.
#join
print('join')
thread1.join()
thread2.join()
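As a general pattern, keeping the threads in a list and joining them in a loop makes this kind of copy-paste slip harder, and a with lock: block releases the lock even if the worker raises. Here is a minimal sketch of the same program in that style (the loop-based structure is my suggestion, not part of the original code):

from threading import Thread, Lock
import time

database_value = 0

def increase(lock):
    global database_value
    with lock:  # acquired on entry, released automatically on exit
        local_copy = database_value
        local_copy += 1
        time.sleep(0.1)
        database_value = local_copy

if __name__ == '__main__':
    lock = Lock()
    threads = [Thread(target=increase, args=(lock,)) for _ in range(2)]
    for t in threads:
        t.start()
    for t in threads:  # each thread is joined exactly once
        t.join()
    print('end value', database_value)  # prints: end value 2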
Starting my script off with:
for i in range(threads):
    t = Thread(target=getSizes, args=(i,))
    t.start()
Then, when one of the threads has obtained the variables needed by the other functions, it does:
for i in range(threads):
    t = Thread(target=cart, args=(i, sizes, prod_name, product_id))
    t.start()
Is there any way to wait until all the threads started on getSizes() have finished, and only then start the new threads on cart()?
If your worker function does its work in a loop, it can use a shared resource such as an Event to signal that the work is complete and the loop should return. Here is an example:
import threading
import time
import random

def getSizes(done_event):
    while not done_event.is_set():
        print("get size")
        if random.randint(0, 20) == 10:
            print("got size")
            done_event.set()
            do_cart()
        else:
            time.sleep(random.random())
    print("sizes done")

def do_getSizes():
    event = threading.Event()
    threads = []
    for i in range(5):
        t = threading.Thread(target=getSizes, args=(event,))
        t.start()
        threads.append(t)
    for t in threads:
        t.join()

def cart():
    print("I haz the cartz")

def do_cart():
    time.sleep(1)
    threads = []
    for i in range(5):
        t = threading.Thread(target=cart)
        t.start()
        threads.append(t)
    for t in threads:
        t.join()

do_getSizes()
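If, on the other hand, your getSizes runs to completion on its own, the most direct answer to the question is to keep the getSizes threads in a list, join them all, and only then start the cart threads. A minimal sketch, with hypothetical stubs standing in for your functions and shared variables:

from threading import Thread

# Hypothetical stubs for the question's functions and shared state.
sizes, prod_name, product_id = [], '', 0

def getSizes(i):
    print('getSizes', i)

def cart(i, sizes, prod_name, product_id):
    print('cart', i)

num_threads = 4  # hypothetical thread count

size_threads = [Thread(target=getSizes, args=(i,)) for i in range(num_threads)]
for t in size_threads:
    t.start()
for t in size_threads:  # block until every getSizes thread has finished
    t.join()

# Only now start the cart threads.
cart_threads = [Thread(target=cart, args=(i, sizes, prod_name, product_id))
                for i in range(num_threads)]
for t in cart_threads:
    t.start()
for t in cart_threads:
    t.join()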
Hi guys, I am struggling to stop a thread; I don't get any error, but the thread doesn't stop. I would appreciate some help. I have a button that calls my function lev, which should stop the thread when I turn the button off. The relevant part of the code follows:
exitFlag = 0

def levt():
    print("Executando")
    while ((app.frames[Acionamento].var.get() == 2) and exitFlag == 0):
        print("o thread")
        t1fvm = time.time()  # end time for the left lamp
        n1mv = (t1fvm - t1ivm) * 0.6
        global levm
        levm = levm + n1mv
        print(levm)

def lev():
    app.frames[Acionamento].esquerdaFrame.vendasFrame.luminaria_esquerdaFramev.label6["text"] = "Luminária A"
    global exitFlag
    global thread2
    thread2 = Thread(target=levt)
    if GPIO.input(17):
        GPIO.output(17, GPIO.LOW)
        app.frames[Acionamento].esquerdaFrame.vendasFrame.luminaria_esquerdaFramev.lev_button["text"] = "Desligado"
        if thread2.isAlive():
            exitFlag = 1
    else:
        global t1ivm
        t1ivm = time.time()
        GPIO.output(17, GPIO.HIGH)
        app.frames[Acionamento].esquerdaFrame.vendasFrame.luminaria_esquerdaFramev.lev_button["text"] = "Ligado"
        thread2.start()
Since thread2 is always re-created as a brand-new thread inside lev(), the check if thread2.isAlive(): always evaluates to False, so exitFlag is never set to 1 and the running thread is never stopped.
Move thread2 = Thread(target=levt) to the line just before thread2.start():
def lev():
    ...
    #thread2 = Thread(target=levt)  # moved to the line before thread2.start()
    if GPIO.input(17):
        ...
    else:
        ...
        thread2 = Thread(target=levt)
        thread2.start()
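As an aside, the usual idiom for this kind of stop flag is a threading.Event rather than a global integer. A rough sketch of the same shape, with the GPIO and GUI details left out:

import threading
import time

stop_event = threading.Event()

def levt():
    while not stop_event.is_set():  # loop until the button handler signals stop
        print("running")
        time.sleep(0.5)

def lev(turning_off):
    global thread2
    if turning_off:
        stop_event.set()  # the running levt loop sees this and exits
    else:
        stop_event.clear()  # allow the loop to run again
        thread2 = threading.Thread(target=levt)
        thread2.start()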
I have some code, something like this:
import threading

class MyThread(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        print('do some processing')

if __name__ == '__main__':
    while True:
        val = raw_input('next thread')
        t = MyThread()
        t.start()
        t.join()
The question is: how can I carry on with the main function without blocking it? t.join() stops the main thread until t has finished.
You should put your code in the "code" tag, otherwise it's not really readable.
Then you just have to do something like this:
if __name__ == '__main__':
    #Thread creation
    allThreads = []
    while True:
        val = raw_input('next thread')
        newThread = MyThread()
        newThread.start()
        allThreads.append(newThread)
        #You can do something here

    #Waiting for all threads to stop
    for thread in allThreads:
        thread.join()
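If you also want the main thread to stay responsive while keeping an eye on a worker, join() accepts a timeout, so you can poll instead of blocking indefinitely. A small sketch reusing MyThread from the question:

t = MyThread()
t.start()
while t.is_alive():
    # the main thread regains control every 0.1 s and can do other work here
    t.join(timeout=0.1)
print('worker finished')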
I wrote a script that uses two queues and three types of workers: a producer, consumers (a CPU-bound task), and a writer (I need to write the results sequentially).
This is the simplified version of my code:
from queue import Queue
from threading import Thread

def compute_single_score(data):
    #do lots of calculations
    return 0.0

def producer(out_q, data_to_compute):
    while stuff:
        data = data_to_compute.popitem()
        out_q.put(data)
    out_q.put(_sentinel)

def consumer(in_q, out_q):
    while True:
        data = in_q.get()
        if data is _sentinel:
            in_q.put(_sentinel)
            break
        out_q.put([data[0], compute_single_score(*data)])
        in_q.task_done()

def writer(in_q):
    while True:
        data = in_q.get()
        if data is _sentinel:
            in_q.put(_sentinel)
            break
        in_q.task_done()

if __name__ == '__main__':
    _sentinel = object()
    jobs_queue = Queue()
    scores_queue = Queue()
    t1 = Thread(target=producer, args=(jobs_queue, data_to_compute,))
    t2 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t3 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t4 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t5 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t6 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t7 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t8 = Thread(target=consumer, args=(jobs_queue, scores_queue,))
    t9 = Thread(target=writer, args=(scores_queue,))
    t1.start(); t2.start(); t3.start(); t4.start(); t5.start(); t6.start(); t7.start(); t8.start(); t9.start()
    jobs_queue.join()
    scores_queue.join()
    print('File written')
It immediately prints out 'File written' instead of waiting for the queues to be empty. Consequently the script doesn't exit, although all the calculations are performed. Two threads seem to remain active.
Thanks a lot for your support.
It does wait for the queues to be empty. But since putting things into a queue happens in threads, the main thread reaches the .join() line faster than the .put() calls happen. So when it reaches .join(), the queues are (momentarily) empty.
Now I'm not sure what you are trying to achieve, simply because your producer has a while stuff loop. I assume you want to continue processing until that condition holds. In particular, you have to wait until the t1 thread quits, i.e.
t1.start(); t2.start(); t3.start(); t4.start(); t5.start(); t6.start(); t7.start(); t8.start(); t9.start()
t1.join() # <-- this is important
jobs_queue.join()
scores_queue.join()
print('File written')
Otherwise you won't be able to synchronize it.
Side note 1: due to the GIL there is no point in creating CPU-bound threads. If your threads are not doing any IO (and they aren't), the code will perform better single-threaded. At the very least, the multiple consumer threads are pointless.
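For CPU-bound work like this, a process pool is the usual way around the GIL. A minimal sketch with multiprocessing.Pool, where the work items are hypothetical stand-ins for your data:

from multiprocessing import Pool

def compute_single_score(data):
    # placeholder for the CPU-bound calculation
    return 0.0

if __name__ == '__main__':
    items = list(range(100))  # hypothetical work items
    with Pool(processes=8) as pool:  # 8 worker processes, each with its own interpreter
        scores = pool.map(compute_single_score, items)
    print(len(scores))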
Side note 2: do not chain statements with semicolons. It's not Pythonic. Instead do this:
threads = []
threads.append(Thread(target=producer, args=(jobs_queue, data_to_compute,)))
threads.append(Thread(target=writer, args=(scores_queue,)))
for i in range(10):
    threads.append(Thread(target=consumer, args=(jobs_queue, scores_queue,)))

for t in threads:
    t.start()

threads[0].join()
Side note 3: you should handle the case when the queues are empty. data = in_q.get() will block forever, meaning that your script won't quit (unless the threads are marked as daemons). You should do, for example:
try:
    data = in_q.get(timeout=1)
except queue.Empty:
    # handle empty queue here; perhaps quit if t1 is not alive,
    # otherwise just continue the loop
    if not t1.is_alive():  # <-- you have to pass t1 to the thread
        break
    else:
        continue
and then join all threads at the end of the main thread (see side note 2):
for t in threads:
    t.start()

for t in threads:
    t.join()

print('File written')
And now you don't even have to join the queues.
This is the code I used in the end (following the requirements illustrated above):
import queue
from multiprocessing import JoinableQueue
from multiprocessing import Process

def compute_single_score(data):
    #do lots of calculations
    return 0.0

def producer(out_q, data_to_compute):
    while stuff:
        data = data_to_compute.popitem()
        out_q.put(data)

def consumer(in_q, out_q):
    while True:
        try:
            data = in_q.get(timeout=5)
        except queue.Empty:
            break
        out_q.put([data[0], compute_single_score(*data)])
        in_q.task_done()

def writer(in_q):
    while True:
        try:
            data = in_q.get(timeout=5)
        except queue.Empty:
            break
        #write
        in_q.task_done()

if __name__ == '__main__':
    jobs_queue = JoinableQueue()
    scores_queue = JoinableQueue()
    processes = []
    processes.append(Process(target=producer, args=(jobs_queue, data_to_compute,)))
    processes.append(Process(target=writer, args=(scores_queue,)))
    for i in range(10):
        processes.append(Process(target=consumer, args=(jobs_queue, scores_queue,)))
    for p in processes:
        p.start()
    processes[1].join()
    scores_queue.join()
    print('File written')
I hope it will be of help to somebody else.