I want to pass data between two functions running in separate processes, but it doesn't work as expected.
from multiprocessing import Process

a = False

def func1():
    a = True

def func2():
    if a:
        print("hi")

if __name__ == '__main__':
    p1 = Process(target=func1)
    p1.start()
    p2 = Process(target=func2)
    p2.start()
    p1.join()
    p2.join()
Any suggestions would be appreciated.
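Each process runs with its own copy of the module's globals, so setting a inside func1 is never seen by func2 (and without a global statement it doesn't even change the parent's copy). A minimal sketch of one possible fix, using a shared multiprocessing.Value as the flag; the join ordering just ensures func1 has run before func2 reads:

from multiprocessing import Process, Value

def func1(flag):
    flag.value = True  # lives in shared memory, so other processes see it

def func2(flag):
    if flag.value:
        print("hi")

if __name__ == '__main__':
    a = Value('b', False)  # 'b' = signed char, used here as a boolean
    p1 = Process(target=func1, args=(a,))
    p1.start()
    p1.join()  # make sure the flag is set before func2 reads it
    p2 = Process(target=func2, args=(a,))
    p2.start()
    p2.join()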
Related
I want to get the result_1 and result_2 arrays with the following code:
import multiprocessing as mp
import numpy as np

result_1 = []
result_2 = []

a = np.random.rand(10, 10)
b = np.random.rand(7, 7)

def inv_1(x):
    result_1.append(np.linalg.inv(x))

def inv_2(y):
    result_2.append(np.linalg.inv(y))

if __name__ == "__main__":
    p1 = mp.Process(target=inv_1, args=(a,))
    p2 = mp.Process(target=inv_2, args=(b,))
    p1.start()
    p2.start()
    p1.join()
    p2.join()
    print(result_1, result_2)
However, when I run the code, I get the following output:
[] []
How can I solve this problem?
Unlike threads, you can't share arbitrary variables between processes. To do what you're trying to do, you can create shared lists using a multiprocessing.Manager object, e.g.:
import multiprocessing as mp
import numpy as np

a = np.random.rand(10, 10)
b = np.random.rand(7, 7)

def inv_1(x, target):
    target.append(np.linalg.inv(x))

def inv_2(y, target):
    target.append(np.linalg.inv(y))

if __name__ == "__main__":
    # A Manager list lives in a server process, so appends made by the
    # workers are visible to the parent after join().
    mgr = mp.Manager()
    result_1 = mgr.list()
    result_2 = mgr.list()

    p1 = mp.Process(target=inv_1, args=(a, result_1))
    p2 = mp.Process(target=inv_2, args=(b, result_2))
    p1.start()
    p2.start()
    p1.join()
    p2.join()

    print('RESULT 1:', result_1)
    print('RESULT 2:', result_2)
This does what you're trying to do, although it's not clear why you need lists in the first place -- each worker appends exactly one item to an empty list, so result_1 and result_2 only ever hold a single value.
More broadly, you might want to redesign your code so that it doesn't rely on shared variables. A common solution is to use a queue to pass data from your workers back to the main process.
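For example, a minimal sketch of that queue-based approach, assuming you simply want both inverses back in the parent (the 'inv_1'/'inv_2' tags are just illustrative labels):

import multiprocessing as mp
import numpy as np

def invert(tag, x, q):
    # send (label, result) back to the parent instead of mutating shared state
    q.put((tag, np.linalg.inv(x)))

if __name__ == "__main__":
    q = mp.Queue()
    a = np.random.rand(10, 10)
    b = np.random.rand(7, 7)
    p1 = mp.Process(target=invert, args=('inv_1', a, q))
    p2 = mp.Process(target=invert, args=('inv_2', b, q))
    p1.start()
    p2.start()
    results = dict(q.get() for _ in range(2))  # drain before join to avoid blocking on a full pipe
    p1.join()
    p2.join()
    print(results['inv_1'], results['inv_2'])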
I am trying to learn about multiprocessing in Python and I was having a look at this tutorial.
According to it, the following code should run in about 1 second:
import multiprocessing
import time

def do_something():
    print('Sleeping...')
    time.sleep(1.0)
    print('Done sleeping!')

if __name__ == '__main__':
    start = time.perf_counter()
    p1 = multiprocessing.Process(target=do_something)
    p2 = multiprocessing.Process(target=do_something)
    p1.start()
    p2.start()
    p1.join()
    p2.join()
    finish = time.perf_counter()
    print('Time', finish-start)
but when I run it, I get an execution time of more than 2 seconds:
Sleeping...
Sleeping...
Done sleeping!
Done sleeping!
Time 2.1449603
Why is that happening? What am I missing?
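The output shows the two sleeps do overlap (both 'Sleeping...' lines appear before either 'Done sleeping!'), so the extra second is most likely process startup and teardown overhead, which is much larger on Windows (where multiprocessing spawns a fresh interpreter and re-imports the module for each child) than on Linux (which forks by default). A small sketch to measure that overhead in isolation, assuming that's the cause:

import multiprocessing
import time

def noop():
    pass  # no work, so the measured time is pure process overhead

if __name__ == '__main__':
    start = time.perf_counter()
    p = multiprocessing.Process(target=noop)
    p.start()
    p.join()
    print('Process overhead:', time.perf_counter() - start)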
I want to use multiprocessing in my program, which has an overloaded function, as follows.
import multiprocessing
from pythonlangutil.overload import Overload, signature

@Overload
@signature("int")
def func1(n):
    print(n)

@func1.overload
@signature()
def func1():
    print('Overloaded')

def func2():
    print('Execute func2')

if __name__ == "__main__":
    p1 = multiprocessing.Process(target=func1, args=(5,))
    p2 = multiprocessing.Process(target=func2)
    p1.start()
    p2.start()
    p1.join()
    p2.join()
While executing the above example, I'm getting the following error:
File "<string>", line 1, in <module>
File "..\multiprocessing\spawn.py", line 102, in spawn_main
source_process = _winapi.OpenProcess(
OSError: [WinError 87] The parameter is incorrect
When I execute the function without overloads, it works just fine.
Why does this happen? Can I resolve this error?
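A hedged sketch of one possible workaround, assuming the root cause is that the Overload decorator replaces func1 with a wrapper object that the spawn start method used on Windows cannot reliably transfer to the child process: give Process a plain module-level function (run_func1 here is a hypothetical name) and let it call the overloaded func1 inside the child, e.g.:

def run_func1(n):
    # A plain function pickles cleanly under spawn; the overloaded
    # func1 is only looked up once we're inside the child process.
    func1(n)

if __name__ == "__main__":
    p1 = multiprocessing.Process(target=run_func1, args=(5,))
    p1.start()
    p1.join()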
from threading import Thread, Lock, current_thread
from queue import Queue
import time

def worker(q, Lock):
    while True:
        value = q.get()
        # processing...
        print(f'in {current_thread().name} got {value}')
        q.task_done()

if __name__ == "__main__":
    q = Queue()
    num_threads = 10
    for i in range(num_threads):
        thread = Thread(target=worker)
        thread.daemon = True
        thread.start()
    for i in range(1, 21):
        q.put(i)
    q.join()
    print('end main')
I keep getting an error in Python 3.9. Does anyone have a solution? It doesn't seem to be using "import time" or "Lock".
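The error most likely comes from the thread target: worker() is declared with two parameters (q and Lock), but Thread(target=worker) starts it with no arguments, so each thread dies immediately with a TypeError; the unused import time is harmless. A minimal sketch of the fix, passing the queue and a lock through args (keep the lock only if you actually share state):

from threading import Thread, Lock, current_thread
from queue import Queue

def worker(q, lock):
    while True:
        value = q.get()
        with lock:  # guard shared output/state
            print(f'in {current_thread().name} got {value}')
        q.task_done()

if __name__ == "__main__":
    q = Queue()
    lock = Lock()
    for _ in range(10):
        Thread(target=worker, args=(q, lock), daemon=True).start()
    for i in range(1, 21):
        q.put(i)
    q.join()
    print('end main')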
I want to solve multiple CPLEX models simultaneously using Python multiprocessing. I understand that the basic example of multiprocessing in Python is something like:
from multiprocessing import Process

def func1():
    '''some code'''

def func2():
    '''some code'''

if __name__ == '__main__':
    p1 = Process(target=func1)
    p1.start()
    p2 = Process(target=func2)
    p2.start()
    p1.join()
    p2.join()
The structure of my script is like:
def Model1(*args, **kwargs):
    '''cplex model written with docplex'''
    return model

def Model2(*args, **kwargs):
    '''cplex model written with docplex'''
    return model

def Generate_pool1(mdl, i):
    cpx = mdl.get_cplex()
    cpx.parameters.parallel.set(1)
    cpx.parameters.threads.set(5)
    cpx.parameters.emphasis.mip.set(4)
    cpx.parameters.simplex.tolerances.markowitz.set(0.999)
    cpx.parameters.simplex.tolerances.optimality.set(1e-9)
    cpx.parameters.simplex.tolerances.feasibility.set(1e-9)
    cpx.parameters.mip.pool.intensity.set(4)
    cpx.parameters.mip.pool.absgap.set(1e75)
    cpx.parameters.mip.pool.relgap.set(1e75)
    cpx.populatelim = 50
    numsol = cpx.solution.pool.get_num()
    return numsol

def Generate_pool2(mdl, i):
    cpx = mdl.get_cplex()
    cpx.parameters.parallel.set(1)
    cpx.parameters.threads.set(5)
    cpx.parameters.emphasis.mip.set(4)
    cpx.parameters.simplex.tolerances.markowitz.set(0.999)
    cpx.parameters.simplex.tolerances.optimality.set(1e-9)
    cpx.parameters.simplex.tolerances.feasibility.set(1e-9)
    cpx.parameters.mip.pool.intensity.set(4)
    cpx.parameters.mip.pool.absgap.set(1e75)
    cpx.parameters.mip.pool.relgap.set(1e75)
    cpx.populatelim = 50
    numsol = cpx.solution.pool.get_num()
    return numsol

def main():
    for i in range(len(data) - 1):
        m1 = Model1(data[i])
        m2 = Model2(data[i + 1])
        p1 = Process(target=Generate_pool1, args=(m1, i))
        p1.start()
        p2 = Process(target=Generate_pool2, args=(m2, i + 1))
        p2.start()
        p1.join()
        p2.join()
When I run this code, the cplex part doesn't work: the console keeps running but nothing happens, and the process never finishes by itself; I have to keyboard-interrupt it every time. My machine has 32 virtual cores, and the code runs in Spyder on Windows 10.
With docplex, you may find an example at https://www.linkedin.com/pulse/making-optimization-simple-python-alex-fleischer/
https://github.com/PhilippeCouronne/docplex_contribs/blob/master/docplex_contribs/src/zoomontecarlo2.py
which uses
https://github.com/PhilippeCouronne/docplex_contribs/blob/master/docplex_contribs/src/process_pool.py
that relies on
import concurrent.futures
from concurrent.futures import ProcessPoolExecutor
This example relies on docplex
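For instance, a minimal sketch of that ProcessPoolExecutor pattern, where solve_model is a hypothetical stand-in for a worker that builds and solves one docplex model per process and returns something picklable (such as the solution-pool size), rather than the model object itself:

from concurrent.futures import ProcessPoolExecutor

def solve_model(i):
    # hypothetical worker: build and solve the i-th model here
    return i  # placeholder result

if __name__ == '__main__':
    with ProcessPoolExecutor(max_workers=2) as executor:
        futures = [executor.submit(solve_model, i) for i in range(2)]
        results = [f.result() for f in futures]  # blocks until both workers finish
    print(results)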