ZeroMQ hangs in a python multiprocessing class/object solution - python-3.x

I'm trying to use ZeroMQ in Python (pyzmq) together with multiprocessing. As a minimal (not) working example I have a server class and a client class, both of which inherit from multiprocessing.Process. The client, as a child process, should send a message to the server child process, which should print the message:
#mpzmq_class.py
from multiprocessing import Process
import zmq

class Server(Process):
    def __init__(self):
        super(Server, self).__init__()
        self.ctx = zmq.Context()
        self.socket = self.ctx.socket(zmq.PULL)
        self.socket.connect("tcp://localhost:6068")

    def run(self):
        msg = self.socket.recv_string()
        print(msg)

class Client(Process):
    def __init__(self):
        super(Client, self).__init__()
        self.ctx = zmq.Context()
        self.socket = self.ctx.socket(zmq.PUSH)
        self.socket.bind("tcp://*:6068")

    def run(self):
        msg = "Hello World!"
        self.socket.send_string(msg)

if __name__ == "__main__":
    s = Server()
    c = Client()
    s.start()
    c.start()
    s.join()
    c.join()
Now if I run this, the server process seems to hang at the receive call msg = self.socket.recv_string(). In another (more complicated) case, it even hung at the socket.connect("...") statement.
If I rewrite the script to use functions instead of classes/objects, it runs just fine:
# mpzmq_function.py
from multiprocessing import Process
import zmq

def server():
    ctx = zmq.Context()
    socket = ctx.socket(zmq.PULL)
    socket.connect("tcp://localhost:6068")
    msg = socket.recv_string()
    print(msg)

def client():
    ctx = zmq.Context()
    socket = ctx.socket(zmq.PUSH)
    socket.bind("tcp://*:6068")
    msg = "Hello World!"
    socket.send_string(msg)

if __name__ == "__main__":
    s = Process(target=server)
    c = Process(target=client)
    s.start()
    c.start()
    s.join()
    c.join()
Output:
paul@AP-X:~$ python3 mpzmq_function.py
Hello World!
Can anybody help me with this? I guess it's something I didn't understand concerning the usage of multiprocessing.
Thank you!

I ran into the same issue.
I guess the problem is that the run method has no access to the context object.
Maybe it has something to do with the C implementation and the fact that processes do not share memory.
If you instantiate the context in the run method, it works.
Here is a working example:
#mpzmq_class.py
from multiprocessing import Process
import zmq

class Base(Process):
    """
    Inherits from Process and
    holds the zmq address.
    """
    def __init__(self, address):
        super().__init__()
        self.address = address

class Server(Base):
    def run(self):
        ctx = zmq.Context()
        socket = ctx.socket(zmq.PULL)
        socket.connect(self.address)
        msg = socket.recv_string()
        print(msg)

class Client(Base):
    def run(self):
        ctx = zmq.Context()
        socket = ctx.socket(zmq.PUSH)
        socket.bind(self.address)
        msg = "Hello World!"
        socket.send_string(msg)

if __name__ == "__main__":
    server_addr = "tcp://127.0.0.1:6068"
    client_addr = "tcp://*:6068"
    s = Server(server_addr)
    c = Client(client_addr)
    s.start()
    c.start()
    s.join()
    c.join()
I added a base class to demonstrate that you can still access normal Python objects from the run method. If you put the context object into the __init__ method, it won't work.
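To make the distinction concrete, here is a minimal sketch (not from the original post, with a hypothetical class name) showing that __init__ runs in the parent process while run() runs in the child, which is why anything created in __init__ has to survive the transfer to the child process:

import os
from multiprocessing import Process

class WhereAmI(Process):
    def __init__(self):
        super().__init__()
        print("__init__ runs in PID", os.getpid())   # executed in the parent process

    def run(self):
        print("run() runs in PID", os.getpid())      # executed in the child process

if __name__ == "__main__":
    p = WhereAmI()
    p.start()
    p.join()

The two PIDs differ, so anything the child needs either has to transfer cleanly into the child process or has to be created inside run(), as the working example above does with the zmq context.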

Related

Shutdown during recv on python socket

During the execution of this code, it blocks on the join.
I have a TCP server running on ("127.0.0.1", 1777) for the test.
I tried using the socket directly with recv, but the result is the same.
Any idea why the shutdown on READ doesn't interrupt the read?
import socket
from threading import Thread
from time import sleep

class Parser(Thread):
    rbufsize = 4096
    wbufsize = 4096
    encoding = "utf-8"
    new_line = "\n"

    def __init__(self):
        super().__init__()
        self._socket = socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM)
        self._wfile = None
        self._rfile = None

    def run(self):
        self._socket.connect(("127.0.0.1", 1777))
        self._rfile = self._socket.makefile('rb', self.rbufsize, encoding=self.encoding, newline=self.new_line)
        self._wfile = self._socket.makefile('wb', self.wbufsize, encoding=self.encoding, newline=self.new_line)
        while True:
            data = self._rfile.readline()
            if not data:
                break
            self._handle_data(data)
        self._cleanup()

    def _cleanup(self):
        """
        Close everything.
        """
        if not self._wfile.closed:
            try:
                self._wfile.flush()
            except socket.error:
                # A final socket error may have occurred here, such as
                # the local error ECONNABORTED.
                pass
        self._socket.shutdown(socket.SHUT_RDWR)
        self._wfile.close()
        self._rfile.close()
        self._socket.close()

    def stop(self):
        self._socket.shutdown(socket.SHUT_RD)

if __name__ == "__main__":
    p = Parser()
    p.start()
    sleep(5)
    p.stop()
    print("start join")
    p.join()
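One possible workaround (a sketch, not taken from the original question; it replaces the file wrappers with plain recv calls) is to give the socket a timeout so the blocking read wakes up periodically and can check a stop flag:

import socket
from threading import Thread, Event

class PollingParser(Thread):
    def __init__(self):
        super().__init__()
        self._stop_event = Event()
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    def run(self):
        self._socket.connect(("127.0.0.1", 1777))
        self._socket.settimeout(0.5)          # recv() now returns control every 0.5 s
        while not self._stop_event.is_set():
            try:
                data = self._socket.recv(4096)
            except socket.timeout:
                continue                      # no data yet; check the stop flag again
            if not data:
                break                         # peer closed the connection
            # handle data here
        self._socket.close()

    def stop(self):
        self._stop_event.set()

Whether shutdown(SHUT_RD) interrupts a recv that is already blocked can vary by platform, so polling with a timeout (or closing the socket from the stopping thread) is often the more portable choice.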

Mocking REST APIs with Flask_restful using threading

I'm looking to mock a set of REST APIs for some tests. The following main() function works fine (i.e. it returns {"some-data": 1234} as json to the browser when I GET localhost:8099). The issue is it blocks the main thread:
from gevent import monkey, sleep, pywsgi
monkey.patch_all()
import flask
from flask_restful import reqparse, abort, Api, Resource
import queue
import sys
import threading

STUFFS = {"some-data": 1234}

class Stuff(Resource):
    def get(self):
        return flask.jsonify(STUFFS)

class ControlThread(threading.Thread):
    def __init__(self, http_server, stop_event):
        threading.Thread.__init__(self)
        self.stop_event = stop_event
        self.http_server = http_server
        self.running = False

    def run(self):
        try:
            while not self.stop_event.is_set():
                if not self.running:
                    self.http_server.start()
                    self.running = True
                sleep(0.001)
        except (KeyboardInterrupt, SystemExit):
            pass
        self.http_server.stop()

class StuffMock:
    def __init__(self, port, name=None):
        if name is None:
            name = __name__
        self.app = flask.Flask(name)
        self.api = Api(self.app)
        self.api.add_resource(Stuff, "/stuff/")
        self.stop_event = threading.Event()
        self.http_server = pywsgi.WSGIServer(('', port), self.app)
        self.serving_thread = ControlThread(self.http_server,
                                            self.stop_event)
        self.serving_thread.daemon = True

    def start(self):
        self.serving_thread.start()

    def stop(self):
        self.stop_event.set()
        self.serving_thread.join()

def main():
    mocker = StuffMock(8099)
    mocker.start()
    try:
        while True:
            sleep(0.01)
    except (KeyboardInterrupt, SystemExit):
        mocker.stop()
        sys.exit()

if __name__ == "__main__":
    main()
Without the sleep() call in the while loop above, nothing resolves. Here is a more succinct usage to demonstrate:
import time
from stuff_mock import StuffMock

mocker = StuffMock(8099)
mocker.start()
while True:
    user_text = input("let's do some work on the main thread: ")
    # will only resolve the GET request after user input
    # (i.e. when the main thread executes this sleep call)
    time.sleep(0.1)
    if user_text == "q":
        break
mocker.stop()
The gevent threading module seems to work differently from the core one. Does anyone have any tips or ideas about what's going on under the hood?
I found that if I switch out threading for multiprocessing (and threading.Thread for multiprocessing.Process), everything works as expected, and I can spin up arbitrary numbers of mockers without blocking.
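A rough sketch of that swap (my own version, not the poster's code; it assumes the module-level gevent imports shown above, a fork-based start method so the WSGIServer object survives into the child, and a multiprocessing.Event in place of threading.Event inside StuffMock):

import multiprocessing

class ControlProcess(multiprocessing.Process):     # replaces ControlThread(threading.Thread)
    def __init__(self, http_server, stop_event):
        super().__init__()
        self.stop_event = stop_event               # a multiprocessing.Event() created in StuffMock
        self.http_server = http_server
        self.daemon = True

    def run(self):
        self.http_server.start()                   # the server now lives in this child process
        while not self.stop_event.is_set():
            sleep(0.001)                           # gevent sleep, so the server greenlets get to run
        self.http_server.stop()

Because the server runs in its own process with its own gevent loop, the main process no longer has to yield (via sleep) for GET requests to resolve.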

Python Using Multiprocessing for the methods of the same Class

I need some help with the multiprocessing module of Python. I am using Python 3.6.6. My code structure is somewhat like this:
import socket
import time
import multiprocessing as mp

class ABC():
    def __init__(self):
        self.HOST = 'hostserver.com'
        self.TCP_PORT = 0123
        self.BUFFER_SIZE = 1024
        self.SERVER_INFO = ""
        self.SOCK = None

    def connect_socket(self):
        self.SOCK = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.SOCK.settimeout(1)
        self.SOCK.connect((self.HOST, self.TCP_PORT))
        self.SOCK.setblocking(True)

    def recTask(self):
        while True:
            self.receive_data()
            time.sleep(0.01)

    def sendTask(self):
        while True:
            self.SOCK.sendall(bytes)
            print("\n*Message sent*\n")
            time.sleep(0.01)

if __name__ == '__main__':
    Class_obj = ABC()
    id = Class_obj.connect_socket()
    ts = mp.Process(name='send_Process', target=Class_obj.sendTask())
    ts.daemon = True
    tr = mp.Process(name='rec_Process', target=Class_obj.recTask())
    tr.daemon = True
    tr.start()
    ts.start()
    ts.join()
    tr.join()
Can I call the methods of the same class using objects within the process? I want the 2 functions to run independently of each other. Also, when I run this I just see "Message sent" being printed. In the receive_data() function I have print("Message received"), but it never prints. When I comment out the code
ts = mp.Process(name='send_Process', target=Class_obj.sendTask())
ts.daemon = True
ts.start()
I see the "Message Received" being printed. Is there something which I am missing?
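One thing that stands out (a sketch, not from the original post): target=Class_obj.sendTask() calls the method immediately in the parent process and passes its return value to Process, so the parent gets stuck in sendTask's infinite loop and recTask never runs. Passing the bound methods themselves, without parentheses, lets each run in its own process:

ts = mp.Process(name='send_Process', target=Class_obj.sendTask)   # note: no ()
tr = mp.Process(name='rec_Process', target=Class_obj.recTask)
ts.daemon = True
tr.daemon = True
tr.start()
ts.start()
ts.join()
tr.join()

Note that with multiprocessing each child works on its own copy of Class_obj, so whether the connected socket is usable in both children depends on the start method (fork vs. spawn).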

What is the best way to run a Python function after some PyQt5 QThread classes finish work?

I'm using PyQt5 and Python 3. I use 3 QThread classes to run something, and after they are done I need to execute a 4th QThread class. The 4th must run only after all of the started QThreads have finished work, whether that is all 3, only 2, or only 1. It must not run while the first 3 are working.
I looked on the internet but I couldn't find a solution. My code looks like this:
class MyWindow(QtWidgets.QMainWindow):
    def __init__(self):
        QtWidgets.QMainWindow.__init__(self)
        file_path = os.path.abspath('builder_gui.ui')
        uic.loadUi(file_path, self)
        self.obj1 = TasksThread1(self.comboBox.currentText(), self.comboBox_6.currentText())
        self.obj2 = TasksThread2(self.comboBox_2.currentText(), self.comboBox_5.currentText())
        self.obj3 = TasksThread3(self.comboBox_3.currentText(), self.comboBox_4.currentText())
        self.obj4 = TasksThread4()
        self.menubar.setNativeMenuBar(False)
        self.progressVal = 1
        self.cwd = os.getcwd()
        self.obj1.newValueProgress.connect(self.increment_progress)
        self.obj1.message.connect(self.status_bar)
        self.obj2.newValueProgress.connect(self.increment_progress)
        self.obj2.message.connect(self.status_bar)
        self.obj3.newValueProgress.connect(self.increment_progress)
        self.obj3.message.connect(self.status_bar)
        self.obj4.newValueProgress.connect(self.increment_progress)
        self.obj4.message.connect(self.status_bar)
        self.obj4.doneSignal.connect(self.calculate_done_limit)
        self.pushButton.pressed.connect(self.execute_build_script)

    def calculate_done_limit(self):
        limitCalc = 100 - int(self.progressBar.value())
        self.increment_progress(limitCalc)

    def run_gits_all(self):
        if self.crowdTwistCheck.isChecked():
            self.obj1.start()
        else:
            pass
        if self.ThemeCheck.isChecked():
            self.obj2.start()
        else:
            pass
        if self.mainAwsCheck.isChecked():
            self.obj3.start()
        else:
            pass

    def execute_build_script(self):
        self.progressBar.setValue(1)
        self.progressVal = 1
        self.run_gits_all()

    def execute_last_part(self):
        self.obj4.start()

    def status_bar(self, value_in):
        read1 = self.textBrowser.toPlainText()
        self.textBrowser.setText(read1 + "\n" + value_in)

    def increment_progress(self, valueIn):
        self.progressVal += valueIn
        self.progressBar.setValue(self.progressVal)

if __name__ == '__main__':
    import sys
    app = QtWidgets.QApplication(sys.argv)
    window = MyWindow()
    window.show()
    sys.exit(app.exec_())
My first 3 QThreads are like this:
class TasksThread1(QThread):
    newValueProgress = QtCore.pyqtSignal(int)
    message = QtCore.pyqtSignal(str)
    doneSignal = QtCore.pyqtSignal()

    def __init__(self, branch, git):
        QThread.__init__(self)
        self.branch = branch
        self.git = git

    def remove_folder(self):
        do_something_1

    def CrowdTwistRepo(self):
        do_something_2

    def run(self):
        self.remove_folder()
        self.CrowdTwistRepo()
My last QThread looks like this:
class TasksThread4(QThread):
    newValueProgress = QtCore.pyqtSignal(int)
    message = QtCore.pyqtSignal(str)
    doneSignal = QtCore.pyqtSignal()

    def __init__(self):
        QThread.__init__(self)

    def gulp_sass_function(self):
        do_something_1

    def gulp_uglify_function(self):
        do_something_2

    def zipping_function(self):
        do_something_3

    def run(self):
        self.gulp_sass_function()
        self.gulp_uglify_function()
        self.zipping_function()
If I run the code, all of the QThreads start, but I want my 4th QThread to start only after the first 3 have finished working. I used QThreads to improve the GUI experience; without them the GUI froze a lot.
Thanks,
When your first 3 threads are done, send a signal. Then connect this signal to a function that will start the last thread.
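A rough sketch of that approach (my own version, not from the answer; it assumes each TasksThread emits its existing doneSignal at the end of run() and that the window counts how many workers were actually started):

# in each worker thread, emit doneSignal as the last step of run():
class TasksThread1(QThread):
    doneSignal = QtCore.pyqtSignal()

    def run(self):
        self.remove_folder()
        self.CrowdTwistRepo()
        self.doneSignal.emit()                      # tell the window this worker is finished

# in MyWindow.__init__, connect all three workers to one counting slot:
#     for worker in (self.obj1, self.obj2, self.obj3):
#         worker.doneSignal.connect(self.worker_done)

# in MyWindow:
def run_gits_all(self):
    self.started_workers = 0
    self.finished_workers = 0
    for checkbox, worker in ((self.crowdTwistCheck, self.obj1),
                             (self.ThemeCheck, self.obj2),
                             (self.mainAwsCheck, self.obj3)):
        if checkbox.isChecked():
            self.started_workers += 1
            worker.start()

def worker_done(self):
    self.finished_workers += 1
    if self.finished_workers == self.started_workers:
        self.execute_last_part()                    # starts self.obj4 only once every started worker is done

Counting against started_workers rather than a fixed 3 covers the case where only 1 or 2 of the checkboxes are ticked.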

Rpyc Python: Rpyc service thread start call is blocking

The server starts fine and I can update values, but the hello world string is never printed. The program is stuck at t.start(); how can I make it non-blocking?
import rpyc

class rpyc_service(rpyc.Service):
    def on_connect(self):
        self.exposed_var = 0

    def on_disconnect(self):
        pass

    def exposed_update_var(self, val):
        self.exposed_var = val

if __name__ == '__main__':
    from rpyc.utils.server import ThreadedServer
    t = ThreadedServer(rpyc_service, port=18861)
    t.start()
    # The hello world section is never printed
    print("Hello world")
