How to Access pywebview.Window Object from Another multiprocessing.Process? - python-3.x

I have a webview that controls a Flask API.
The webview has a button to start the Flask server and a button to stop it, which is why I use multiprocessing.Process to run Flask in a separate process. With that, I can no longer access my pywebview.Window. I want to call pywebview.Window.evaluate_js() from within the Flask process (and of course it has to be the same pywebview.Window that I created before spawning the Flask process).
Does anybody know how to accomplish this? I appreciate it!
Some sample code:
from flask import Flask, request, jsonify
from flask_cors import CORS
import os, sys, re, json, socket, sqlite3, base64, requests, webview
import multiprocessing  # needed for multiprocessing.Process below

class ServerFlaskApi:
    def __init__(self):
        self.app = Flask(__name__, root_path=Root_Dir)
        self.app.add_url_rule("/", view_func=self.Default)

    def Default(self):
        return "Welcome to the Python Http Server for your Application!", 200

    def PrintToWebViewConsole(self):
        # Trying to use pywebview.Window here; of course WebviewWindow is not defined!!!
        WebviewWindow.evaluate_js(js_script)

################

class WebviewApi:
    def __init__(self):
        self.server_thread = None

    def StartServer(self):
        self.server_thread = multiprocessing.Process(target=Run_Flask_Server, daemon=True)
        self.server_thread.start()

    def StopServer(self):
        self.server_thread.terminate()

def Run_Flask_Server():
    serverApi = ServerFlaskApi()
    CORS(serverApi.app)
    serverApi.app.run(host=Server_Host, port=Server_Port, debug=True, use_reloader=False)

################

if __name__ == "__main__":
    WebViewApi = WebviewApi()
    WebviewWindow = webview.create_window(title="Server Monitor", url="view/main-gui.html", js_api=WebViewApi, width=550, height=750, min_size=(550, 750), resizable=False, on_top=True, confirm_close=False)
    webview.start(debug=False)
I'm still new to Python, so any suggestion is welcome!
Thank you in advance!

I guess I have to use threading instead of multiprocessing, since threads share memory and processes do not.
Also, for anybody who wants to stop a thread, here is a function that does that. I'm not sure it's a good way to do it, but it does the job for me:
import ctypes
import threading

def Kill_Thread(thread):
    if not isinstance(thread, threading.Thread):
        raise TypeError("Must be set as threading.Thread type!!!")
    thread_id = thread.ident
    # Asynchronously raise SystemExit inside the target thread
    res = ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(thread_id), ctypes.py_object(SystemExit))
    if res > 1:
        # Undo the call if it affected more than one thread state
        ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(thread_id), 0)
        print("Exception raise failure")

Related

How to run and cancel a Linux command using Flask Python API?

I am working on a Flask-based Python API. It has two endpoints, run_cmd and stop_cmd. run_cmd will execute a command in the terminal. This command keeps running until someone manually cancels it, so to cancel it we have the stop_cmd endpoint. Below is the code:
from flask import Flask, jsonify, request
from threading import Thread
from subprocess import call

app = Flask(__name__)

def RunCmd():
    call('while true; do echo "hello"; sleep 2s; done', shell=True)

@app.route('/run_cmd', methods=['GET'])
def run_cmd():
    Thread(target=RunCmd).start()
    return jsonify({"status": "ok"}), 200

@app.route('/stop_cmd', methods=['GET'])
def stop_cmd():
    # This api will stop the cmd running in RunCmd
    pass
As you can see in the above code, if we hit /run_cmd, the command starts and keeps printing hello in the terminal. I wanted to know how we can cancel this ongoing command, so that we can implement it in the stop_cmd endpoint. Is this possible?
This is how I solved it:
from flask import Flask, jsonify, request
from threading import Thread
import subprocess
import psutil
from subprocess import call
from command_runner import command_runner

app = Flask(__name__)
proc = ""

def kill(proc_pid):
    # kill the shell process and every child it spawned
    process = psutil.Process(proc_pid)
    for proc in process.children(recursive=True):
        proc.kill()
    process.kill()

def RunCmd():
    global proc
    proc = subprocess.Popen(['while true; do echo "hello"; sleep 2s; done'], shell=True)

@app.route('/run_cmd', methods=['GET'])
def run_cmd():
    Thread(target=RunCmd).start()
    return jsonify({"status": "ok"}), 200

@app.route('/stop_cmd', methods=['GET'])
def stop_cmd():
    global proc
    kill(proc.pid)
    return jsonify({"status": True}), 200

if __name__ == '__main__':
    app.run(host='127.0.0.1', port=5000)
subprocess.call is part of an older API, if I am informed correctly. Instead, you should probably use subprocess.Popen(). Then you could start your command by running
proc = subprocess.Popen(["somecommand", "-somearg", "somethingelse"])
This will return a Popen object which you can terminate by sending it a signal, for example proc.terminate() or proc.kill().
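A minimal sketch of the two endpoints using Popen directly (the endpoint names and the long-running command come from the question; error handling is omitted). Note that with shell=True the signal goes to the shell process, which is why the solution above also uses psutil to kill child processes:

from flask import Flask, jsonify
import subprocess

app = Flask(__name__)
proc = None  # handle to the running command

@app.route('/run_cmd', methods=['GET'])
def run_cmd():
    global proc
    proc = subprocess.Popen('while true; do echo "hello"; sleep 2; done', shell=True)
    return jsonify({"status": "ok"}), 200

@app.route('/stop_cmd', methods=['GET'])
def stop_cmd():
    global proc
    if proc is not None and proc.poll() is None:   # command is still running
        proc.terminate()                           # or proc.kill() for SIGKILL
        proc.wait()
    return jsonify({"status": True}), 200

if __name__ == '__main__':
    app.run(host='127.0.0.1', port=5000)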

Python: Callback on the worker-queue not working

Apologies for the long post. I am trying to subscribe to a RabbitMQ queue and then create a worker queue to execute tasks. This is required since the incoming rate on RabbitMQ would be high and processing each item from the queue takes 10-15 minutes, hence the need for a worker queue. I am trying to keep only 4 items in the worker queue at a time and register a callback method for processing them. The expectation is that my code handles the case where all 4 slots in the worker queue are busy: new incoming items should block until a free slot is available.
The RabbitMQ piece is working well. The problem is that I cannot figure out why the items from my worker queue are not executing the task, i.e. the callback is not working. In fact, an item from the worker queue gets executed only once, when the program starts. For the rest of the time, tasks keep getting added to the worker queue without being consumed. I would appreciate it if somebody could help me understand this.
I am attaching the code for rabbitmqConsumer, the driver, and slaveConsumer. Some information has been redacted in the code for privacy reasons.
# This is the driver
#!/usr/bin/env python
import time
from rabbitmqConsumer import BasicMessageReceiver

basic_receiver_object = BasicMessageReceiver()
basic_receiver_object.declare_queue()
while True:
    basic_receiver_object.consume_message()
    time.sleep(2)
#This is the rabbitmqConsumer
#!/usr/bin/env python
import pika
import ssl
import json
from slaveConsumer import slave

class BasicMessageReceiver:
    def __init__(self):
        # SSL Context for TLS configuration of Amazon MQ for RabbitMQ
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
        url = <url for the queue>
        parameters = pika.URLParameters(url)
        parameters.ssl_options = pika.SSLOptions(context=ssl_context)
        self.connection = pika.BlockingConnection(parameters)
        self.channel = self.connection.channel()
        # worker-queue object
        self.slave_object = slave()
        self.slave_object.start_task()

    def declare_queue(self, queue_name="abc"):
        print(f"Trying to declare queue inside consumer({queue_name})...")
        self.channel.queue_declare(queue=queue_name, durable=True)

    def close(self):
        print("Closing Receiver")
        self.channel.close()
        self.connection.close()

    def _consume_message_setup(self, queue_name):
        def message_consume(ch, method, properties, body):
            print("I am inside the message_consume")
            message = json.loads(body)
            self.slave_object.execute_task(message)
            ch.basic_ack(delivery_tag=method.delivery_tag)
        self.channel.basic_qos(prefetch_count=1)
        self.channel.basic_consume(on_message_callback=message_consume,
                                   queue=queue_name)

    def consume_message(self, queue_name="abc"):
        print("I am starting the rabbitmq start_consuming")
        self._consume_message_setup(queue_name)
        self.channel.start_consuming()
#This is the slaveConsumer
#!/usr/bin/env python
import pika
import ssl
import json
import requests
import threading
import queue
import os

class slave:
    def __init__(self):
        self.job_queue = queue.Queue(maxsize=3)
        self.job_item = ""

    def start_task(self):
        def _worker():
            while True:
                json_body = self.job_queue.get()
                self._parse_object_from_queue(json_body)
                self.job_queue.task_done()
        threading.Thread(target=_worker, daemon=True).start()

    def execute_task(self, obj):
        print("Inside execute_task")
        self.job_item = obj
        self.job_queue.put(self.job_item)
        # print(self.job_queue.queue)

    def _parse_object_from_queue(self, json_body):
        if bool(json_body['entity']):
            if json_body['entity'] == 'Hello':
                print("Inside Slave: Hello")
            elif json_body['entity'] == 'World':
                print("Inside Slave: World")
        self.job_queue.join()
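For reference, here is a minimal, self-contained sketch of the worker-queue pattern described above: a bounded queue.Queue whose put() blocks while all workers are busy. The worker count of 4 comes from the question; everything else (job values, timings) is illustrative:

import queue
import threading
import time

NUM_WORKERS = 4
job_queue = queue.Queue(maxsize=NUM_WORKERS)   # put() blocks when the queue is full

def worker(worker_id):
    while True:
        job = job_queue.get()                  # blocks until a job is available
        try:
            print(f"worker {worker_id} processing {job}")
            time.sleep(1)                      # stand-in for the long-running task
        finally:
            job_queue.task_done()              # mark the slot as free

# start the pool of worker threads once, up front
for i in range(NUM_WORKERS):
    threading.Thread(target=worker, args=(i,), daemon=True).start()

# producer side: this is the role the rabbitmq callback plays
for n in range(10):
    job_queue.put(n)                           # blocks while all slots are taken

job_queue.join()                               # wait for all queued jobs to finish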

How to use "Pool" correctly for downloading files in parallel?

I want to download videos from YouTube in parallel, but my code ends with a "PicklingError" exception. Can you guys help me with the code and how it should look, please?
Another fixed variant:
import sys
#from pathos.multiprocessing import ProcessingPool as Pool
from multiprocessing import Pool
from pytube import YouTube
from youtubeMultiDownloader import UiMainWindow
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QFileDialog

class YouTubeInstance:
    def __init__(self, path):
        self.youtube = YouTube
        self.path = path
        #self.ui_obj = ui_obj

    def download_file(self, url):
        self.youtube(url).streams.get_highest_resolution().download(self.path)
        #self.ui.ui.youtube_outputs.setText(f'Video \'{self.youtube.title}\' has been downloaded successfully!')

class YouTubeMultiDownloader(QtWidgets.QMainWindow):
    def __init__(self):
        super().__init__()
        self.pool = Pool
        self.ui = UiMainWindow()
        self.ui.setup_ui(self)
        self.path_to_dir = None
        self.urls = None

    def _get_urls_from_form(self):
        self.urls = self.ui.youtube_urls.toPlainText().split('\n')
        return len(self.urls)

    def choose_directory(self):
        self.path_to_dir = str(QFileDialog.getExistingDirectory(self, "Select Directory"))

    def run_multi_downloads(self):
        youtube = YouTubeInstance(self.path_to_dir)
        self.pool(self._get_urls_from_form()).map(youtube.download_file, self.urls)

if __name__ == "__main__":
    app = QtWidgets.QApplication([])
    application = YouTubeMultiDownloader()
    application.show()
    sys.exit(app.exec_())
Updated: (screenshots of my UI and of errors 1-3 were attached here; errors 1 and 2 are fixed, error 3 is still occurring)
You've got the wrong end of the stick. Take a look at the multiprocessing module documentation. As it says, Pool is for running multiple instances of the same function simultaneously (in parallel). So create the Pool with as many worker processes as you want and map your download function over the list of URLs, along the lines of:
with Pool(5) as p:
    p.map(download_file, urls)
This creates 5 parallel workers. You can adapt the code from there and work through your errors.
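If the PicklingError comes from the callable or its arguments not being picklable, one common workaround is to keep the worker function at module level and pass it only plain data. A minimal sketch along those lines (the pytube calls mirror the question; the directory and URLs are placeholders):

from multiprocessing import Pool
from pytube import YouTube

def download_file(args):
    # a plain module-level function receiving plain strings is straightforward to pickle
    url, path = args
    YouTube(url).streams.get_highest_resolution().download(path)

if __name__ == "__main__":
    path_to_dir = "/tmp/videos"                                # placeholder directory
    urls = ["https://youtu.be/aaa", "https://youtu.be/bbb"]    # placeholder URLs
    with Pool(5) as pool:
        pool.map(download_file, [(url, path_to_dir) for url in urls])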

How to run two processes at once using Kivy

I'm struggling to run my Kivy app simultaneously alongside a Python script that is imported locally.
Full Python code:
import Client  # local import
import time
from threading import Thread
from kivy.app import App
from kivy.uix.button import Button
from kivy.lang import Builder
from kivy.uix.screenmanager import ScreenManager, Screen

class MainWindow(Screen):
    pass

class SecondWindow(Screen):
    pass

class WindowManager(ScreenManager):  # will manage navigation of windows
    pass

kv = Builder.load_file("my.kv")

class Sound(App):
    def build(self):
        return kv

    def ipconfig(self, input_ip):
        if len(input_ip) == 13:
            print('Address binded!!')
            Client.host = input_ip  # modify IP address
        else:
            print('Invalid input_ip')

if __name__ == '__main__':
    #Sound().run()  # Kivy run method
    Thread(target=Sound().run()).start()
    time.sleep(10)
    Thread(target=Client.father_main).start()
Where the threading happens:
if __name__ == '__main__':
    #Sound().run()  # Kivy run method
    Thread(target=Sound().run()).start()
    time.sleep(10)
    Thread(target=Client.father_main).start()  # Client is locally imported
PROBLEMS
1. Only the Kivy app runs; the father_main function fails to run.
2. The only time father_main runs is when I close the Kivy application.
3. If I try to remove the run() from Sound(), I get TypeError: 'Sound' object is not callable and father_main runs immediately.
4. If I only remove the parentheses from run() so it becomes run, I get Segmentation fault (core dumped).
Kivy does not encourage the use of time.sleep(), and I still have no clue what exactly your program does, but here is a solution.
Create an on_start method (a method that runs when the Kivy app has started) and start the ipconfig method from there, but start it asynchronously.
from multiprocessing.pool import ThreadPool

class Sound(App):
    def on_start(self):
        pool = ThreadPool(processes=1)
        # note the trailing comma: apply_async expects a tuple of arguments
        async_start = pool.apply_async(self.ipconfig, ("value for ip_input here",))
        # do some other things inside the main thread here

if __name__ == "__main__":
    Sound().run()
You need to run the App on the main thread. I would suggest something like:
from kivy.clock import Clock
from threading import Thread

def start_father_main(dt):
    Thread(target=Client.father_main).start()  # Client is locally imported

if __name__ == '__main__':
    Clock.schedule_once(start_father_main, 10)
    Sound().run()
I haven't tested this code, but it should give you the idea.

Flask versus PyQt5 - Flask controls program flow?

Flask appears to prevent the PyQt5 UI from updating.
The respective code works properly for either PyQt5 or Flask, but not for both together. I understand that it may have to do with the way threading is set up.
Any assistance would be greatly appreciated. TIA.
import sys
import serial
import threading
from PyQt5.QtWidgets import QWidget, QLabel, QApplication
from flask import Flask, render_template, request, redirect, url_for

app1 = Flask(__name__)
ser = serial.Serial("/dev/ttyS0", 57600, timeout=3)  # open port with baud rate
count = 0
temp = []

class Example(QWidget):
    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        global count
        count = 1
        self.setGeometry(300, 300, 250, 150)
        self.setWindowTitle('PyQt5 vs Flask')
        self.lbl1 = QLabel('Count ' + str(count), self)
        self.lbl1.move(100, 50)
        self.show()
        threading.Timer(5, self.refresh).start()

    def refresh(self):
        global count
        count += 1
        print("UI ", count)
        self.lbl1.setText('Count ' + str(count))
        threading.Timer(5, self.refresh).start()

def get_uart():
    global temp
    if ser.inWaiting() > 0:
        temp = [str(float(x.decode('utf-8'))) for x in ser.read_until().split(b',')]
        print(temp)
    threading.Timer(1, get_uart).start()

@app1.route("/")
def index():
    global temp
    templateData = {'temp1': temp[1], 'temp2': temp[2]}
    return render_template('index.html', **templateData)

if __name__ == "__main__":
    app = QApplication(sys.argv)
    pyqt5 = Example()
    threading.Timer(1, get_uart).start()
    ser.flushInput()
    #app1.run(host='0.0.0.0', threaded=True, port=5000)  # ,debug=True)
    sys.exit(app.exec_())
I need a UI to control the data analysis that is displayed on the website.
[SOLVED]
All Flask parameters can be defined as:
port = int(os.environ.get('PORT', local_port))
kwargs = {'host': '127.0.0.1', 'port': port, 'threaded': True, 'use_reloader': False, 'debug': False}
threading.Thread(target=app.run, daemon=True, kwargs=kwargs).start()
and Flask will NOT block and will run with the parameters defined in kwargs.
The better way to deal with (possibly waiting) processes is to use Qt's own threads.
In this example I've created a QObject subclass that does all the processing and emits a signal whenever the condition is valid. I can't install Flask right now, so I've not tested the whole code, but you'll get the idea.
The trick is to use a "worker" QObject that does the processing. Once the object is created, it is moved to a new QThread, where it does all its processing without blocking the event loop (and thus the GUI).
You can also create other signals for that object and connect them to your slots (which might also be standard Python functions/methods outside the Qt application); they will be called whenever necessary.
# (assumes the sys/serial/PyQt5 imports and the `ser` serial port object from the question)
class Counter(QtCore.QObject):
    changed = QtCore.pyqtSignal(str)

    def __init__(self):
        super().__init__()
        self.count = 0

    def run(self):
        # poll the serial port from the worker thread and emit the result as a signal
        while True:
            self.thread().sleep(1)
            if ser.inWaiting() > 0:
                self.changed.emit('{}: {}'.format(
                    self.count,
                    [str(float(x.decode('utf-8'))) for x in ser.read_until().split(b',')]))
                self.count += 1

class Example(QtWidgets.QWidget):
    def __init__(self):
        super().__init__()
        self.counter = Counter()
        self.counterThread = QtCore.QThread()
        # move the worker object to its own thread so it never blocks the GUI
        self.counter.moveToThread(self.counterThread)
        self.counterThread.started.connect(self.counter.run)
        self.initUI()

    def initUI(self):
        self.setGeometry(300, 300, 250, 150)
        self.setWindowTitle('PyQt5 vs Flask')
        self.lbl1 = QtWidgets.QLabel('Count {}'.format(self.counter.count), self)
        self.lbl1.move(100, 50)
        self.counter.changed.connect(self.lbl1.setText)
        self.counterThread.start()

if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    pyqt5 = Example()
    pyqt5.show()
    sys.exit(app.exec_())
I think the problem stems from how Flask is activated. If the app.run command is given any parameters (even in a Thread), then it blocks other commands.
The only way I was able to make Flask and PyQt5 work at the same time was to activate Flask in a dedicated Thread WITHOUT any parameters - SEE BELOW for the various combinations.
Question: Is this a Flask/Python bug or feature, or is there some other explanation related to development vs production deployment?
In any case, I would like help with finding a way to deploy Flask on a port other than 5000 WITHOUT Flask blocking other code.
import sys
import serial
import threading
import atexit
from PyQt5.QtWidgets import QWidget, QLabel, QApplication
from flask import Flask, render_template, request, redirect, url_for

ser = serial.Serial("/dev/ttyS0", 57600, timeout=3)  # open port with baud rate
app = Flask(__name__)
count = 0
temp = []

class Example(QWidget):
    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        global count
        count = 1
        self.setGeometry(300, 300, 250, 150)
        self.setWindowTitle('PyQt5 vs Flask')
        self.lbl1 = QLabel("Count " + str(count) + " ", self)
        self.lbl1.move(100, 50)
        self.show()
        threading.Timer(5, self.refresh).start()

    def refresh(self):
        global count
        global str_data
        count += 1
        self.lbl1.setText("Count " + str(count) + " ")
        threading.Timer(0.5, self.refresh).start()

def get_uart():
    global temp
    if ser.inWaiting() > 0:
        temp = [str(float(x.decode('utf-8'))) for x in ser.read_until().split(b',')]
        print(temp)
    threading.Timer(1, get_uart).start()

@app.route("/")
def blank():
    global count
    data = "Count " + str(count)
    return data

if __name__ == "__main__":
    threading.Timer(5, get_uart).start()
    #app.run                                                   ## Does not block further execution. Website IS NOT available
    #app.run()                                                 ## Blocks further execution. Website available at port 5000 without Refresh value
    #app.run(port=5123)                                        ## Blocks further execution. Website available at port 5123 without Refresh value
    #app.run(threaded=True)                                    ## Blocks further execution. Website available at port 5000 without Refresh value
    #threading.Thread(target=app.run()).start()                ## Blocks further execution. Website available at port 5000 without Refresh value
    #threading.Thread(target=app.run(port=5123)).start()       ## Blocks further execution. Website available at port 5123 without Refresh value
    #threading.Thread(target=app.run(threaded=True)).start()   ## Blocks further execution. Website available at port 5000 without Refresh value
    threading.Thread(target=app.run).start()                   ## Flask DOES NOT block. Website is available at port 5000 with Refresh value
    print("Flask does not block")
    app1 = QApplication(sys.argv)
    pyqt5 = Example()
    sys.exit(app1.exec_())
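As for deploying Flask on a port other than 5000 without blocking: the kwargs approach from the [SOLVED] note above should cover this, since the arguments are handed to the Thread rather than produced by calling app.run(...) on the spot. A minimal sketch (port 5123 is just an example value):

# pass the Flask arguments via the Thread's kwargs instead of calling app.run(...) here
kwargs = {'host': '0.0.0.0', 'port': 5123, 'threaded': True, 'use_reloader': False, 'debug': False}
threading.Thread(target=app.run, daemon=True, kwargs=kwargs).start()
print("Flask does not block")  # reached immediately; the server runs in the background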
