I am trying to run my selenium test, but I get an error.
First, I am creating booking.py file, which contains Booking class:
from asyncio import selector_events
from lib2to3.pgen2 import driver
import booking.constants as const
import os
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
class Booking:
def __init__(self, teardown = False):
s = Service(ChromeDriverManager().install())
self.driver = webdriver.Chrome(service=s)
self.driver.get(const.BASE_URL)
self.driver.teardown = teardown
self.driver.implicitly_wait(15)
def __exit__(self, exc_type, exc_val, exc_tb):
if self.driver.teardown:
self.driver.quit()
def cookies(self):
self.driver.find_element(By.ID, 'onetrust-accept-btn-handler').click()
def select_place_to_go(self):
self.driver.find_element(By.ID, "ss").click()
Then, I have run.py file:
from booking.booking import Booking
with Booking() as bot:
bot.cookies()
bot.select_place_to_go()
After running run.py file, I get an error:
AttributeError: __enter__
However, it works completely fine using this code:
bot = Booking()
bot.cookies()
bot.select_place_to_go()
Where is the problem?
f you have any ideas about code improvements, please, let me know. Any help is appreciated, thank you!
I'm guessing you're missing the __enter__ function on that class. When you use a with block in Python, the __enter__ method of the object in the with statement is called, the block inside the with runs, and the __exit__ method is invoked .
You'll get this error if your class doesn't have a __enter__ defined. so you have to define __enter__ method in your Booking class and return self in it
def select_place_to_go(self, place_to_go):
search_field = self.find_element(By.ID, 'ss').click()
search_field.clear()
search_field.send_keys(place_to_go)
This is the code I used and I run it without any issue
Related
I have a PyQt application that uses argparse to pass some argument.
I managed to write a simple test to see if the app starts
but I cannot set/mock the argparse arguments
I know it because inside the code I have some try/except like this
try:
if args.erase_data:
pass
except NameError:
logger.error("Error in parsing erase_data input argument \n")
that during the tests fail, while they do not fail if I run the app.
I tried this to mock args
import os
import pathlib
# import pdb
import sys
from unittest import mock
import pytest
from PyQt5 import QtTest
from PyQt5.QtWidgets import *
from pytestqt.plugin import QtBot
sys.path.append(os.getcwd())
src_dir = pathlib.Path(__file__).parents[1].absolute()
print(src_dir)
sys.path.append(src_dir.as_posix())
GUI = __import__("GUI")
#pytest.fixture(scope="module")
def qtbot_session(qapp, request):
result = QtBot(qapp)
with capture_exceptions() as e:
print(getattr(e, "message", repr(e)))
yield result
print(" TEARDOWN qtbot")
#pytest.fixture(scope="module")
def Viewer(request):
with mock.patch.object(sys, "argv", ["",'-d','2']):
print("mocking sys argv")
print(sys.argv)
# pdb.set_trace()
app, app_window = GUI.main()
qtbotbis = QtBot(app)
QtTest.QTest.qWait(0.5 * 1000)
assert app_window.isVisible()
return app, app_window, qtbotbis
but args is still not set.
any idea how to solve it?
I want use a parallel downloading videos from youtube, but my code ending with exception "PicklingError". Can you help guys with code, how it should be, please.
Another fixed variant:
import sys
#from pathos.multiprocessing import ProcessingPool as Pool
from multiprocessing import Pool
from pytube import YouTube
from youtubeMultiDownloader import UiMainWindow
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QFileDialog
class YouTubeInstance:
def __init__(self, path):
self.youtube = YouTube
self.path = path
#self.ui_obj = ui_obj
def download_file(self, url):
self.youtube(url).streams.get_highest_resolution().download(self.path)
#self.ui.ui.youtube_outputs.setText(f'Video \'{self.youtube.title}\' has been downloaded successfully!')
class YouTubeMultiDownloader(QtWidgets.QMainWindow):
def __init__(self):
super().__init__()
self.pool = Pool
self.ui = UiMainWindow()
self.ui.setup_ui(self)
self.path_to_dir = None
self.urls = None
def _get_urls_from_form(self):
self.urls = self.ui.youtube_urls.toPlainText().split('\n')
return len(self.urls)
def choose_directory(self):
self.path_to_dir = str(QFileDialog.getExistingDirectory(self, "Select Directory"))
def run_multi_downloads(self):
youtube = YouTubeInstance(self.path_to_dir)
self.pool(self._get_urls_from_form()).map(youtube.download_file, self.urls)
if __name__ == "__main__":
app = QtWidgets.QApplication([])
application = YouTubeMultiDownloader()
application.show()
sys.exit(app.exec_())
Updated:
My ui :)
Error 1 fixed:
Error 2 fixed:
Error 3 actual:
You've got the wrong side of the stick. Take a look at multiprocessing module documents. As it says, calling Pool method is for running multiple instance of same function simultaneously (in parallel). So call Pool method as many numbers you want, meanwhile your method does not any parameters, call it without any arguments:
with Pool(5) as p:
print(p.map(YouTubeMultiDownloader))
It create 5 parallel instance. You can change the code an refine your errors.
I am using this body(desired_caps are set properly in config file)
Whatever I do I receive 'AttributeError: 'ClassName' object has no attribute 'driver'' or similar errors - no find_element_by_xpath attribute or whatever.
Do you have any suggestions? I am doing in the same way as in lectures, maybe anything related to appium + python setups?
import unittest
from appium import webdriver
import time
import tracemalloc
tracemalloc.start()
from config import desired_caps
# self = webdriver
# self.driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', desired_caps)
class BaseTest(unittest.TestCase):
def test_testcase1(self):
self.driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', desired_caps)
def test_credentials(self):
email = self.driver.find_element_by_xpath("proper Xpath")
email.send_keys("Test")
save = self.driver.find_element_by_link_text("Log In")
save.click()
def tearDown(self):
self.driver.quit()
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(BaseTest)
unittest.TextTestRunner(verbosity=3).run(suite)
you need to make your driver in a function named setUp(). The unit test suite executes kinda like this.
setUp()
run test_testcase1()
tearDown()
setUp()
run test_credentials()
teardown()
...etc...
if driver driver is not made in setup() the other tests will not know about it. Unless you make driver in every single test. Same goes for any other test variables you'd need.
This way each test is independent of each other, and each test gets a fresh start.
This question already has answers here:
Running an async background task in Tornado
(5 answers)
Closed 3 years ago.
I'm writing a service which consume a disk space and have to clean it from time to time. Before this moment cleaning was performed in request handling. Unfortunately, in case when there is a big disk space have to be cleaned, it consume a lot of time and service hangs. I tried to rewrite cleaning procedure as tornado's future but confused in way of making a future(or something like this) from generator. There is a simplification of my code below:
import tornado
import tornado.ioloop
import tornado.web
from tornado.process import Subprocess
#from tornado.stack_context import run_in_stack_context, NullContext
from time import time
import random
import json
import asyncio
class meta_doc(type):
def __init__(cls, name, bases, methods):
super().__init__(name, bases, methods)
cls.storage_size=0
class Documentation_parsing(tornado.web.RequestHandler, metaclass=meta_doc):
max_storage_size=200
optimal_storage_size=100
cleaning=False
#classmethod
#tornado.gen.coroutine
def _collect_old_folders(cls):
print('start cleaning')
for d in subfolders:
if cls.storage_size<cls.optimal_storage_size:
break
delta=random.randint(5, 15)
time.sleep(random.uniform(0.5, 3))
cls.storage_size-=delta
print('Folder have been deleted. Folder size:', cls.storage_size)
yield None
cls.cleaning=False
print('finish cleaning')
#classmethod
def collect_old_folders(cls):
if not cls.cleaning:
cls.cleaning=True
tornado.ioloop.IOLoop.current().add_future(cls._collect_old_folders, lambda f: f.result())
#tornado.gen.coroutine
def post(self):
request_id=self.get_body_argument("request_id", default='')
self.__class__.storage_size+=random.randint(5, 15)
if self.storage_size>self.max_storage_size:
self.collect_old_folders()
self.write(json.dumps({'request_id': request_id, 'storage_size': self.storage_size}))
print('process request: request_id {0}, storage size {1}'.format(request_id, self.storage_size))
ApplicationSuffixes=[(r'/main_parsing.*', Documentation_parsing)]
if __name__=='__main__':
app = tornado.web.Application(ApplicationSuffixes)
app.listen(9999)
tornado.ioloop.IOLoop.current().start()
This code return future assertation error. I looked for another decorator which make a future from generator or corutine, but didn't find. Please help me with this problem.
UPD. I have already tried solution from mentioned question but receive "Cannot import run_in_stack_context". That's why such import is commented in my code
Solve it:
import tornado
import tornado.ioloop
import tornado.web
from tornado.process import Subprocess
#from tornado.stack_context import run_in_stack_context, NullContext
import time
import random
import json
import asyncio
class meta_doc(type):
def __init__(cls, name, bases, methods):
super().__init__(name, bases, methods)
cls.storage_size=0
class Documentation_parsing(tornado.web.RequestHandler, metaclass=meta_doc):
max_storage_size=200
optimal_storage_size=100
cleaning=False
#classmethod
#tornado.gen.coroutine
def _collect_old_folders(cls):
print('start cleaning')
while True:
if cls.storage_size<cls.optimal_storage_size:
break
delta=random.randint(5, 15)
time.sleep(random.uniform(0.5, 3))
cls.storage_size-=delta
print('Folder have been deleted. Folder size:', cls.storage_size)
yield None
cls.cleaning=False
print('finish cleaning')
#classmethod
def collect_old_folders(cls):
def func(inp):
print('called', inp)
if not cls.cleaning:
cls.cleaning=True
future=cls._collect_old_folders()
print(type(future))
tornado.ioloop.IOLoop.current().add_future(future, func)
#tornado.gen.coroutine
def post(self):
request_id=self.get_body_argument("request_id", default='')
self.__class__.storage_size+=random.randint(5, 15)
if self.storage_size>self.max_storage_size:
self.collect_old_folders()
self.write(json.dumps({'request_id': request_id, 'storage_size': self.storage_size}))
print('process request: request_id {0}, storage size {1}'.format(request_id, self.storage_size))
ApplicationSuffixes=[(r'/main_parsing.*', Documentation_parsing)]
if __name__=='__main__':
app = tornado.web.Application(ApplicationSuffixes)
app.listen(8999)
tornado.ioloop.IOLoop.current().start()
Comments: decorator tornado.gen.coroutine return a function which return a future. So I just need to call method to get future. In 5.0.2 version of Tornado I can add a future directly to IOLoop. The only think is that apart of future I have to pass a function as a second parameter. The function receive Exceptions or result of performance of future.
Hello i'm trying to patch my python code with http://bobrochel.blogspot.com/2010/11/bad-servers-chunked-encoding-and.html but when adding this snippet anywhere in the code I always get invalid syntax. What am I doing wrong?
The start of my code looks like this:
import logging
import argparse
import sys
from arbitrer import Arbitrer
def patch_http_response_read(func):
def inner(*args):
try:
return func(*args)
except httplib.IncompleteRead, e:
return e.partial
return inner
httplib.HTTPResponse.read = patch_http_response_read(httplib.HTTPResponse.read)
class ArbitrerCLI:
def __init__(self):
except doesn't work that way anymore.
except httplib.IncompleteRead as e:
Indent correctly.
The try statement changed in Python 3.x.
import httplib
import logging
import argparse
import sys
from arbitrer import Arbitrer
def patch_http_response_read(func):
def inner(*args):
try:
return func(*args)
except httplib.IncompleteRead as e:
return e.partial
return inner
httplib.HTTPResponse.read = patch_http_response_read(httplib.HTTPResponse.read)
class ArbitrerCLI:
def __init__(self):
...