PhantomJS python issue - python-3.x

My Python Selenium tests are working with the Firefox driver (GUI) without any issue. But I wanted to run my tests in headless mode. When I try to run the same script in headless mode (with a few modifications), it gives weird errors.
Ex:
selenium.common.exceptions.NoSuchElementException: Message{"errorMessage":"Unable to find element with id 'ext-gen1499
python script :
import os
import time
from selenium.webdriver.common.proxy import *
from selenium.webdriver.common.by import By
phantomjs_path=r"/home/xxxx/nodejs-installs/phantomjs-2.1.1-linux-x86_64/bin/phantomjs"
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
service_args = ['--proxy=x.x.x.x:80','--proxy-type=https']
driver = webdriver.PhantomJS(executable_path=r'/home/xxxx/nodejs-installs/phantomjs-2.1.1-linux-x86_64/bin/phantomjs',service_args=service_args)
os.environ['MOZ_HEADLESS'] = '1'
driver.get("https://aaaaa.com")
def Login():
    """Fill in and submit the login form on the page already loaded in `driver`.

    Returns:
        int: 1 if all three form interactions succeeded, 0 otherwise.
    """
    try:
        # "@" was garbled to "#" in the original paste — restored here.
        driver.find_element_by_id("username").send_keys("test@aaaa.com")
        driver.find_element_by_id("password").send_keys("xxxxxxx")
        driver.find_element_by_id("Submit").click()
        return 1
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed SystemExit /
        # KeyboardInterrupt); the original's `return` inside `finally` also
        # silently discarded any in-flight exception — both fixed.
        print("Error Loading Login page")
        return 0
def CreateMail():
    """Open the compose window and send a test mail via the webmail UI.

    Waits up to 3 seconds for the "new mail" button before clicking it.
    On timeout the function returns early instead of blindly continuing —
    the original fell through and then crashed with NoSuchElementException
    on the compose-form fields.
    """
    try:
        WebDriverWait(driver, 3).until(
            EC.presence_of_element_located((By.ID, "button-1143-btnInnerEl")))
        driver.find_element_by_id("button-1143-btnInnerEl").click()
    except TimeoutException:
        print("Loading took too much time - Create New Mail")
        return  # compose window never appeared; nothing more to do
    # NOTE(review): ids like "ext-gen1499" / "button-1143-*" look like
    # auto-generated ExtJS ids, which change between page loads — presumably
    # the cause of the headless NoSuchElementException; prefer stable locators.
    driver.find_element_by_id("ext-gen1499").send_keys("test@test.com")
    driver.find_element_by_id("textfield-1265-inputEl").send_keys("Automated Test Mail from Selenium")
    driver.find_element_by_id("button-1252-btnIconEl").click()
Am i missing anything ?

It is good practice to add an implicit wait of at least 10 seconds, to allow the target page element(s) to load completely.
driver.implicitly_wait(10)

Related

Login into a Website with data from csv file with python selenium

I am trying to create a script that automatically logs in different accounts to a website, the login data should be taken from a CSV file. Unfortunately I get no result, maybe someone has a solution for me?
import pandas as pd
import os
from selenium import webdriver
from twocaptcha import TwoCaptcha
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
# Load the credential list; skip the header row. The original paste had the
# brackets escaped ("\[0\]"), which is invalid Python — restored here.
loginlist = pd.read_csv('/Volumes/GoogleDrive/Meine Ablage/Privat/py_scripts/login.csv',
                        header=None, skiprows=[0], sep=',')
print(loginlist)

driver = webdriver.Chrome()
driver.get("https://freebitco.in/signup/?op=s")
time.sleep(6)
# Dismiss any modal/overlay with ESC before interacting with the page.
webdriver.ActionChains(driver).send_keys(Keys.ESCAPE).perform()
time.sleep(1)
# Open the login form.
login = driver.find_element(By.CLASS_NAME, "login_menu_button")
login.click()
time.sleep(3)
def fulfill_form(email, password):
    """Type the given credentials into the already-open login form.

    Relies on the module-level `driver` and the hard-coded field ids of the
    site's login form. Sleeps are kept to mirror the original pacing.
    """
    email_field = driver.find_element(By.ID, 'login_form_btc_address')
    password_field = driver.find_element(By.ID, 'login_form_password')
    email_field.send_keys(email)
    time.sleep(1)
    password_field.send_keys(password)
    time.sleep(5)
failed_attempts = []
# Iterate over the DataFrame rows. The original looped `for customer in
# loginlist:` (which yields column labels, not rows) and then passed the whole
# columns `loginlist[0]` / `loginlist[1]` to fulfill_form — which is why no
# login ever happened. iterrows() yields one (index, row) pair per account.
for _, customer in loginlist.iterrows():
    try:
        fulfill_form(str(customer[0]), str(customer[1]))
    except Exception:
        # Record the account that failed and continue with the rest.
        failed_attempts.append(customer[0])
if len(failed_attempts) > 0:
    print("{} cases have failed".format(len(failed_attempts)))
print("Procedure concluded")
I tried several older solutions from other posts, unfortunately they led nowhere.

Python Selenium Element not found and Not Intractable

I'm trying to scrape moneycontrol.com. When I try to send a value to the search box, I keep getting the same error in the except block: "Element not Found".
I tried using XPath id as well as using the full XPath but in both cases, it doesn't work.
WITHOUT MAXIMIZING THE WINDOW
XPath id - //*[@id="search_str"]
Full XPath - /html/body/div[1]/header/div[1]/div[1]/div/div/div[2]/div/div/form/input[5]
Attaching the full code below:
import time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
def search_stock():
    """Open moneycontrol.com and search for 'Zomato' via the header search box.

    The XPath attribute marker was garbled from "@" to "#" in the original
    paste — `//*[#id=...]` is not valid XPath; restored to `@id` here.
    """
    driver = webdriver.Chrome(r'./chromedriver')
    driver.get('https://www.moneycontrol.com/')
    time.sleep(5)
    # The search input is hidden until the header search icon is clicked.
    search_icon = driver.find_element_by_xpath('//*[@id="fixedheader"]/div[4]/span')
    search_icon.click()
    time.sleep(2)
    try:
        search_box = driver.find_element_by_xpath('//*[@id="search_str"]')
        print("Element is visible? " + str(search_box.is_displayed()))
        time.sleep(10)
        if search_box.is_displayed():
            search_box.send_keys('Zomato')
            search_box.send_keys(Keys.RETURN)
    except NoSuchElementException:
        print("Element not found")
    driver.close()

search_stock()
Sometimes it starts working, but most of the time it throws exceptions and errors. I have been struggling for 3 days, but none of the solutions are working.
Web scraping like that seems quite inefficient; it is probably better to use requests and bs4. However, if you want to do it like this, you could try using action chains, found here. Or you can do driver.get('https://www.moneycontrol.com/india/stockpricequote/consumer-food/zomato/Z') from the start instead of typing it in.
You may wanna try the below code :
def search_stock():
    """Hover over the search input and type a query, with explicit waits."""
    # Local import: WebDriverWait.until raises TimeoutException, which the
    # original code never caught (it only caught NoSuchElementException, so a
    # wait timeout escaped the try block uncaught).
    from selenium.common.exceptions import TimeoutException

    driver = webdriver.Chrome(r'./chromedriver')
    driver.maximize_window()
    driver.implicitly_wait(30)
    driver.get('https://www.moneycontrol.com/')
    wait = WebDriverWait(driver, 10)
    time.sleep(5)
    try:
        # Move the mouse to the search input so it becomes interactable.
        ActionChains(driver).move_to_element(
            wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "input[id='search_str']")))).perform()
        search_box = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "input[id='search_str']")))
        print("Element is visible? ", search_box.is_displayed())
        time.sleep(10)
        if search_box.is_displayed():
            search_box.send_keys('Zomato')
            search_box.send_keys(Keys.RETURN)
    except (NoSuchElementException, TimeoutException):
        print("Element not found")
Imports :
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
Try clicking on search_box and only after that sending text there.
# Click the search box first so it gains focus, then type the query.
# ("@" was garbled to "#" in the original paste; //form[#id=...] is invalid
# XPath — restored to @id here.)
search_box = driver.find_element_by_xpath('//form[@id="form_topsearch"]//input[@id="search_str"]')
search_box.click()
time.sleep(0.1)
search_box.send_keys('Zomato')
search_box.send_keys(Keys.RETURN)
Additionally I would advise you using explicit waits of expected conditions instead of hardcoded sleeps.
With it your code will be faster and more reliable.
import time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def search_stock():
    """Search for 'Zomato' on moneycontrol.com using explicit waits only.

    Fixes from the pasted original: a missing closing parenthesis on the
    first wait.until(...) line, and XPath "@" markers garbled to "#".
    """
    driver = webdriver.Chrome(r'./chromedriver')
    wait = WebDriverWait(driver, 20)
    driver.get('https://www.moneycontrol.com/')
    # Click the header search icon once it is clickable.
    wait.until(EC.element_to_be_clickable((By.XPATH, '//*[@id="fixedheader"]/div[4]/span'))).click()
    search_box = wait.until(EC.element_to_be_clickable(
        (By.XPATH, '//form[@id="form_topsearch"]//input[@id="search_str"]')))
    search_box.send_keys('Zomato')
    search_box.send_keys(Keys.RETURN)
    # I'm not sure you should close the driver immediately after invoking
    # the search...
    # driver.close()

search_stock()
UPD
Let's try this
import time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
def search_stock():
    """Search for 'Zomato' using JS-driven scroll + click as a fallback.

    Fixes from the pasted original: the first wait.until(...) had a missing
    closing parenthesis AND a trailing `.click()`, so `search_icon` was the
    click's return value (None) and the later JS calls operated on None;
    XPath "@" markers were garbled to "#".
    """
    driver = webdriver.Chrome(r'./chromedriver')
    wait = WebDriverWait(driver, 20)
    actions = ActionChains(driver)  # kept for the move_to_element alternative
    driver.get('https://www.moneycontrol.com/')
    search_icon = wait.until(EC.presence_of_element_located(
        (By.XPATH, '//*[@id="fixedheader"]/div[4]/span')))
    time.sleep(0.5)
    # Scroll the icon into view and click it via JavaScript.
    driver.execute_script("arguments[0].scrollIntoView();", search_icon)
    driver.execute_script("arguments[0].click();", search_icon)
    search_box = wait.until(EC.presence_of_element_located(
        (By.XPATH, '//form[@id="form_topsearch"]//input[@id="search_str"]')))
    # NOTE(review): the original scrolls/clicks search_icon a second time here —
    # presumably search_box was intended; kept as-is pending confirmation.
    driver.execute_script("arguments[0].scrollIntoView();", search_icon)
    driver.execute_script("arguments[0].click();", search_icon)
    time.sleep(0.5)
    search_box.send_keys('Zomato')
    search_box.send_keys(Keys.RETURN)
    # I'm not sure you should close the driver immediately after invoking
    # the search...
    # driver.close()

search_stock()
If the above solution is still not working instead of
actions.move_to_element(search_box).click().perform()
try
driver.execute_script("arguments[0].click();", search_box)

python: Selenium saves empty file

I am trying to download a file from the following link by clicking the download button: https://www.investing.com/equities/oil---gas-dev-historical-data
Here is my code:
from datetime import date
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import ElementClickInterceptedException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
# Configure Firefox to save CSV downloads silently, without a dialog.
profile = webdriver.FirefoxProfile()
profile.set_preference("browser.preferences.instantApply", True)
profile.set_preference("browser.helperApps.neverAsk.saveToDisk", "text/csv")
profile.set_preference("browser.helperApps.alwaysAsk.force", False)
profile.set_preference("browser.download.manager.showWhenStarting", False)
profile.set_preference("browser.download.folderList", 0)
driver = webdriver.Firefox(firefox_profile=profile)
driver.get('https://www.investing.com/equities/oil---gas-dev-historical-data')

# Dismiss the large banner popup if it appears within 60 seconds.
try:
    popup = WebDriverWait(driver, 60).until(
        EC.visibility_of_element_located((By.CSS_SELECTOR, "i[class*='largeBannerCloser']")))
    popup.click()
except TimeoutException as to:
    print(to)

# Log in (the download link only yields data for a signed-in user).
# XPath "@" markers were garbled to "#" in the original paste — restored.
driver.find_element_by_css_selector("a[class*='login']").click()
driver.find_element_by_id('loginFormUser_email').send_keys('myemail')
driver.find_element_by_id('loginForm_password').send_keys('pass')
driver.find_element_by_xpath(
    "//div[@id='loginEmailSigning']//following-sibling::a[@class='newButton orange']").click()

# Widen the historical date range before downloading.
driver.find_element_by_id('flatDatePickerCanvasHol').click()
start_date = driver.find_element_by_id('startDate')
start_date.send_keys(Keys.BACKSPACE * 10)  # clear the current dd/mm/yyyy value
start_date.send_keys(date(2014, 1, 1).strftime("%d/%m/%Y"))
driver.find_element_by_id('applyBtn').click()
driver.find_element_by_css_selector('a.newBtn.LightGray.downloadBlueIcon.js-download-data').click()
But it always saves empty file? How can I avoid this behavior?
The page seems to export data displayed on the page only. adding a check to just wait for the table to load.
After changing the date range helped in downloading the data successfully.
# Wait for the results table to have at least one row before downloading —
# the export reflects only what is rendered on the page.
# ("@" was garbled to "#" in the original paste; restored to @id.)
try:
    popup = WebDriverWait(driver, 60).until(
        EC.presence_of_element_located((By.XPATH, "//div[@id='results_box']//tbody//tr")))
except TimeoutException as to:
    print(to)
driver.find_element_by_css_selector('a.newBtn.LightGray.downloadBlueIcon.js-download-data').click()

Selenium unable to find element of icon

My code so far; the result returned is that the element could not be found.
import time
from selenium import webdriver
# Launch Firefox via the local geckodriver binary.
driver = webdriver.Firefox(executable_path="C:/geckodriver")
dominos_pg = "https://www.dominos.ca/pages/order/#!/locations/search/"
driver.get(dominos_pg)
# Fixed wait for the page to render.
time.sleep(5)
# NOTE(review): find_element_by_class_name accepts a single class name only;
# passing a space-separated compound class like this is presumably why the
# element is reported as not found — confirm against the Selenium docs.
elem_class = driver.find_element_by_class_name("Carryout c-carryout circ-icons__icon circ-icons__icon--carryout")
Any advice/suggestions appreciated.
Try This:
import time
from selenium import webdriver
# Launch Firefox via the local geckodriver binary.
driver = webdriver.Firefox(executable_path="C:/geckodriver")
dominos_pg = "https://www.dominos.ca/pages/order/#!/locations/search/"
driver.get(dominos_pg)
time.sleep(5)
# NOTE(review): this repeats the question's code unchanged — a compound
# (space-separated) class is still being passed to find_element_by_class_name,
# which expects a single class name; verify this actually locates anything.
elem_class = driver.find_element_by_class_name("Carryout c-carryout circ-icons__icon circ-icons__icon--carryout")
print(elem_class)
If that doesn't work, it means your URL is not correct, because I see there is no element "Carryout".
Your URL is incorrect. Please find the working solutions below:
Solution 1:
from selenium.webdriver.common.by import By
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait as Wait
driver = webdriver.Chrome(executable_path=r"C:\New folder\chromedriver.exe")
driver.maximize_window()
driver.get("https://www.dominosaruba.com/en/pages/order/#!/locations/search/?type=Carryout")
# The locator widget lives inside a frame; switch to it by index first.
driver.switch_to.frame(2);
# Wait up to 20s for the Carryout option, located by its visible text.
CarryoutElement=WebDriverWait(driver, 20).until(
EC.presence_of_element_located((By.XPATH, "//span[contains(text(),'Carryout')]")))
CarryoutElement.click()
Solution 2:
# Locate the Carryout icon by its full class attribute value.
# ("@" was garbled to "#" in the original paste; //span[#class=...] is
# invalid XPath — restored to @class.)
CarryoutElement = WebDriverWait(driver, 20).until(
    EC.presence_of_element_located(
        (By.XPATH, "//span[@class='Carryout c-carryout circ-icons__icon circ-icons__icon--carryout']")))
CarryoutElement.click()
solution 3:
# Locate the Carryout icon by its data-quid attribute — the most stable of
# the three locators. ("@" was garbled to "#" in the original paste;
# restored to @data-quid.)
CarryoutElement = WebDriverWait(driver, 20).until(
    EC.presence_of_element_located(
        (By.XPATH, "//span[@data-quid='easy-order-locator-carryout']")))
CarryoutElement.click()
After much browsing, I ended up using an XPath that goes through the label.

Issue with login using selenium and python

I am getting following error
no such element: Unable to locate element: {"method":"xpath","selector":"//input[@placeholder='User ID']"}
(Session info: chrome=78.0.3904.70). Let me know how I can pass the user ID here.
Without additional context, I can only recommend that you wait on the element before sending keys to it:
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
# Wait up to 30s for the User ID field to be visible before typing.
# ("@" was garbled to "#" in the original paste — restored; also renamed the
# variable so it no longer shadows the builtin `input`.)
user_id_input = WebDriverWait(driver, 30).until(
    EC.visibility_of_element_located((By.XPATH, "//input[@placeholder='User ID']")))
user_id_input.send_keys("userId")
Full working sample as requested by asker:
from selenium import webdriver
from time import sleep
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
driver = webdriver.Chrome()
driver.get("https://kite.zerodha.com/connect/login?api_key=b8w63qg9m4c3zubd&sess_id=bW3U1OwidO97o11scfeTbyfX4j5tViNp")
# Wait up to 30s for the User ID field to be visible, then type into it.
# ("@" was garbled to "#" in the original paste — restored; variable renamed
# so it no longer shadows the builtin `input`.)
user_id_input = WebDriverWait(driver, 30).until(
    EC.visibility_of_element_located((By.XPATH, "//input[@placeholder='User ID']")))
user_id_input.send_keys("userId")
sleep(10)  # this sleep is here so you can visually verify the text was sent.
driver.close()
driver.quit()
The above code has succeeded every time I have run it.

Resources