Element not interactable error in Python, Selenium - python-3.x

the code:
import time
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
browser = webdriver.Chrome(r"C:\Users\Desktop\chromedriver.exe")
browser.get('https://www.youtube.com/')
browser.maximize_window()
search = browser.find_element_by_name('search_query')
time.sleep(5)
search.send_keys("shakira waka waka")
search.send_keys(Keys.RETURN)
time.sleep(5)
browser.find_element_by_class_name("style-scope yt-img-shadow").click()
Hello friends, I want to play a video by typing the song name and the artist name into the YouTube search box and then clicking the thumbnail that appears in the results. But the program gives an "element not interactable" error. How can I play the video by clicking the result's thumbnail?

Try the below code:
driver = webdriver.Chrome()
driver.maximize_window()
wait = WebDriverWait(driver, 10)
driver.get('https://www.youtube.com/')
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "input[id='search']"))).send_keys("shakira waka waka")
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "button[id='search-icon-legacy']"))).click()
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "ytd-video-renderer.style-scope.ytd-item-section-renderer"))).click()

Related

selenium webdriver unable to find element from its xpath even though the xpath is correct

I'm trying to get the attributes of a tag using Selenium WebDriver, with XPath as the locator. I gave the XPath to the driver and it returned NoSuchElementException, but when I enter the XPath in the "Inspect element" window, it shows that particular tag, which means the locator does exist. So what's wrong with Selenium? It's still the same even if I give the full XPath.
from selenium import webdriver
driver = webdriver.Chrome('D:\\chromedriver.exe')
driver.get('https://cq-portal.webomates.com/#/login')
element=driver.find_element_by_xpath("//button[@type='button']")
print(element.get_attribute('class'))
driver.quit()
selenium version = 3.141.0
You just need to give the page time to load; your code is otherwise fine. Either add a hard-coded wait like sleep, or wait for the presence of the element. Both will work.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium import webdriver
driver = webdriver.Chrome(PATH)
driver.maximize_window()
wait = WebDriverWait(driver, 20)
driver.get('https://cq-portal.webomates.com/#/login')
wait.until(EC.presence_of_element_located((By.XPATH, "//button[@type='button']")))
element = driver.find_element(By.XPATH, "//button[@type='button']")
print(element.get_attribute('class'))
driver.quit()
Output:
btn btn-md btn-primary btn-block
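For completeness, the hard-coded wait variant mentioned above would look roughly like this (the 5-second delay is an arbitrary assumption, not a recommendation):
import time
from selenium import webdriver
from selenium.webdriver.common.by import By
driver = webdriver.Chrome(PATH)  # PATH as in the snippet above
driver.maximize_window()
driver.get('https://cq-portal.webomates.com/#/login')
time.sleep(5)  # crude fixed delay instead of an explicit wait
element = driver.find_element(By.XPATH, "//button[@type='button']")
print(element.get_attribute('class'))
driver.quit()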
Looks like you are missing a delay.
Please try this:
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium import webdriver
driver = webdriver.Chrome('D:\\chromedriver.exe')
wait = WebDriverWait(driver, 20)
driver.get('https://cq-portal.webomates.com/#/login')
wait.until(EC.visibility_of_element_located((By.XPATH, "//button[@type='button']")))
element=driver.find_element_by_xpath("//button[@type='button']")
print(element.get_attribute('class'))
driver.quit()

Message: no such element (selenium xpath)

I have a problem with selenium find_element_by_xpath.
This is my xpath: //input [@type='text']
And I get this error:
selenium.common.exceptions.NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":"//input [@type='text']"}
from selenium import webdriver
path="C:\Program Files (x86)\chromedriver.exe"
username="123456"
password="123456"
url="https://www.foxesscloud.com/login"
driver=webdriver.Chrome(path)
driver.get(url)
driver.find_element_by_xpath("//input [#type='text']").send_keys("Pies123")
print("done")
print("Log")
It is because the elements are not rendered yet when you try to interact with them. Please use an explicit wait to handle this situation and error.
driver = webdriver.Chrome(driver_path)
driver.maximize_window()
driver.implicitly_wait(30)
wait = WebDriverWait(driver, 30)
driver.get("https://www.foxesscloud.com/login")
wait.until(EC.visibility_of_element_located((By.XPATH, "//input [#type='text']"))).send_keys('Pies123')
wait.until(EC.visibility_of_element_located((By.XPATH, "//input [#type='password']"))).send_keys('Your password here')
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "button.login-click"))).click()
Imports :
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC

Python Selenium Element not found and Not Intractable

I'm trying to scrape moneycontrol.com. When I try to send a value to the search box, I keep getting the same error in the except block: "Element not found".
I tried using the XPath with the id as well as the full XPath, but it doesn't work in either case.
WITHOUT MAXIMIZING THE WINDOW
XPath id - //*[@id="search_str"]
Full XPath - /html/body/div[1]/header/div[1]/div[1]/div/div/div[2]/div/div/form/input[5]
Attaching the full code below:
import time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
def search_stock():
    driver = webdriver.Chrome(r'./chromedriver')
    driver.get('https://www.moneycontrol.com/')
    time.sleep(5)
    search_icon = driver.find_element_by_xpath('//*[@id="fixedheader"]/div[4]/span')
    search_icon.click()
    time.sleep(2)
    try:
        search_box = driver.find_element_by_xpath('//*[@id="search_str"]')
        print("Element is visible? " + str(search_box.is_displayed()))
        time.sleep(10)
        if search_box.is_displayed():
            search_box.send_keys('Zomato')
            search_box.send_keys(Keys.RETURN)
    except NoSuchElementException:
        print("Element not found")
    driver.close()

search_stock()
Sometimes it starts working, but most of the time it throws exceptions and errors. I have been struggling for 3 days, but none of the solutions work.
Web scraping like that seems quite inefficient; it is probably better to use requests and bs4. However, if you want to do it like this, you could try using action chains (a rough sketch is shown below). Or you can do driver.get('https://www.moneycontrol.com/india/stockpricequote/consumer-food/zomato/Z') from the start instead of typing it in.
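A rough sketch of the action-chains idea, reusing the //*[@id="search_str"] locator from the question (illustrative only, not a verified fix):
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
search_box = driver.find_element_by_xpath('//*[@id="search_str"]')
# Move to the input, click to focus it, then type the query and submit in one chain.
ActionChains(driver).move_to_element(search_box).click() \
    .send_keys('Zomato').send_keys(Keys.RETURN).perform()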
You may want to try the below code:
def search_stock():
    driver = webdriver.Chrome(r'./chromedriver')
    driver.maximize_window()
    driver.implicitly_wait(30)
    driver.get('https://www.moneycontrol.com/')
    wait = WebDriverWait(driver, 10)
    time.sleep(5)
    try:
        ActionChains(driver).move_to_element(wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "input[id='search_str']")))).perform()
        search_box = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "input[id='search_str']")))
        print("Element is visible? ", search_box.is_displayed())
        time.sleep(10)
        if search_box.is_displayed():
            search_box.send_keys('Zomato')
            search_box.send_keys(Keys.RETURN)
    except NoSuchElementException:
        print("Element not found")
Imports :
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
Try clicking on the search_box first and only then sending text to it.
search_box = driver.find_element_by_xpath('//form[#id="form_topsearch"]//input[#id="search_str"]')
search_box.click()
time.sleep(0.1)
search_box.send_keys('Zomato')
search_box.send_keys(Keys.RETURN)
Additionally, I would advise using explicit waits with expected conditions instead of hardcoded sleeps.
With them your code will be faster and more reliable.
import time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def search_stock():
    driver = webdriver.Chrome(r'./chromedriver')
    wait = WebDriverWait(driver, 20)
    driver.get('https://www.moneycontrol.com/')
    wait.until(EC.element_to_be_clickable((By.XPATH, '//*[@id="fixedheader"]/div[4]/span'))).click()
    search_box = wait.until(EC.element_to_be_clickable((By.XPATH, '//form[@id="form_topsearch"]//input[@id="search_str"]')))
    search_box.send_keys('Zomato')
    search_box.send_keys(Keys.RETURN)
    # I'm not sure you should close the driver immediately after invoking the search....
    # driver.close()

search_stock()
UPD
Let's try this
import time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
def search_stock():
    driver = webdriver.Chrome(r'./chromedriver')
    wait = WebDriverWait(driver, 20)
    actions = ActionChains(driver)
    driver.get('https://www.moneycontrol.com/')
    search_icon = wait.until(EC.presence_of_element_located((By.XPATH, '//*[@id="fixedheader"]/div[4]/span')))
    time.sleep(0.5)
    driver.execute_script("arguments[0].scrollIntoView();", search_icon)
    driver.execute_script("arguments[0].click();", search_icon)
    search_box = wait.until(EC.presence_of_element_located((By.XPATH, '//form[@id="form_topsearch"]//input[@id="search_str"]')))
    driver.execute_script("arguments[0].scrollIntoView();", search_box)
    driver.execute_script("arguments[0].click();", search_box)
    time.sleep(0.5)
    search_box.send_keys('Zomato')
    search_box.send_keys(Keys.RETURN)
    # I'm not sure you should close the driver immediately after invoking the search....
    # driver.close()

search_stock()
If the above solution still does not work, instead of
actions.move_to_element(search_box).click().perform()
try
driver.execute_script("arguments[0].click();", search_box)

Click on show more button; selenium scrape with python

I'm trying to scrape a website with a "show more" button, and I'm not able to click on it.
The website is: https://www.wtatennis.com/rankings/singles
And my code is:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver import ActionChains
from tqdm import tqdm
import time
options = Options()
options.add_argument("--headless")
browser = webdriver.Chrome(ChromeDriverManager().install(),options=options)
browser.get('https://www.wtatennis.com/rankings/singles')
action = ActionChains(browser)
showmore = browser.find_elements_by_xpath(".//button[contains(@class, 'btn widget-footer__more-button rankings__show-more js-show-more-button')]")
action.move_to_element(showmore).perform()
showmore.click()
time.sleep(5)
Has anyone any idea? Thanks!
Don't use ".//" in your locator when you are starting the search from the root; since there is no current element, your locator won't find any element. Also, you can use any attribute to find elements uniquely. See the code below:
browser = webdriver.Chrome(options=options)
browser.get('https://www.wtatennis.com/rankings/singles')
WebDriverWait(browser, 10).until(EC.element_to_be_clickable((By.XPATH,
    '//*[@data-text="Accept Cookies"]'))).click()
WebDriverWait(browser, 10).until(EC.element_to_be_clickable((By.XPATH,
    '//*[@data-text = "Show More"]'))).click()
Use WebDriverWait and data attributes.
To use the wait, import:
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
To wait till all elements are loaded, you have to make sure the last element is not changing; if it's changing, keep scrolling.
browser.get('https://www.wtatennis.com/rankings/singles')
WebDriverWait(browser, 10).until(EC.element_to_be_clickable((By.XPATH,
    '//*[@data-text="Accept Cookies"]'))).click()
value = "start"
WebDriverWait(browser, 10).until(EC.element_to_be_clickable((By.XPATH,
    '//*[@data-text = "Show More"]'))).click()
while browser.find_element_by_xpath("(//tr[@class='rankings__row'])[last()]").text != value:
    elem = browser.find_element_by_xpath('(//*[contains(text(),"Loading")])[2]')
    value = browser.find_element_by_xpath("(//tr[@class='rankings__row'])[last()]").text
    browser.execute_script("arguments[0].scrollIntoView()", elem)
    WebDriverWait(browser, 10).until(EC.presence_of_all_elements_located((By.XPATH,
        "//tr[@class='rankings__row']")))
    try:
        WebDriverWait(browser, 10).until_not(EC.text_to_be_present_in_element((By.XPATH,
            "(//tr[@class='rankings__row'])[last()]"), value))
    except:
        pass

Python Selenium Vanguard: NoSuchElementException: no such element: Unable to locate element with Selenium and Python

I am trying to download history data by clicking on the link to historical data. However, even though the XPath is correct, I get this error:
NoSuchElementException: no such element: Unable to locate element.
Code trials:
from selenium.webdriver.support.ui import WebDriverWait
from selenium import webdriver
driver = webdriver.Chrome(executable_path='/Users/Documents/Coding/chromedriver')
url = "https://www.vanguardinvestor.co.uk/investments/vanguard-lifestrategy-100-equity-fund-accumulation-shares/price-performance?intcmpgn=blendedlifestrategy_lifestrategy100equityfund_fund_link"
driver.get(url)
wait = WebDriverWait(driver, 10)
elem = driver.find_element_by_xpath("//*[@id='prices-and-performance-tab']/div/div[4]/div[3]/div[1]/div[1]/div[3]/div/div/div[2]/div/table/tfoot/tr/td/a")
webdriver.ActionChains(driver).move_to_element(elem).click(elem).perform()
To click on the element, wait for the page to load and for the element to be clickable, and then click. Try the entire snippet.
from selenium.webdriver.support.ui import WebDriverWait
from selenium import webdriver
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
driver = webdriver.Chrome(executable_path='/Users/Documents/Coding/chromedriver')
url = "https://www.vanguardinvestor.co.uk/investments/vanguard-lifestrategy-100-equity-fund-accumulation-shares/price-performance?intcmpgn=blendedlifestrategy_lifestrategy100equityfund_fund_link"
driver.get(url)
element = WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH, "//span[text()[contains(.,'Price & Performance')]]")))
element.click()
To click on the element with the text Search for more historical prices, you need to induce WebDriverWait for the element to be clickable, and you can use the following solution:
Code Block:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
options = webdriver.ChromeOptions()
options.add_argument("--start-maximized")
options.add_argument("--disable-extensions")
driver = webdriver.Chrome(chrome_options=options, executable_path = r'C:\Utility\BrowserDrivers\chromedriver.exe' )
driver.get("https://www.vanguardinvestor.co.uk/investments/vanguard-lifestrategy-100-equity-fund-accumulation-shares/price-performance?intcmpgn=blendedlifestrategy_lifestrategy100equityfund_fund_link")
WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH, "//button[#id='bannerButton']"))).click()
more_historical_prices = WebDriverWait(driver, 20).until(EC.visibility_of_element_located((By.LINK_TEXT, "Search for more historical prices")))
driver.execute_script("arguments[0].scrollIntoView(true);", more_historical_prices)
WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.LINK_TEXT, "Search for more historical prices"))).click()
