V.Anh V.Anh - 3 years ago 224
Python Question

Selenium: Element not clickable at point (x, y)

I have this site https://www.inc.com/inc5000/list/2017 where I want my script to insert a number in PAGE field and click GO, but I keep getting error:

File "/Users/anhvangiang/Desktop/PY/inc.py", line 34, in scrape
driver.find_element_by_xpath('//*[@id="page-input-button"]').click()
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/selenium/webdriver/remote/webelement.py", line 77, in click
self._execute(Command.CLICK_ELEMENT)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/selenium/webdriver/remote/webelement.py", line 493, in _execute
return self._parent.execute(command, params)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/selenium/webdriver/remote/webdriver.py", line 256, in execute
self.error_handler.check_response(response)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/selenium/webdriver/remote/errorhandler.py", line 194, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.WebDriverException: Message: unknown error:
Element is not clickable at point (741, 697)
(Session info: chrome=61.0.3163.100)
(Driver info: chromedriver=2.30.477690
(c53f4ad87510ee97b5c3425a14c0e79780cdf262),platform=Mac OS X 10.12.6 x86_64)


This is my code:

# NOTE(review): indentation was lost when this snippet was pasted into the
# question; the lines below all belong inside scrape(). Code reproduced
# verbatim.
def scrape(num):
# Starts a brand-new ChromeDriver session on every call -- expensive, and the
# driver is never quit, so each call leaks a browser process.
driver = webdriver.Chrome('/Users/anhvangiang/Desktop/PY/chromedriver')
# `main_site` is presumably a module-level global defined elsewhere in the
# script (not shown in this snippet).
driver.get(main_site)
# Type the requested page number into the pagination input field.
driver.find_element_by_id('page-input-field').send_keys(str(num))
# Tries to dismiss the welcome/ad overlay; if the overlay is absent this line
# raises NoSuchElementException, and if it appears late the click misses it.
driver.find_element_by_xpath('//*[@id="Welcome-59"]/div[2]/div[1]/span[2]').click()
time.sleep(5)
# This is line 34 of the traceback: the click fails with "Element is not
# clickable at point (x, y)" whenever the popup ad still covers the GO button.
driver.find_element_by_xpath('//*[@id="page-input-button"]').click()
# Hand the rendered page over to BeautifulSoup for parsing.
soup = BS(driver.page_source, 'lxml')
container = soup.find('section', {'id': 'data-container'})
# `source` is another global (not shown); it is prepended to each company href.
return [source + tag.find('div', {'class': 'col i5 company'}).find('a')['href'] for tag in container.findAll('div', {'class': 'row'})]


If I put the function scrape inside a loop:

# Python 2 print *statement* (the traceback above shows CPython 2.7); the
# loop body below belongs inside the for -- indentation was lost in the paste.
for i in range(1, 100):
print scrape(i)


For the first few values of i it runs smoothly, but then it throws the error shown above.

Any suggestion how I can fix it?

Answer Source

This happens because the button is not visible at that moment, so the Selenium WebDriver cannot interact with it. When I ran your code on my local machine, I found that the website shows a popup ad for 15–20 seconds (see the attached image: Popup_Ad), which is the actual cause of this error. To resolve it, you have to handle the popup ad, which you can do as follows:

Check for the SKIP button; if the button exists, first dismiss the ad by clicking the SKIP button, then follow the normal flow of the code.

Other suggestions: you should use WebDriverWait to avoid issues like "element not found" and "element is not clickable". For example, you can rewrite the code above as:

from selenium import webdriver
from bs4 import BeautifulSoup as BS
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support import expected_conditions as EC    
import time

def scrape(num, wdriver):
    """Open the Inc. 5000 2017 list, jump to page *num*, and return the
    company-profile URLs found on that page.

    Parameters
    ----------
    num : int
        1-based page number typed into the pagination input field.
    wdriver : selenium WebDriver
        An already-created driver instance, reused across calls so the
        browser start-up cost is paid only once.

    Returns
    -------
    list of str
        The ``href`` of each company link inside the data container.

    Raises
    ------
    TimeoutException
        If the GO button never becomes clickable after several retries.
    """
    driver = wdriver

    # Navigate to the target URL.
    driver.get("https://www.inc.com/inc5000/list/2017")

    # Shared explicit wait: 10 s is the ceiling; polling returns sooner.
    wait = WebDriverWait(driver, 10)

    # BUG FIX: the original code contained
    #     while True:
    #         if EC.presence_of_all_elements_located: break
    # EC.presence_of_all_elements_located is a *class object*, which is
    # always truthy, so the loop exited on the first iteration and waited
    # for nothing. The explicit waits below make it redundant; removed.

    # Handle the popup ad: if the SKIP button becomes clickable, click it;
    # if it never appears within the wait window, continue normally.
    try:
        skip_button = wait.until(EC.element_to_be_clickable(
            (By.XPATH, "//*[@id='Welcome-59']/div[2]/div[1]/span[2]")))
        skip_button.click()
        print("\nSkip Button Clicked")
    except TimeoutException:
        pass

    # Find the page-number input once it is visible, then fill it in.
    page_num_elem = wait.until(
        EC.visibility_of_element_located((By.ID, "page-input-field")))
    page_num_elem.clear()
    # send_keys expects text, not an int -- convert explicitly.
    page_num_elem.send_keys(str(num))

    # Click GO. Retry a bounded number of times instead of the original
    # unbounded `while True`, which could hang the script forever.
    for _ in range(5):
        try:
            go_button = wait.until(
                EC.element_to_be_clickable((By.ID, "page-input-button")))
            go_button.click()
            break
        except TimeoutException:
            print("Retrying...")
    else:
        raise TimeoutException("GO button never became clickable")

    # Give the results table a moment to re-render before scraping it.
    time.sleep(2)
    soup = BS(driver.page_source, 'lxml')
    container = soup.find('section', {'id': 'data-container'})
    return [tag.find('div', {'class': 'col i5 company'}).find('a')['href']
            for tag in container.findAll('div', {'class': 'row'})]


if __name__ == "__main__":

    # Create one WebDriver instance and reuse it for every page.
    driver = webdriver.Chrome()
    try:
        # BUG FIX: the original iterated range(5), passing 0..4 to scrape()
        # while printing num+1 -- so page 0 was typed into the input field
        # on the first iteration. Iterate the real 1-based page numbers.
        for num in range(1, 6):
            print("\nPage Number : %s" % num)
            print(scrape(num, driver))
            print(90 * "-")
    finally:
        # Always release the browser, even if a scrape raises.
        driver.quit()
Recommended from our users: Dynamic Network Monitoring from WhatsUp Gold from IPSwitch. Free Download