I am currently working on web scraping with Selenium in Python (3.12), and I ran into an anomaly where Selenium couldn't load the pagination.
I have attached a screenshot of this anomaly:
Here is what it looks like when I operate the web browser myself. There should be around 10 pagination pages.
And here is my code
# funct setup driverdef setup_driver(self): # setup chrome options chrome_options = Options() # set user-agent in the chrome with headless or Incognito mode # chrome_options.add_argument("--headless") chrome_options.add_argument("--incognito") chrome_options.add_argument('start-maximized') # configure and setup the webdriver service = Service(ChromeDriverManager().install()) self.driver = webdriver.Chrome(service=service, options=chrome_options) if self.driver is None: print("Driver doesn't sucessfully initiate") else: print("Driver sucessfully initiate") # verify the user-agent is set correctly current_user_agent = self.driver.execute_script("return navigator.userAgent;") print(f"User Agent: {current_user_agent}")# funct to select dropdown menu for load page def select_dropdown_option(self, dropdown_xpath, option_xpath): try: # for dropdown button dropdown = self.driver.find_element(By.XPATH, dropdown_xpath) dropdown.click() time.sleep(5) # for option button option = self.driver.find_element(By.XPATH, option_xpath) option.click() self.driver.execute_script("arguments[0].click();", option) time.sleep(5) except Exception as e: print(f"Error Selecting Dropdown optiob: {e}") # funct to to select vehicle monitoring menu def select_vehicle_monitoring(self): try: # select vehcile monitoring menu self.driver.find_element(By.XPATH, '//*[@id="app"]/section/section/aside/div/div/div[1]/div/ul/div[2]').click() time.sleep(5) # pop up all tabular data self.driver.find_element(By.XPATH, '//*[@id="app"]/section/section/main/div[2]/div[1]').click() time.sleep(5) # get the 50 rows each pages self.select_dropdown_option('//*[@id="app"]/section/section/main/div[2]/div[2]/div/div[3]/div/div[2]/span[2]/div/div/span/span/i','/html/body/div[5]/div[1]/div[1]/ul/li[5]') time.sleep(1000) except Exception as e: print(f"Error in Selecting Vehicle Monitoring: {e}")
I would like all the pagination to load just as it does when I operate the browser myself; I hope someone can help me fix this issue.
PS: I have tried a different web browser, such as Firefox, but it didn't work as I wanted.