When using the code below, it won't scrape more than 15 href URLs, even though the page contains well over 100 of them.
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
username_xpath = '/html/body/div/div/div/div[1]/div/div/div/div/div/div/div[2]/div[2]/div/div/div[2]/div[2]/div/div/div/div[4]/label/div/div[2]/div/input'
button_xpath = '/html/body/div/div/div/div[1]/div/div/div/div/div/div/div[2]/div[2]/div/div/div[2]/div[2]/div/div/div/button[2]/div'
password_xpath = '//*[@id="layers"]/div/div/div/div/div/div/div[2]/div[2]/div/div/div[2]/div[2]/div[1]/div/div/div[3]/div/label/div/div[2]/div[1]/input'
login_button_xpath = '/html/body/div/div/div/div[1]/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[2]/div[2]/div[2]/div/div[1]/div/div/button/div'
driver = webdriver.Chrome()
driver.get("https://twitter.com/login")
username_input = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, username_xpath)))
username_input.send_keys("USERNAME")
next_button = driver.find_element(By.XPATH, button_xpath)
next_button.click()
password_input = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, password_xpath)))
password_input.send_keys("PASSWORD")
login_button = driver.find_element(By.XPATH, login_button_xpath)
login_button.click()
time.sleep(5)
driver.get('https://x.com/convomf/status/1785644297448251638')
time.sleep(3)
prev_height = driver.execute_script("return document.body.scrollHeight")
while True:
    driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
    time.sleep(7)
    new_height = driver.execute_script("return document.body.scrollHeight")
    if new_height == prev_height:
        break
    prev_height = new_height
anchor_elements = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.XPATH, '//div[@class="css-175oi2r r-1iusvr4 r-16y2uox r-1777fci r-kzbkwu"]//a[@class="css-175oi2r r-1pi2tsx r-1ny4l3l r-1loqt21"]')))
hrefs = []
for anchor_element in anchor_elements:
    href = anchor_element.get_attribute('href')
    hrefs.append(href)
print(hrefs)
I'm expecting it to scrape 100 or so hrefs, not just 5 or 15.
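Is the problem that only a small number of tweet anchors are kept in the DOM at any one time, so grabbing them once after scrolling only sees the last batch? As a minimal sketch of the workaround I have in mind, I would collect the hrefs on every scroll step into a set instead of once at the end. The function name collect_status_links, the pause/max_rounds values, and the //article//a[contains(@href, "/status/")] selector below are my own placeholders, not taken from the page:

from selenium.webdriver.common.by import By
import time

def collect_status_links(driver, pause=7, max_rounds=50):
    # Placeholder XPath guess: any status link inside a rendered tweet <article>
    link_xpath = '//article//a[contains(@href, "/status/")]'
    seen = set()  # dedupe hrefs across scroll iterations
    prev_height = driver.execute_script("return document.body.scrollHeight")
    for _ in range(max_rounds):
        # grab whatever anchors are currently rendered before they get recycled
        for a in driver.find_elements(By.XPATH, link_xpath):
            href = a.get_attribute('href')
            if href:
                seen.add(href)
        driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
        time.sleep(pause)
        new_height = driver.execute_script("return document.body.scrollHeight")
        if new_height == prev_height:
            break
        prev_height = new_height
    return seen

I would then call it after login with the same driver, e.g. hrefs = collect_status_links(driver) and print(len(hrefs)). Is collecting during the scroll the right approach, or is something else limiting the count?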