I saw some posts with possible solutions to this particular issue, but none of them seem to work. From what I’ve checked so far, adding this line: options.add_experimental_option('excludeSwitches', ['enable-logging'])
should remove all these errors, but it actually doesn’t.
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from bs4 import BeautifulSoup
import pandas as pd
def scrape_gas_spot_prices(url):
    """Scrape the natural-gas spot quote table from *url* with headless Chrome.

    Waits up to 10 seconds for at least one ``mv-quote-row`` element to render,
    then parses the ``mv-quote`` table out of the page source.

    Args:
        url: Page to load (expected to contain a ``table.mv-quote``).

    Returns:
        A list of rows, each a list of stripped cell-text strings. Returns an
        empty list if the quote table is not present in the rendered page.

    Raises:
        selenium.common.exceptions.TimeoutException: if no quote row appears
            within 10 seconds.
    """
    options = webdriver.ChromeOptions()
    options.add_argument("--headless")
    options.add_argument("--disable-gpu")
    options.add_argument("--ignore-certificate-errors")
    options.add_argument("--allow-running-insecure-content")
    # Suppress chromedriver's console log spam (DevTools/USB messages).
    options.add_experimental_option('excludeSwitches', ['enable-logging'])
    driver = webdriver.Chrome(options=options)
    try:
        driver.get(url)
        # The table is populated by JavaScript; wait for the first data row.
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.CLASS_NAME, "mv-quote-row"))
        )
        soup = BeautifulSoup(driver.page_source, 'html.parser')
        table = soup.find('table', class_='mv-quote')
        if table is None:
            # Layout changed or the table never rendered: no rows to report.
            return []
        all_values = []
        for row in table.find_all('tr', class_='mv-quote-row'):
            row_values = [cell.text.strip() for cell in row.find_all('td')]
            all_values.append(row_values)
        return all_values
    finally:
        # Always shut the browser down, even on timeout/parse errors,
        # so headless Chrome processes are not leaked.
        driver.quit()
def _main():
    """Fetch the EEX natural-gas spot table and print it as a DataFrame."""
    target = "https://www.eex.com/en/market-data/natural-gas/spot"
    rows = scrape_gas_spot_prices(target)
    headers = ["Spot", "Last Price", "Last Volume", "End of Day Index", "Volume Exchange", ""]
    print(pd.DataFrame(rows, columns=headers))


if __name__ == "__main__":
    _main()