import requests
from bs4 import BeautifulSoup
from prettytable import PrettyTable
def find_sort_in_urls(url_list, custom_columns):
    for url in url_list:
        try:
            response = requests.get(url)
            soup = BeautifulSoup(response.text, 'html.parser')
            p = PrettyTable()
            p.field_names = custom_columns
            for table in soup.find_all('table'):
                # find_all() takes the bare tag name; passing '<tr>' (with angle
                # brackets) matches nothing, so no rows ever get added
                for row in table.find_all('tr'):
                    cells = row.find_all('td')
                    if any('TOOL01' in cell.get_text() or 'TOOL02' in cell.get_text() for cell in cells):
                        p.add_row([td.get_text() for td in cells])
            # Check if the table has any rows added, then print
            if p._rows:
                print(f'{p}\n')
        except requests.exceptions.RequestException as e:
            print(f"Error accessing {url}: {e}")
# List of URLs to check
urls = ["http://path/toolPM.php"]

# Custom column names
custom_columns = ["Location", "Entity", "Entity Type", "Weekly PM"]

find_sort_in_urls(urls, custom_columns)
input("Press ENTER to continue")
My script raises no errors but displays nothing, and I'm not sure whether my logic is correct.
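One way to tell whether the problem is the HTTP request or the table parsing is to run the same loop against a small inline HTML sample instead of the live page. The sketch below assumes an invented table fragment and placeholder tool names (Fab A, Etcher, etc.), not the real content served by toolPM.php; if it prints the TOOL01 row, the parsing logic is sound and the issue lies with the request or the page itself.

from bs4 import BeautifulSoup
from prettytable import PrettyTable

# Hypothetical HTML fragment standing in for the real toolPM.php page
sample_html = """
<table>
  <tr><td>Fab A</td><td>TOOL01</td><td>Etcher</td><td>Yes</td></tr>
  <tr><td>Fab B</td><td>TOOL99</td><td>Cleaner</td><td>No</td></tr>
</table>
"""

soup = BeautifulSoup(sample_html, 'html.parser')
p = PrettyTable()
p.field_names = ["Location", "Entity", "Entity Type", "Weekly PM"]

for table in soup.find_all('table'):
    for row in table.find_all('tr'):  # bare tag name, no angle brackets
        cells = row.find_all('td')
        if any('TOOL01' in cell.get_text() or 'TOOL02' in cell.get_text() for cell in cells):
            p.add_row([td.get_text() for td in cells])

print(p)  # should show only the TOOL01 row if the filtering works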