I already have a Telegram bot that searches for news about a Brazilian soccer team and posts them throughout the day to a Telegram channel. I'm facing two main problems. First: when I run it locally, it works well for a while, but after a few hours it stops sending news.
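The only thing I suspected so far is that my requests.get calls have no timeout, so I experimented with a small wrapper like the one below, but it didn't obviously change anything (just a sketch, using the same logging setup as the full code further down):

import logging
import requests
from requests.exceptions import RequestException

logger = logging.getLogger(__name__)

def safe_get(url, timeout=10):
    # Return the response, or None if the request fails or times out,
    # so a single bad request doesn't stall the whole job.
    try:
        response = requests.get(url, timeout=timeout)
        response.raise_for_status()
        return response
    except RequestException as e:
        logger.error(f"Request to {url} failed: {e}")
        return None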
Second: it finds some news, but I was expecting to get much more, and I really don't understand how to pull news from other sources.
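To show what I mean, this is roughly how I pictured adding a second source (just a sketch; the URL and CSS selectors below are placeholders, not a site I've actually verified):

import requests
from bs4 import BeautifulSoup

def fetch_headlines(list_url, item_selector, title_selector, link_selector):
    # Download a listing page and return (title, url) pairs for the latest items.
    page = requests.get(list_url, timeout=10)
    page.raise_for_status()
    soup = BeautifulSoup(page.content, "html.parser")
    headlines = []
    for item in soup.select(item_selector)[:10]:
        title_tag = item.select_one(title_selector)
        link_tag = item.select_one(link_selector)
        if title_tag and link_tag and link_tag.get("href"):
            headlines.append((title_tag.text.strip(), link_tag["href"]))
    return headlines

# Placeholder values -- I have not confirmed these selectors against any real site.
outras_noticias = fetch_headlines(
    "https://example.com/gremio/noticias",
    "div.news-item",
    "h2.title",
    "a",
)

But I don't know if maintaining one scraper per site like this is the right approach, or if there is a better way to aggregate sources.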
My code (credentials hidden, but all tested and working fine):
import asyncio
import requests
import logging
import difflib
import json
from bs4 import BeautifulSoup
from telegram import Update, Bot
from telegram.ext import Application, CommandHandler, ContextTypes
from telegram.error import RetryAfter, TimedOut
from apscheduler.schedulers.asyncio import AsyncIOScheduler
# Logging config
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# Tokens
TELEGRAM_TOKEN = ''
CHANNEL_ID = ''
API_KEY = ''
# Files to save urls
URLS_FILE = 'sent_urls.json'
TITLES_FILE = 'sent_titles.json'
# Load urls and titles
def load_sent_data(filename):
    try:
        with open(filename, 'r') as file:
            return set(json.load(file))
    except (FileNotFoundError, json.JSONDecodeError):
        return set()
# Save titles and urls on files
def save_sent_data(data, filename):
    with open(filename, 'w') as file:
        json.dump(list(data), file)
enviadas_urls = load_sent_data(URLS_FILE)
enviadas_titulos = load_sent_data(TITLES_FILE)
# Clean and split titles
def limpar_e_dividir_titulo(titulo):
    return ' '.join(set(titulo.lower().replace(',', '').replace('.', '').split()))
# Title similarity verification
def titulos_sao_similares(titulo1, titulo2):
    SIMILARITY_THRESHOLD = 0.39
    return difflib.SequenceMatcher(None, titulo1, titulo2).ratio() > SIMILARITY_THRESHOLD
# Check whether a news item was already sent
def is_already_sent(title, url):
    title_keywords = limpar_e_dividir_titulo(title)
    for sent_title in enviadas_titulos:
        if titulos_sao_similares(title_keywords, sent_title):
            return True
    return url in enviadas_urls
# Fetch and send news from NewsAPI, with a similarity check
async def fetch_and_send_api_news(bot):
    query = 'Grêmio'
    url = f'https://newsapi.org/v2/everything?q={query}&language=pt&sortBy=publishedAt&apiKey={API_KEY}'
    response = requests.get(url)
    if response.status_code == 200:
        news_data = response.json()
        articles = news_data.get('articles', [])
        for article in articles[:5]:  # Limit to the 5 most recent API articles
            title = article['title']
            news_url = article['url']
            if not is_already_sent(title, news_url):
                description = article.get('description', 'Sem descrição')
                image_url = article.get('urlToImage', None)
                reply_text = f"{title}\n\n{description}\n\nLeia mais: {news_url}"
                if image_url:
                    await bot.send_photo(chat_id=CHANNEL_ID, photo=image_url, caption=reply_text)
                else:
                    await bot.send_message(chat_id=CHANNEL_ID, text=reply_text)
                enviadas_urls.add(news_url)
                enviadas_titulos.add(limpar_e_dividir_titulo(title))
                save_sent_data(enviadas_urls, URLS_FILE)
                save_sent_data(enviadas_titulos, TITLES_FILE)
    else:
        logger.error(f"Erro na API: {response.status_code}")
# Scrape news from the website and send them
async def fetch_and_send_scraped_news(bot):
    url = "https://www.gremionews.com.br/novidades"
    page = requests.get(url)
    soup = BeautifulSoup(page.content, "html.parser")
    novidades = soup.findAll("div", class_="list-item")[:10]  # Limit to the 10 most recent scraped items
    for novidade in novidades:
        link = novidade.find("a")['href']
        if link not in enviadas_urls:
            titulo = novidade.find("a", class_="title").text.strip()
            resumo = novidade.find("p", class_="description").text.strip()
            imagem_url = novidade.find("a", class_="image").find("img")['src']
            texto_mensagem = f"{titulo}\n\n{resumo}\n\nLeia mais: {link}"
            try:
                await bot.send_photo(chat_id=CHANNEL_ID, photo=imagem_url, caption=texto_mensagem)
                enviadas_urls.add(link)
                save_sent_data(enviadas_urls, URLS_FILE)  # Fix applied here
            except (RetryAfter, TimedOut) as e:
                logger.error(f"Erro ao enviar notícia: {e}")
                await asyncio.sleep(10)  # Wait before retrying
# Bot command to send news manually
async def news_command(update: Update, context: ContextTypes.DEFAULT_TYPE):
    await fetch_and_send_api_news(context.bot)
    await fetch_and_send_scraped_news(context.bot)
def main():
    bot = Bot(token=TELEGRAM_TOKEN)
    application = Application.builder().token(TELEGRAM_TOKEN).build()
    application.add_handler(CommandHandler("news", news_command))
    scheduler = AsyncIOScheduler()
    scheduler.add_job(fetch_and_send_api_news, 'interval', hours=1, args=[bot])
    scheduler.add_job(fetch_and_send_scraped_news, 'interval', hours=1, args=[bot])
    scheduler.start()
    application.run_polling()

if __name__ == '__main__':
    # main() is not a coroutine and run_polling() manages its own event loop,
    # so it is called directly instead of through asyncio.run().
    main()
What I'm looking for: some guidance on these two problems, and how to proceed with deploying this bot.