doms9 2025-10-29 16:30:36 -04:00
parent 7c487189c4
commit 00000d9f0f
3 changed files with 15 additions and 8 deletions

@@ -85,6 +85,7 @@ async def refresh_html_cache(
     client: httpx.AsyncClient,
     url: str,
     sport: str,
+    now_ts: float,
 ) -> dict[str, str | float]:
     try:
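
The new now_ts parameter lets the caller read the clock once and share that instant across every concurrent refresh, so all cache entries agree on when they were fetched. A minimal sketch of the intended call pattern, assuming only the signature above (refresh_all is a hypothetical wrapper, and time.time() stands in for the project's Time helper):

import asyncio
import time

import httpx


async def refresh_all(client: httpx.AsyncClient, sport_urls: dict[str, str]) -> list:
    now_ts = time.time()  # one clock read, shared by every task
    tasks = [
        refresh_html_cache(client, url, sport, now_ts)  # the coroutine patched above
        for sport, url in sport_urls.items()
    ]
    return await asyncio.gather(*tasks)
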
@@ -93,7 +94,7 @@ async def refresh_html_cache(
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')
-        return []
+        return {}

     soup = HTMLParser(r.text)
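
Returning {} instead of [] on a failed fetch matches the declared return type, dict[str, str | float], so downstream code that treats each result as a mapping keeps working. A small illustration of why the old value could blow up later (the merge loop is an assumption about the call site, not code from this commit):

results = [{"a": 1}, {}, {"b": 2}]  # stand-ins for gathered return values
events: dict = {}
for result in results:
    for key, entry in result.items():  # a stray [] has no .items()
        events[key] = entry
print(events)  # {'a': 1, 'b': 2}
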
@@ -107,7 +108,7 @@ async def refresh_html_cache(
         event = a_tag.text(strip=True)

-        if not (event_link := a_tag.attributes.get("href")):
+        if not (href := a_tag.attributes.get("href")):
             continue

         if not (span := row.css_first("span.countdown-timer")):
@@ -122,9 +123,9 @@ async def refresh_html_cache(
         events[key] = {
             "sport": sport,
             "event": event,
-            "link": event_link,
+            "link": href,
             "event_ts": event_dt.timestamp(),
-            "timestamp": Time.now().timestamp(),
+            "timestamp": now_ts,
         }

     return events
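
Hunks three and four belong together: the walrus target is renamed from event_link to the shorter href, and each cache entry is stamped with the shared now_ts instead of a fresh Time.now() per event. A sketch of the per-row logic under stated assumptions: the tr/a selectors, key, and event_dt all sit outside the visible hunks, while text(strip=True), attributes, and css_first are selectolax calls taken from the diff itself:

for row in soup.css("tr"):  # assumed row selector
    if not (a_tag := row.css_first("a")):  # assumed anchor lookup
        continue

    event = a_tag.text(strip=True)

    if not (href := a_tag.attributes.get("href")):
        continue

    if not (span := row.css_first("span.countdown-timer")):
        continue

    # key and event_dt are presumably derived from span, outside these hunks
    events[key] = {
        "sport": sport,
        "event": event,
        "link": href,
        "event_ts": event_dt.timestamp(),
        "timestamp": now_ts,  # the caller's single clock read
    }
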
@@ -136,9 +137,17 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
+    now = Time.clean(Time.now())
+
     if not (events := HTML_CACHE.load()):
         tasks = [
-            refresh_html_cache(client, url, sport) for sport, url in sport_urls.items()
+            refresh_html_cache(
+                client,
+                url,
+                sport,
+                now.timestamp(),
+            )
+            for sport, url in sport_urls.items()
         ]

         results = await asyncio.gather(*tasks)
@@ -149,7 +158,6 @@ async def get_events(
     live = []
-    now = Time.clean(Time.now())
     start_ts = now.delta(minutes=-30).timestamp()
     end_ts = now.delta(minutes=30).timestamp()
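
Net effect in get_events: now is computed once at the top of the function and reused twice, first as the timestamp handed to every refresh task and then for the ±30-minute live window, so the cache stamps and the window are anchored to the same instant even if the fetches take several seconds. A sketch of the reuse (the final filter over events is an assumption; Time is the project's own helper):

now = Time.clean(Time.now())

# 1) handed to every refresh task:
#        refresh_html_cache(client, url, sport, now.timestamp())

# 2) reused for the live window:
start_ts = now.delta(minutes=-30).timestamp()
end_ts = now.delta(minutes=30).timestamp()
live = [e for e in events.values() if start_ts <= e["event_ts"] <= end_ts]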