doms9 2025-10-29 16:30:36 -04:00
parent 7c487189c4
commit 00000d9f0f
3 changed files with 15 additions and 8 deletions

View file

@@ -85,6 +85,7 @@ async def refresh_html_cache(
     client: httpx.AsyncClient,
     url: str,
     sport: str,
+    now_ts: float,
 ) -> dict[str, str | float]:
     try:
@@ -93,7 +94,7 @@ async def refresh_html_cache(
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')
-        return []
+        return {}
     soup = HTMLParser(r.text)
@@ -107,7 +108,7 @@ async def refresh_html_cache(
         event = a_tag.text(strip=True)
-        if not (event_link := a_tag.attributes.get("href")):
+        if not (href := a_tag.attributes.get("href")):
             continue
         if not (span := row.css_first("span.countdown-timer")):
@@ -122,9 +123,9 @@ async def refresh_html_cache(
         events[key] = {
             "sport": sport,
             "event": event,
-            "link": event_link,
+            "link": href,
             "event_ts": event_dt.timestamp(),
-            "timestamp": Time.now().timestamp(),
+            "timestamp": now_ts,
         }
     return events
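
The change above threads a single now_ts float into refresh_html_cache so every entry written during one refresh shares the same capture time, instead of reading Time.now() per event. A minimal runnable sketch of that pattern follows; the row shape and the refresh_cache helper are hypothetical stand-ins for the real httpx/selectolax code:

    import time

    def refresh_cache(rows: list[dict[str, str]], sport: str, now_ts: float) -> dict:
        # Every entry stores the same now_ts, so cache-age checks see one
        # consistent capture time per refresh rather than a drifting clock.
        events: dict[str, dict] = {}
        for row in rows:
            events[row["key"]] = {
                "sport": sport,
                "event": row["event"],
                "link": row["href"],
                "timestamp": now_ts,
            }
        return events

    now_ts = time.time()  # captured once by the caller, shared across all sports
    print(refresh_cache([{"key": "nba/a-vs-b", "event": "A vs B", "href": "/a-vs-b"}], "nba", now_ts))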
@@ -136,9 +137,17 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
+    now = Time.clean(Time.now())
     if not (events := HTML_CACHE.load()):
         tasks = [
-            refresh_html_cache(client, url, sport) for sport, url in sport_urls.items()
+            refresh_html_cache(
+                client,
+                url,
+                sport,
+                now.timestamp(),
+            )
+            for sport, url in sport_urls.items()
         ]
         results = await asyncio.gather(*tasks)
@@ -149,7 +158,6 @@ async def get_events(
     live = []
-    now = Time.clean(Time.now())
     start_ts = now.delta(minutes=-30).timestamp()
     end_ts = now.delta(minutes=30).timestamp()
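
With this hunk, get_events reads the clock once at the top and reuses it both for the cache refresh (now.timestamp()) and for the plus/minus 30-minute live window further down. A small sketch of the window math, using stdlib datetime in place of the project's Time helper (an assumption on my part):

    from datetime import datetime, timedelta, timezone

    # One clock read serves both the cache refresh and the live window.
    now = datetime.now(timezone.utc)
    start_ts = (now - timedelta(minutes=30)).timestamp()
    end_ts = (now + timedelta(minutes=30)).timestamp()

    event_ts = now.timestamp()
    print(start_ts <= event_ts <= end_ts)  # True: an event starting now is "live"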

View file

@@ -81,7 +81,7 @@ async def refresh_html_cache(
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')
-        return []
+        return {}
     soup = HTMLParser(r.text)
     events = {}
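
Same fix as in the first file: the error path now returns {}, which matches the declared dict[str, str | float] return type. Assuming the gathered per-sport results get merged into one dict (the merge loop below is my guess at the caller, not taken from the diff), an empty dict is the fallback that composes cleanly:

    # Two gathered results: one successful fetch, one failed fetch ({}).
    results = [{"nba/a-vs-b": {"sport": "nba", "event": "A vs B"}}, {}]

    events: dict[str, dict] = {}
    for result in results:
        events.update(result)  # an empty dict merges cleanly and type-checks

    print(events)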

View file

@@ -163,7 +163,6 @@ async def get_events(
         if not sources:
             continue
-        # source = sources[0]
         source = sources[1] if len(sources) > 1 else sources[0]
         source_type = source.get("source")
         stream_id = source.get("id")
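
The commented-out source = sources[0] is dropped, and the selection now prefers the second source when more than one is available. A tiny sketch of the behavior, with illustrative data:

    sources = [
        {"source": "alpha", "id": "101"},
        {"source": "bravo", "id": "202"},
    ]

    # Prefer the second source when available, else fall back to the first.
    source = sources[1] if len(sources) > 1 else sources[0]
    print(source.get("source"), source.get("id"))  # bravo 202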