doms9 2025-10-29 03:21:18 -04:00
parent 8609c0a39e
commit 00000d90b7
11 changed files with 501 additions and 187 deletions

@@ -134,32 +134,44 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
+    now = Time.now().timestamp()
+
     if events:
-        now = Time.now().timestamp()
-
         for i, ev in enumerate(events, start=1):
-            handler = partial(process_event, client=client, url=ev["link"], url_num=i)
+            handler = partial(
+                process_event,
+                client=client,
+                url=ev["link"],
+                url_num=i,
+            )
 
-            match_name, url = await network.safe_process(handler, url_num=i, log=log)
+            match_name, url = await network.safe_process(
+                handler,
+                url_num=i,
+                log=log,
+            )
 
             if url:
                 sport = ev["sport"]
 
                 key = (
-                    f"[{sport}] {match_name} (FSTV)" if match_name else f"[{sport}] (FSTV)"
+                    f"[{sport}] {match_name} (FSTV)"
+                    if match_name
+                    else f"[{sport}] (FSTV)"
                 )
 
                 tvg_id, logo = leagues.info(sport)
 
                 entry = {
                     "url": url,
                     "logo": logo,
                     "base": base_url,
                     "timestamp": now,
                     "id": tvg_id or "Live.Event.us",
                     "href": ev["href"],
                 }
 
                 urls[key] = cached_urls[key] = entry
 
     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")