commit 00000d90b7
parent 8609c0a39e
Author: doms9
Date:   2025-10-29 03:21:18 -04:00

11 changed files with 501 additions and 187 deletions


@@ -72,7 +72,9 @@ async def process_event(
         await page.close()


-async def refresh_html_cache(client: httpx.AsyncClient, url: str) -> dict[str, str]:
+async def refresh_html_cache(
+    client: httpx.AsyncClient, url: str
+) -> dict[str, str | float]:
     try:
         r = await client.get(url)
         r.raise_for_status()
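
The hunk cuts off before the function's return, so the shape behind the widened dict[str, str | float] is not visible here. Below is a minimal sketch of a refresher with this signature, assuming the float is a fetched-at timestamp stored next to the HTML; the "html" and "fetched_at" keys and the empty-dict fallback are illustrative guesses, not code from this repo:

    import time

    import httpx


    async def refresh_html_cache(
        client: httpx.AsyncClient, url: str
    ) -> dict[str, str | float]:
        # Fetch the page; on any HTTP error, return an empty entry so the
        # caller can treat the cached HTML as stale or missing.
        try:
            r = await client.get(url)
            r.raise_for_status()
        except httpx.HTTPError:
            return {}

        # Assumed shape: HTML body plus a float fetched-at timestamp, which
        # would explain widening the value type from str to str | float.
        return {"html": r.text, "fetched_at": time.time()}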
@@ -172,32 +174,42 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

-    async with async_playwright() as p:
-        browser, context = await network.browser(p, browser="brave")
+    if events:
+        async with async_playwright() as p:
+            browser, context = await network.browser(p, browser="brave")

-        for i, ev in enumerate(events, start=1):
-            handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                )

-            url = await network.safe_process(handler, url_num=i, log=log)
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    log=log,
+                )

-            if url:
-                sport, event, ts = ev["sport"], ev["event"], ev["event_ts"]
+                if url:
+                    sport, event, ts = ev["sport"], ev["event"], ev["event_ts"]

-                tvg_id, logo = leagues.info(sport)
+                    tvg_id, logo = leagues.info(sport)

-                key = f"[{sport}] {event} (STRMD)"
+                    key = f"[{sport}] {event} (STRMD)"

-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": "",
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                }
+                    entry = {
+                        "url": url,
+                        "logo": logo,
+                        "base": "",
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                    }

-                urls[key] = cached_urls[key] = entry
+                    urls[key] = cached_urls[key] = entry

-        await browser.close()
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")