doms9 2025-10-11 18:51:41 -04:00
parent 00000d9199
commit 00000d92ff


@@ -225,7 +225,7 @@ async def get_events(
 async def scrape(client: httpx.AsyncClient) -> None:
     cached_urls = CACHE_FILE.load()
     cached_count = len(cached_urls)
-    urls.update(cached_urls)
+    urls.update({k: v for k, v in cached_urls.items() if v["url"]})

     log.info(f"Loaded {cached_count} event(s) from cache")
@@ -244,8 +244,6 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

-    # breakpoint()
-
     async with async_playwright() as p:
         browser, context = await network.browser(p)
@@ -260,27 +258,29 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 log=log,
             )

+            sport, event, logo, ts = (
+                ev["sport"],
+                ev["event"],
+                ev["logo"],
+                ev["timestamp"],
+            )
+            key = f"[{sport}] {event} (WFTY)"
+            tvg_id, pic = leagues.get_tvg_info(sport, event)
+            entry = {
+                "url": url,
+                "logo": logo or pic,
+                "base": base_url,
+                "timestamp": ts,
+                "id": tvg_id or "Live.Event.us",
+            }
+            cached_urls[key] = entry
+
             if url:
-                sport, event, logo, ts = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["logo"],
-                    ev["timestamp"],
-                )
-                key = f"[{sport}] {event} (WFTY)"
-                tvg_id, pic = leagues.get_tvg_info(sport, event)
-                entry = {
-                    "url": url,
-                    "logo": logo or pic,
-                    "base": base_url,
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                }
-                urls[key] = cached_urls[key] = entry
+                urls[key] = entry

         await browser.close()
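The restructured block always builds the entry and writes it to `cached_urls`, then publishes it to `urls` only when a stream URL was resolved; under the old code an event without a URL was never cached at all and would be scraped again on the next run. A toy sketch of the difference, using invented data in place of the scraper's real objects:

    entry = {
        "url": "",  # no stream resolved for this event
        "logo": None,
        "base": "https://example.com",
        "timestamp": 0,
        "id": "Live.Event.us",
    }
    key = "[MLB] Mets vs Braves (WFTY)"

    urls: dict[str, dict] = {}
    cached_urls: dict[str, dict] = {}

    # Old behavior: nothing stored when the URL is falsy, so the event
    # is not remembered between runs.
    if entry["url"]:
        urls[key] = cached_urls[key] = entry
    assert key not in cached_urls

    # New behavior: cache unconditionally, publish only when a URL exists.
    cached_urls[key] = entry
    if entry["url"]:
        urls[key] = entry
    assert key in cached_urls and key not in urls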