From 00000d92ff0a683e29aebf0a2a1a9edc95fdccf1 Mon Sep 17 00:00:00 2001
From: doms9 <96013514+doms9@users.noreply.github.com>
Date: Sat, 11 Oct 2025 18:51:41 -0400
Subject: [PATCH] watchfooty: cache events without stream URLs

---
 M3U8/scrapers/watchfooty.py | 46 +++++++++++++++++++++----------------------
 1 file changed, 23 insertions(+), 23 deletions(-)

diff --git a/M3U8/scrapers/watchfooty.py b/M3U8/scrapers/watchfooty.py
index 6238de3..590ef48 100644
--- a/M3U8/scrapers/watchfooty.py
+++ b/M3U8/scrapers/watchfooty.py
@@ -225,7 +225,7 @@ async def get_events(
 async def scrape(client: httpx.AsyncClient) -> None:
     cached_urls = CACHE_FILE.load()
     cached_count = len(cached_urls)
-    urls.update(cached_urls)
+    urls.update({k: v for k, v in cached_urls.items() if v["url"]})
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
@@ -244,8 +244,6 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
     log.info(f"Processing {len(events)} new URL(s)")
 
-    # breakpoint()
-
     async with async_playwright() as p:
         browser, context = await network.browser(p)
 
@@ -260,27 +258,29 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 log=log,
             )
 
+            sport, event, logo, ts = (
+                ev["sport"],
+                ev["event"],
+                ev["logo"],
+                ev["timestamp"],
+            )
+
+            key = f"[{sport}] {event} (WFTY)"
+
+            tvg_id, pic = leagues.get_tvg_info(sport, event)
+
+            entry = {
+                "url": url,
+                "logo": logo or pic,
+                "base": base_url,
+                "timestamp": ts,
+                "id": tvg_id or "Live.Event.us",
+            }
+
+            cached_urls[key] = entry
+
             if url:
-                sport, event, logo, ts = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["logo"],
-                    ev["timestamp"],
-                )
-
-                key = f"[{sport}] {event} (WFTY)"
-
-                tvg_id, pic = leagues.get_tvg_info(sport, event)
-
-                entry = {
-                    "url": url,
-                    "logo": logo or pic,
-                    "base": base_url,
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                }
-
-                urls[key] = cached_urls[key] = entry
+                urls[key] = entry
 
         await browser.close()
 
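Reviewer note, not part of the patch: the net effect is that every scraped
event is now written back to the cache, so metadata for events whose stream
URL has not appeared yet survives between runs, while only entries with a
resolved URL are promoted into `urls`. Below is a minimal sketch of that
filtering, using hypothetical sample entries (the real cache values also
carry `logo`, `base`, `timestamp`, and `id` fields, as seen in the diff):

# Minimal sketch, not part of the patch; sample keys and values are
# hypothetical stand-ins for what CACHE_FILE.load() would return.
cached_urls = {
    "[Soccer] Team A vs Team B (WFTY)": {"url": "https://example.com/a.m3u8"},
    "[Soccer] Team C vs Team D (WFTY)": {"url": ""},  # cached, no stream yet
}

urls: dict[str, dict] = {}

# Mirrors the patched scrape(): only cached entries that already resolved to
# a stream URL enter the live mapping; URL-less entries stay in the cache so
# a later run can fill in the URL without re-deriving the metadata.
urls.update({k: v for k, v in cached_urls.items() if v["url"]})

assert list(urls) == ["[Soccer] Team A vs Team B (WFTY)"]

The same reasoning explains moving the entry construction out of the
`if url:` block: `cached_urls[key] = entry` must run unconditionally for the
cache to retain URL-less events, so only the `urls[key] = entry` assignment
stays guarded.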