From 00000d9875022566498e38713cee93f5e7ae9de5 Mon Sep 17 00:00:00 2001
From: doms9 <96013514+doms9@users.noreply.github.com>
Date: Sat, 27 Dec 2025 12:52:18 -0500
Subject: [PATCH] cache nulls from totalsportek

---
 M3U8/scrapers/totalsportek.py | 50 ++++++++++++++++++++---------------
 M3U8/scrapers/watchfooty.py   |  1 +
 2 files changed, 29 insertions(+), 22 deletions(-)

diff --git a/M3U8/scrapers/totalsportek.py b/M3U8/scrapers/totalsportek.py
index 2723111..5ccd650 100644
--- a/M3U8/scrapers/totalsportek.py
+++ b/M3U8/scrapers/totalsportek.py
@@ -108,9 +108,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 async def scrape() -> None:
     cached_urls = CACHE_FILE.load()
 
-    cached_count = len(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
 
-    urls.update(cached_urls)
+    valid_count = cached_count = len(valid_urls)
+
+    urls.update(valid_urls)
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
@@ -137,29 +139,33 @@ async def scrape() -> None:
             log=log,
         )
 
+        sport, event, link = (
+            ev["sport"],
+            ev["event"],
+            ev["link"],
+        )
+
+        key = f"[{sport}] {event} ({TAG})"
+
+        tvg_id, logo = leagues.get_tvg_info(sport, event)
+
+        entry = {
+            "url": url,
+            "logo": logo,
+            "base": iframe,
+            "timestamp": now.timestamp(),
+            "id": tvg_id or "Live.Event.us",
+            "link": link,
+        }
+
+        cached_urls[key] = entry
+
         if url:
-            sport, event, link = (
-                ev["sport"],
-                ev["event"],
-                ev["link"],
-            )
+            valid_count += 1
 
-            key = f"[{sport}] {event} ({TAG})"
+            urls[key] = entry
 
-            tvg_id, logo = leagues.get_tvg_info(sport, event)
-
-            entry = {
-                "url": url,
-                "logo": logo,
-                "base": iframe,
-                "timestamp": now.timestamp(),
-                "id": tvg_id or "Live.Event.us",
-                "link": link,
-            }
-
-            urls[key] = cached_urls[key] = entry
-
-    if new_count := len(cached_urls) - cached_count:
+    if new_count := valid_count - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
 
     else:
diff --git a/M3U8/scrapers/watchfooty.py b/M3U8/scrapers/watchfooty.py
index f749688..3180668 100644
--- a/M3U8/scrapers/watchfooty.py
+++ b/M3U8/scrapers/watchfooty.py
@@ -304,6 +304,7 @@ async def scrape() -> None:
 
         if url:
             valid_count += 1
+            urls[key] = entry
 
     finally:
 
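For context on the approach: the patch caches every event it resolves, including ones whose stream URL comes back null, so dead links are not re-scraped on the next run; only entries with a real URL seed the count and get exported to urls. Below is a minimal standalone sketch of that pattern, assuming hypothetical load_cache and resolve_url helpers in place of the repo's CACHE_FILE.load() and site-specific resolution code:

    import json
    import time
    from pathlib import Path

    CACHE_PATH = Path("cache.json")  # hypothetical stand-in for CACHE_FILE


    def load_cache() -> dict[str, dict]:
        """Load previously scraped events; start fresh if missing or corrupt."""
        try:
            return json.loads(CACHE_PATH.read_text())
        except (FileNotFoundError, json.JSONDecodeError):
            return {}


    def scrape(events: list[dict], resolve_url) -> dict[str, dict]:
        """Cache every event, null URL or not; export only playable ones."""
        cached = load_cache()

        # Only entries with a real URL are "valid": they seed the export
        # dict and the baseline count, mirroring valid_urls/cached_count.
        exported = {k: v for k, v in cached.items() if v["url"]}
        valid_count = cached_count = len(exported)

        for ev in events:
            key = f"[{ev['sport']}] {ev['event']}"
            url = resolve_url(ev)  # may return None for a dead stream

            entry = {"url": url, "link": ev["link"], "timestamp": time.time()}

            # Cache unconditionally: a null URL records that the event was
            # already tried, so the next run skips it instead of re-scraping.
            cached[key] = entry

            if url:
                valid_count += 1
                exported[key] = entry

        CACHE_PATH.write_text(json.dumps(cached, indent=2))

        if new_count := valid_count - cached_count:
            print(f"Collected and cached {new_count} new event(s)")

        return exported

The trade-off is a slightly larger cache file in exchange for fewer repeat requests, and keeping valid_count separate from the raw cache size is what keeps the "new event(s)" log accurate now that null entries live in the cache too.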