diff --git a/M3U8/scrapers/ovogoal.py b/M3U8/scrapers/ovogoal.py
index 4f419e14..71644b80 100644
--- a/M3U8/scrapers/ovogoal.py
+++ b/M3U8/scrapers/ovogoal.py
@@ -89,7 +89,7 @@ async def get_events() -> list[dict[str, str]]:
 
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
 
diff --git a/M3U8/scrapers/pawa.py b/M3U8/scrapers/pawa.py
index a4cf6f96..56ba2e6e 100644
--- a/M3U8/scrapers/pawa.py
+++ b/M3U8/scrapers/pawa.py
@@ -87,7 +87,7 @@ async def get_events() -> list[dict[str, str]]:
 
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
 
diff --git a/M3U8/scrapers/roxie.py b/M3U8/scrapers/roxie.py
index 7851a305..4d05d9b4 100644
--- a/M3U8/scrapers/roxie.py
+++ b/M3U8/scrapers/roxie.py
@@ -115,7 +115,7 @@ async def get_events() -> list[dict[str, str]]:
 
 async def scrape(browser: Browser) -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
 
diff --git a/M3U8/scrapers/shark.py b/M3U8/scrapers/shark.py
index 4bbfff09..45990552 100644
--- a/M3U8/scrapers/shark.py
+++ b/M3U8/scrapers/shark.py
@@ -90,7 +90,7 @@ async def get_events() -> dict[str, dict[str, str | float]]:
 
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
 
diff --git a/M3U8/scrapers/streamcenter.py b/M3U8/scrapers/streamcenter.py
index 3fd6a946..bd30b50f 100644
--- a/M3U8/scrapers/streamcenter.py
+++ b/M3U8/scrapers/streamcenter.py
@@ -96,7 +96,7 @@ async def get_events() -> list[dict[str, str]]:
 
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
 
diff --git a/M3U8/scrapers/streamtpnew.py b/M3U8/scrapers/streamtpnew.py
index f5da675f..e31c4151 100644
--- a/M3U8/scrapers/streamtpnew.py
+++ b/M3U8/scrapers/streamtpnew.py
@@ -83,7 +83,7 @@ async def get_events() -> list[dict[str, str]]:
 
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
 
diff --git a/M3U8/scrapers/tvapp.py b/M3U8/scrapers/tvapp.py
index f89a6b82..1c26f987 100644
--- a/M3U8/scrapers/tvapp.py
+++ b/M3U8/scrapers/tvapp.py
@@ -75,7 +75,7 @@ async def get_events() -> list[dict[str, str]]:
 
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
 
diff --git a/M3U8/scrapers/webcast.py b/M3U8/scrapers/webcast.py
index df5ef690..0abf7585 100644
--- a/M3U8/scrapers/webcast.py
+++ b/M3U8/scrapers/webcast.py
@@ -133,7 +133,7 @@ async def get_events() -> list[dict[str, str]]:
 
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")
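
The same one-line change lands in all eight scrapers: instead of merging the cached mapping into `urls` wholesale, each `scrape()` now drops cache entries whose "url" value is falsy before updating. A minimal sketch of that filtering step, assuming the `{name: {"url": ...}}` cache shape implied by the diff (the sample entries below are hypothetical, not taken from the repo):

    # Entries with an empty or missing stream URL are filtered out,
    # so stale cache records no longer shadow a fresh scrape.
    cached_urls = {
        "event-a": {"url": "https://example.com/a.m3u8"},
        "event-b": {"url": ""},  # stale entry: no stream URL was captured
    }

    urls: dict[str, dict[str, str]] = {}
    urls.update({k: v for k, v in cached_urls.items() if v["url"]})

    print(list(urls))  # ['event-a']: only entries with a truthy "url" survive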