diff --git a/M3U8/scrape/livetvsx.py b/M3U8/scrape/livetvsx.py
index ddbdd8d..360439e 100644
--- a/M3U8/scrape/livetvsx.py
+++ b/M3U8/scrape/livetvsx.py
@@ -269,12 +269,13 @@ async def main(client: httpx.AsyncClient) -> None:
 
     cached_urls = load_cache(CACHE_FILE, exp=14400)
     cached_count = len(cached_urls)
+    urls.update(cached_urls)
 
     log.info(f"Collected {cached_count} event(s) from cache")
 
     events = await get_events(BASE_URL, cert, set(cached_urls.keys()))
 
-    log.info(f"Processing {len(events)} new URLs")
+    log.info(f"Processing {len(events)} new URL(s)")
 
     for i, ev in enumerate(events, start=1):
         sport = ev["sport"]
@@ -302,6 +303,9 @@ async def main(client: httpx.AsyncClient) -> None:
 
         urls[key] = cached_urls[key] = entry
 
-    CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
+    if (new_count := len(cached_urls) - cached_count) > 0:
+        CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
 
-    log.info(f"Collected {len(cached_urls) - cached_count} new event(s)")
+        log.info(f"Collected and cached {new_count} new event(s)")
+    else:
+        log.info("No new events found")
diff --git a/M3U8/scrape/ppv.py b/M3U8/scrape/ppv.py
index 8e0f6c1..e5a7dd5 100644
--- a/M3U8/scrape/ppv.py
+++ b/M3U8/scrape/ppv.py
@@ -181,6 +181,7 @@ async def main(client: httpx.AsyncClient) -> None:
 
     cached_urls = load_cache(CACHE_FILE, exp=14400)
     cached_count = len(cached_urls)
+    urls.update(cached_urls)
 
     log.info(f"Collected {cached_count} event(s) from cache")
 
@@ -190,7 +191,7 @@ async def main(client: httpx.AsyncClient) -> None:
         set(cached_urls.keys()),
     )
 
-    log.info(f"Processing {len(events)} new URLs")
+    log.info(f"Processing {len(events)} new URL(s)")
 
     for i, ev in enumerate(events, start=1):
         url = await safe_process_event(
@@ -210,9 +211,12 @@ async def main(client: httpx.AsyncClient) -> None:
 
         urls[key] = cached_urls[key] = entry
 
-    CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
+    if (new_count := len(cached_urls) - cached_count) > 0:
+        CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
 
-    log.info(f"Collected {len(cached_urls) - cached_count} new event(s)")
+        log.info(f"Collected and cached {new_count} new event(s)")
+    else:
+        log.info("No new events found")
 
 
 # works if no cloudflare bot detection
diff --git a/M3U8/scrape/streambtw.py b/M3U8/scrape/streambtw.py
index 25019c7..7aeb85f 100644
--- a/M3U8/scrape/streambtw.py
+++ b/M3U8/scrape/streambtw.py
@@ -88,7 +88,7 @@ async def main(client: httpx.AsyncClient) -> None:
 
     events = await get_events(client)
 
-    log.info(f"Processing {len(events)} new URLs")
+    log.info(f"Processing {len(events)} new URL(s)")
 
     for i, ev in enumerate(events, start=1):
         url = await safe_process_event(