From 00000d98b81225ca90eed272436a8ad3ffa1954d Mon Sep 17 00:00:00 2001
From: doms9 <96013514+doms9@users.noreply.github.com>
Date: Thu, 4 Sep 2025 14:50:52 -0400
Subject: [PATCH] e

---
 M3U8/scrape/livetvsx.py | 13 +++++--------
 M3U8/scrape/ppv.py      |  8 +++++---
 2 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/M3U8/scrape/livetvsx.py b/M3U8/scrape/livetvsx.py
index 52b950c..0aa9a97 100644
--- a/M3U8/scrape/livetvsx.py
+++ b/M3U8/scrape/livetvsx.py
@@ -13,7 +13,7 @@ from .utils import LOGOS, TZ, get_logger, now, safe_process_event
 
 log = get_logger(__name__)
 
-urls: dict[str, dict[str, str]] = {}
+urls: dict[str, dict[str, str | float]] = {}
 
 BASE_URL = "https://cdn.livetv861.me/rss/upcoming_en.xml"
 
@@ -28,8 +28,6 @@ CERT_FILE = Path(__file__).parent / "utils" / "cached-ca.pem"
 
 CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"
 
-exist_sprts = set(LOGOS.keys())
-
 
 async def write_to_cert(client: httpx.AsyncClient, url: str, cert: Path) -> None:
     try:
@@ -266,9 +264,6 @@ async def get_events(
                 elem.clear()
                 continue
 
-            if exist_sprts & {sport, event}:
-                continue
-
             events.append(
                 {
                     "sport": sport,
@@ -291,9 +286,11 @@ async def main(client: httpx.AsyncClient) -> None:
     cached_urls = load_cache()
     cached_count = len(cached_urls)
 
+    log.info(f"Collected {cached_count} event(s) from cache")
+
     events = await get_events(BASE_URL, cert, set(cached_urls.keys()))
 
-    log.info(f"Processing {len(events)} URLs")
+    log.info(f"Processing {len(events)} new URLs")
 
     for i, ev in enumerate(events, start=1):
         sport = ev["sport"]
@@ -323,4 +320,4 @@ async def main(client: httpx.AsyncClient) -> None:
 
     CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
 
-    log.info(f"Collected {len(cached_urls) - cached_count} event(s)")
+    log.info(f"Collected {len(cached_urls) - cached_count} new event(s)")
diff --git a/M3U8/scrape/ppv.py b/M3U8/scrape/ppv.py
index 49df96d..5b79eb9 100644
--- a/M3U8/scrape/ppv.py
+++ b/M3U8/scrape/ppv.py
@@ -14,7 +14,7 @@ from .utils import TZ, get_base, get_logger, now, safe_process_event
 
 log = get_logger(__name__)
 
-urls: dict[str, dict[str, str]] = {}
+urls: dict[str, dict[str, str | float]] = {}
 
 API_FILE = Path(__file__).parent / "caches" / "ppv_api.json"
 
@@ -189,13 +189,15 @@ async def main(client: httpx.AsyncClient) -> None:
     cached_urls = load_cache()
     cached_count = len(cached_urls)
 
+    log.info(f"Collected {cached_count} event(s) from cache")
+
     events = await get_events(
         client,
         urljoin(base_url, "/api/streams"),
         set(cached_urls.keys()),
     )
 
-    log.info(f"Processing {len(events)} URLs")
+    log.info(f"Processing {len(events)} new URLs")
 
     for i, ev in enumerate(events, start=1):
         url = await safe_process_event(
@@ -217,7 +219,7 @@ async def main(client: httpx.AsyncClient) -> None:
 
     CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
 
-    log.info(f"Collected {len(cached_urls) - cached_count} event(s)")
+    log.info(f"Collected {len(cached_urls) - cached_count} new event(s)")
 
 
 # works if no cloudflare bot detection
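
Note (not part of the patch, ignored by git am): a minimal sketch of the cache shape and counting logic the patched log lines rely on. It assumes the widened annotation dict[str, dict[str, str | float]] reflects a float-valued field in each cached entry; the "expires" key, the sample URL, and the relative cache path below are illustrative assumptions, not the repo's actual schema, which is defined by load_cache() and safe_process_event.

import json
from pathlib import Path

# Hypothetical cache path for this sketch; the real modules resolve it as
# Path(__file__).parent / "caches" / "livetvsx.json" (or ppv.json).
CACHE_FILE = Path("caches") / "livetvsx.json"


def load_cache() -> dict[str, dict[str, str | float]]:
    # Each cached stream URL maps to metadata that mixes str values with a
    # float, hence the widened `str | float` annotation in the patch.
    if CACHE_FILE.exists():
        return json.loads(CACHE_FILE.read_text(encoding="utf-8"))
    return {}


cached_urls = load_cache()
cached_count = len(cached_urls)  # events already known before scraping

# A freshly scraped event is stored under its URL; "expires" is an assumed
# example of the float-valued field motivating the type change.
cached_urls["https://example.invalid/stream/123"] = {
    "sport": "Soccer",
    "event": "Example FC vs Sample United",
    "expires": 1757000000.0,
}

# Mirrors the patched log lines: new events = total now minus cached before.
print(f"Collected {cached_count} event(s) from cache")
print(f"Collected {len(cached_urls) - cached_count} new event(s)")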