diff --git a/M3U8/scrapers/livetvsx.py b/M3U8/scrapers/livetvsx.py
index 360439e..c05f8e2 100644
--- a/M3U8/scrapers/livetvsx.py
+++ b/M3U8/scrapers/livetvsx.py
@@ -38,7 +38,11 @@ CERT_FILE = Path(__file__).parent / "utils" / "cached-ca.pem"
 CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"
 
 
-async def write_to_cert(client: httpx.AsyncClient, url: str, cert: Path) -> None:
+async def write_to_cert(
+    client: httpx.AsyncClient,
+    url: str,
+    cert: Path,
+) -> None:
     try:
         r = await client.get(url)
         r.raise_for_status()
@@ -263,17 +267,21 @@ async def get_events(
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    log.info(f'Scraping from "{BASE_URL}"')
-
-    cert = await get_cert(client)
-
     cached_urls = load_cache(CACHE_FILE, exp=14400)
     cached_count = len(cached_urls)
     urls.update(cached_urls)
 
     log.info(f"Collected {cached_count} event(s) from cache")
 
-    events = await get_events(BASE_URL, cert, set(cached_urls.keys()))
+    log.info(f'Scraping from "{BASE_URL}"')
+
+    cert = await get_cert(client)
+
+    events = await get_events(
+        BASE_URL,
+        cert,
+        set(cached_urls.keys()),
+    )
 
     log.info(f"Processing {len(events)} new URL(s)")
 
diff --git a/M3U8/scrapers/ppv.py b/M3U8/scrapers/ppv.py
index e5a7dd5..676b909 100644
--- a/M3U8/scrapers/ppv.py
+++ b/M3U8/scrapers/ppv.py
@@ -164,7 +164,7 @@
             {
                 "sport": sport,
                 "event": name,
-                "link": urljoin(base_url, f"/live/{uri_name}"),
+                "link": urljoin(base_url, f"live/{uri_name}"),
                 "logo": logo,
             }
         )
@@ -173,21 +173,21 @@ async def get_events(
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    if not (base_url := await get_base(client, MIRRORS)):
-        log.warning("No working PPV mirrors")
-        return
-
-    log.info(f'Scraping from "{base_url}"')
-
     cached_urls = load_cache(CACHE_FILE, exp=14400)
     cached_count = len(cached_urls)
     urls.update(cached_urls)
 
     log.info(f"Collected {cached_count} event(s) from cache")
 
+    if not (base_url := await get_base(client, MIRRORS)):
+        log.warning("No working PPV mirrors")
+        return
+
+    log.info(f'Scraping from "{base_url}"')
+
     events = await get_events(
         client,
-        urljoin(base_url, "/api/streams"),
+        urljoin(base_url, "api/streams"),
         set(cached_urls.keys()),
     )
 
diff --git a/M3U8/scrapers/utils/config.py b/M3U8/scrapers/utils/config.py
index fca6449..0595020 100644
--- a/M3U8/scrapers/utils/config.py
+++ b/M3U8/scrapers/utils/config.py
@@ -2,6 +2,7 @@ import asyncio
 import json
 import logging
 import re
+from collections.abc import Callable
 from datetime import datetime
 from pathlib import Path
 from typing import Any
@@ -103,9 +104,9 @@ def load_cache(
 
 
 async def safe_process_event(
-    fn,
+    fn: Callable,
     url_num: int,
-    timeout=20,
+    timeout: int | float = 20,
     log: logging.Logger | None = None,
 ) -> Any | None: