diff --git a/M3U8/scrape/caches/ppv_api.json b/M3U8/scrape/caches/ppv_api.json
index 42f3f16..f034aed 100644
--- a/M3U8/scrape/caches/ppv_api.json
+++ b/M3U8/scrape/caches/ppv_api.json
@@ -1,6 +1,6 @@
 {
   "success": true,
-  "timestamp": 1757200717,
+  "timestamp": 1756995060,
   "READ_ME": "Interested in using our API? Contact us for more information.",
   "performance": 0.013457059860229492,
   "streams": [
diff --git a/M3U8/scrape/livetvsx.py b/M3U8/scrape/livetvsx.py
index b75d69b..ddbdd8d 100644
--- a/M3U8/scrape/livetvsx.py
+++ b/M3U8/scrape/livetvsx.py
@@ -213,11 +213,11 @@ async def get_events(
 ) -> list[dict[str, str]]:
     events: list[dict[str, str]] = []
 
-    pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
-
     window_start, window_end = now - timedelta(hours=1), now + timedelta(minutes=30)
 
     if buffer := await fetch_xml_stream(url, ssl_ctx):
+        pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
+
         for _, elem in ET.iterparse(buffer, events=("end",)):
             if elem.tag == "item":
                 title = elem.findtext("title")
diff --git a/M3U8/scrape/streambtw.py b/M3U8/scrape/streambtw.py
index e748513..25019c7 100644
--- a/M3U8/scrape/streambtw.py
+++ b/M3U8/scrape/streambtw.py
@@ -6,7 +6,7 @@ from urllib.parse import urljoin
 import httpx
 from selectolax.parser import HTMLParser
 
-from .utils import get_logger, load_cache, safe_process_event
+from .utils import get_logger, load_cache, now, safe_process_event
 
 log = get_logger(__name__)
 
@@ -79,7 +79,7 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    if cached := load_cache(CACHE_FILE, hour=12):
+    if cached := load_cache(CACHE_FILE, exp=86400):
         urls.update(cached)
         log.info(f"Collected {len(urls)} event(s) from cache")
         return
@@ -101,6 +101,7 @@ async def main(client: httpx.AsyncClient) -> None:
             entry = {
                 "url": url,
                 "logo": ev["logo"],
+                "timestamp": now.timestamp(),
             }
 
             urls[f"[{ev['sport']}] {ev['event']}"] = entry
diff --git a/M3U8/scrape/tvpass.py b/M3U8/scrape/tvpass.py
index 1c0535f..c1081ff 100644
--- a/M3U8/scrape/tvpass.py
+++ b/M3U8/scrape/tvpass.py
@@ -4,7 +4,7 @@ from pathlib import Path
 
 import httpx
 
-from .utils import LOGOS, get_logger, load_cache
+from .utils import LOGOS, get_logger, load_cache, now
 
 log = get_logger(__name__)
 
@@ -27,7 +27,7 @@ async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]:
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    if cached := load_cache(CACHE_FILE, hour=12):
+    if cached := load_cache(CACHE_FILE, exp=86400):
         urls.update(cached)
         log.info(f"Collected {len(urls)} event(s) from cache")
         return
@@ -57,6 +57,7 @@ async def main(client: httpx.AsyncClient) -> None:
                 sport,
                 "https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png",
             ),
+            "timestamp": now.timestamp(),
         }
 
     CACHE_FILE.write_text(json.dumps(urls, indent=2), encoding="utf-8")
diff --git a/M3U8/scrape/utils/config.py b/M3U8/scrape/utils/config.py
index bfa0940..cd6ed44 100644
--- a/M3U8/scrape/utils/config.py
+++ b/M3U8/scrape/utils/config.py
@@ -71,20 +71,17 @@ def get_logger(name: str | None = None) -> logging.Logger:
 def load_cache(
     file: Path,
     exp: int | float = None,
-    hour: int = None,
 ) -> dict[str, dict[str, str | float]]:
     try:
-        data = json.loads(file.read_text(encoding="utf-8"))
+        data: dict[str, dict[str, str | float]] = json.loads(
+            file.read_text(encoding="utf-8")
+        )
 
-        if exp:
-            return {
-                k: v
-                for k, v in data.items()
-                if now.timestamp() - v.get("timestamp", 0) < exp
-            }
-
-        elif hour:
-            return {} if now.hour <= hour else data
+        return {
+            k: v
+            for k, v in data.items()
+            if now.timestamp() - v.get("timestamp", 0) < exp
+        }
 
     except (FileNotFoundError, json.JSONDecodeError):
         return {}
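
Note on the `load_cache` change above: cache entries are now invalidated per entry by age (the `exp` argument, in seconds) rather than by the hour of day, which is why `streambtw.py` and `tvpass.py` start stamping each entry with `now.timestamp()`. Below is a minimal, self-contained sketch of that expiry logic; it substitutes `time.time()` for the repo's shared `now` datetime, and the demo file name is hypothetical, purely for illustration:

```python
import json
import time
from pathlib import Path


def load_cache(file: Path, exp: int | float) -> dict[str, dict]:
    """Return only cache entries younger than `exp` seconds; {} on a missing or corrupt file."""
    try:
        data: dict[str, dict] = json.loads(file.read_text(encoding="utf-8"))
        return {
            k: v
            for k, v in data.items()
            # Entries without a "timestamp" field default to 0 and thus always expire.
            if time.time() - v.get("timestamp", 0) < exp
        }
    except (FileNotFoundError, json.JSONDecodeError):
        return {}


if __name__ == "__main__":
    cache = Path("demo_cache.json")  # hypothetical file, not part of the repo
    cache.write_text(json.dumps({
        "fresh": {"url": "https://example.com/a.m3u8", "timestamp": time.time()},
        "stale": {"url": "https://example.com/b.m3u8", "timestamp": time.time() - 100_000},
    }), encoding="utf-8")
    print(load_cache(cache, exp=86400))  # only "fresh" survives the 24 h window
```

One consequence of the patched signature worth noting: `exp` still defaults to `None`, so calling `load_cache(file)` without `exp` would raise a `TypeError` in the comparison once the cache has entries; both call sites in this diff pass `exp=86400` explicitly.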