diff --git a/M3U8/scrape/caches/streambtw.json b/M3U8/scrape/caches/streambtw.json index 40cdc76..9e26dfe 100644 --- a/M3U8/scrape/caches/streambtw.json +++ b/M3U8/scrape/caches/streambtw.json @@ -1,82 +1 @@ -{ - "[NFL Week 1] Dallas Cowboys at Philadelphia Eagles": { - "url": "https://smsbusinesshub.com/playlist/stream_nfl1.m3u8", - "logo": "https://artwork.espncdn.com/categories/ad4c3bd2-ddb6-3f8c-8abf-744855a08fa4/logo/original_201807271434.svg", - "timestamp": 1757031738.450115 - }, - "[World Cup Qualifiers] Liechtenstein vs Belgium": { - "url": "https://streameast247.com/playlist/stream_4.m3u8", - "logo": "https://digitalhub.fifa.com/transform/befe3a64-328b-453c-8b58-0faeb9103684/FIFA_Logo_White_Generic?&io=transform:fill,height:128&quality=75", - "timestamp": 1757031738.450115 - }, - "[World Cup Qualifiers] Luxembourg vs Northern Ireland": { - "url": "https://streameast247.com/playlist/stream_6.m3u8", - "logo": "https://digitalhub.fifa.com/transform/befe3a64-328b-453c-8b58-0faeb9103684/FIFA_Logo_White_Generic?&io=transform:fill,height:128&quality=75", - "timestamp": 1757031738.450115 - }, - "[World Cup Qualifiers] Suriname vs Panama": { - "url": "https://streameast247.com/playlist/stream_5.m3u8", - "logo": "https://digitalhub.fifa.com/transform/befe3a64-328b-453c-8b58-0faeb9103684/FIFA_Logo_White_Generic?&io=transform:fill,height:128&quality=75", - "timestamp": 1757031738.450115 - }, - "[World Cup Qualifiers] Argentina vs Venezuela": { - "url": "https://streameast247.com/playlist/stream_7.m3u8", - "logo": "https://digitalhub.fifa.com/transform/befe3a64-328b-453c-8b58-0faeb9103684/FIFA_Logo_White_Generic?&io=transform:fill,height:128&quality=75", - "timestamp": 1757031738.450115 - }, - "[World Cup Qualifiers] Colombia vs Bolivia": { - "url": "https://streameast247.com/playlist/stream_8.m3u8", - "logo": "https://digitalhub.fifa.com/transform/befe3a64-328b-453c-8b58-0faeb9103684/FIFA_Logo_White_Generic?&io=transform:fill,height:128&quality=75", - "timestamp": 
1757031738.450115 - }, - "[World Cup Qualifiers] Paraguay vs Ecuador": { - "url": "https://streameast247.com/playlist/stream_9.m3u8", - "logo": "https://digitalhub.fifa.com/transform/befe3a64-328b-453c-8b58-0faeb9103684/FIFA_Logo_White_Generic?&io=transform:fill,height:128&quality=75", - "timestamp": 1757031738.450115 - }, - "[World Cup Qualifiers] Uruguay vs Peru": { - "url": "https://streameast247.com/playlist/stream_10.m3u8", - "logo": "https://digitalhub.fifa.com/transform/befe3a64-328b-453c-8b58-0faeb9103684/FIFA_Logo_White_Generic?&io=transform:fill,height:128&quality=75", - "timestamp": 1757031738.450115 - }, - "[World Cup Qualifiers] Brazil vs Chile": { - "url": "https://streameast247.com/playlist/stream_11.m3u8", - "logo": "https://digitalhub.fifa.com/transform/befe3a64-328b-453c-8b58-0faeb9103684/FIFA_Logo_White_Generic?&io=transform:fill,height:128&quality=75", - "timestamp": 1757031738.450115 - }, - "[World Cup Qualifiers] Guatemala vs El Salvador": { - "url": "https://streameast247.com/playlist/stream_5.m3u8", - "logo": "https://digitalhub.fifa.com/transform/befe3a64-328b-453c-8b58-0faeb9103684/FIFA_Logo_White_Generic?&io=transform:fill,height:128&quality=75", - "timestamp": 1757031738.450115 - }, - "[MLB] Philadelphia vs Milwaukee": { - "url": "https://smsbusinesshub.com/playlist/stream_mlb1.m3u8", - "logo": "https://artwork.espncdn.com/categories/b38f959b-7865-31ac-8841-b88355519e10/logo/original_201811152008.svg", - "timestamp": 1757031738.450115 - }, - "[MLB] L.A. 
Dodgers vs Pittsburgh": { - "url": "https://smsbusinesshub.com/playlist/stream_mlb2.m3u8", - "logo": "https://artwork.espncdn.com/categories/b38f959b-7865-31ac-8841-b88355519e10/logo/original_201811152008.svg", - "timestamp": 1757031738.450115 - }, - "[MLB] Cleveland vs Tampa Bay": { - "url": "https://smsbusinesshub.com/playlist/stream_mlb3.m3u8", - "logo": "https://artwork.espncdn.com/categories/b38f959b-7865-31ac-8841-b88355519e10/logo/original_201811152008.svg", - "timestamp": 1757031738.450115 - }, - "[MLB] N.Y. Yankees vs Houston": { - "url": "https://smsbusinesshub.com/playlist/stream_mlb1.m3u8", - "logo": "https://artwork.espncdn.com/categories/b38f959b-7865-31ac-8841-b88355519e10/logo/original_201811152008.svg", - "timestamp": 1757031738.450115 - }, - "[MLB] L.A. Angels vs Kansas City": { - "url": "https://smsbusinesshub.com/playlist/stream_mlb4.m3u8", - "logo": "https://artwork.espncdn.com/categories/b38f959b-7865-31ac-8841-b88355519e10/logo/original_201811152008.svg", - "timestamp": 1757031738.450115 - }, - "[MLB] Chi. 
White Sox vs Minnesota": { - "url": "https://smsbusinesshub.com/playlist/stream_mlb5.m3u8", - "logo": "https://artwork.espncdn.com/categories/b38f959b-7865-31ac-8841-b88355519e10/logo/original_201811152008.svg", - "timestamp": 1757031738.450115 - } -} \ No newline at end of file +{} \ No newline at end of file diff --git a/M3U8/scrape/livetvsx.py b/M3U8/scrape/livetvsx.py index 6f46e14..a233bcf 100644 --- a/M3U8/scrape/livetvsx.py +++ b/M3U8/scrape/livetvsx.py @@ -15,7 +15,7 @@ from .utils import ( TZ, capture_req, get_logger, - load_ts_cache, + load_cache, now, safe_process_event, ) @@ -269,7 +269,7 @@ async def main(client: httpx.AsyncClient) -> None: cert = await get_cert(client) - cached_urls = load_ts_cache(CACHE_FILE, 14400) + cached_urls = load_cache(CACHE_FILE, exp=14400) cached_count = len(cached_urls) log.info(f"Collected {cached_count} event(s) from cache") diff --git a/M3U8/scrape/ppv.py b/M3U8/scrape/ppv.py index 7c104af..8e0f6c1 100644 --- a/M3U8/scrape/ppv.py +++ b/M3U8/scrape/ppv.py @@ -16,7 +16,7 @@ from .utils import ( capture_req, get_base, get_logger, - load_ts_cache, + load_cache, now, safe_process_event, ) @@ -179,7 +179,7 @@ async def main(client: httpx.AsyncClient) -> None: log.info(f'Scraping from "{base_url}"') - cached_urls = load_ts_cache(CACHE_FILE, 14400) + cached_urls = load_cache(CACHE_FILE, exp=14400) cached_count = len(cached_urls) log.info(f"Collected {cached_count} event(s) from cache") diff --git a/M3U8/scrape/streambtw.py b/M3U8/scrape/streambtw.py index 2726b9f..e748513 100644 --- a/M3U8/scrape/streambtw.py +++ b/M3U8/scrape/streambtw.py @@ -6,11 +6,11 @@ from urllib.parse import urljoin import httpx from selectolax.parser import HTMLParser -from .utils import get_logger, load_ts_cache, now, safe_process_event +from .utils import get_logger, load_cache, safe_process_event log = get_logger(__name__) -urls: dict[str, dict[str, str | float]] = {} +urls: dict[str, dict[str, str]] = {} BASE_URL = "https://streambtw.com/" @@ -22,7 
+22,6 @@ async def process_event( url: str, url_num: int, ) -> str | None: - try: r = await client.get(url) r.raise_for_status() @@ -80,7 +79,7 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]: async def main(client: httpx.AsyncClient) -> None: - if cached := load_ts_cache(CACHE_FILE, 86400): # find out when site updates + if cached := load_cache(CACHE_FILE, hour=12): urls.update(cached) log.info(f"Collected {len(urls)} event(s) from cache") return @@ -102,7 +101,6 @@ async def main(client: httpx.AsyncClient) -> None: entry = { "url": url, "logo": ev["logo"], - "timestamp": now.timestamp(), } urls[f"[{ev['sport']}] {ev['event']}"] = entry diff --git a/M3U8/scrape/tvpass.py b/M3U8/scrape/tvpass.py index 51559a0..1c0535f 100644 --- a/M3U8/scrape/tvpass.py +++ b/M3U8/scrape/tvpass.py @@ -4,7 +4,7 @@ from pathlib import Path import httpx -from .utils import LOGOS, get_logger, now +from .utils import LOGOS, get_logger, load_cache log = get_logger(__name__) @@ -15,14 +15,6 @@ BASE_URL = "https://tvpass.org/playlist/m3u" CACHE_FILE = Path(__file__).parent / "caches" / "tvpass.json" -def load_cache() -> dict[str, str]: - try: - data = json.loads(CACHE_FILE.read_text(encoding="utf-8")) - return {} if now.hour <= 12 else data - except (FileNotFoundError, json.JSONDecodeError): - return {} - - async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]: try: r = await client.get(BASE_URL) @@ -35,7 +27,7 @@ async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]: async def main(client: httpx.AsyncClient) -> None: - if cached := load_cache(): + if cached := load_cache(CACHE_FILE, hour=12): urls.update(cached) log.info(f"Collected {len(urls)} event(s) from cache") return diff --git a/M3U8/scrape/utils/__init__.py b/M3U8/scrape/utils/__init__.py index b8884d4..de1bf57 100644 --- a/M3U8/scrape/utils/__init__.py +++ b/M3U8/scrape/utils/__init__.py @@ -4,7 +4,7 @@ from .config import ( capture_req, get_base, get_logger, - load_ts_cache, + load_cache, 
now, safe_process_event, ) @@ -15,7 +15,7 @@ __all__ = [ "capture_req", "get_base", "get_logger", - "load_ts_cache", + "load_cache", "now", "safe_process_event", ] diff --git a/M3U8/scrape/utils/config.py b/M3U8/scrape/utils/config.py index 36b4c51..bfa0940 100644 --- a/M3U8/scrape/utils/config.py +++ b/M3U8/scrape/utils/config.py @@ -68,20 +68,23 @@ def get_logger(name: str | None = None) -> logging.Logger: return logger -def load_ts_cache( +def load_cache( file: Path, - cache_exp: int | float, + exp: int | float | None = None, + hour: int | None = None, ) -> dict[str, dict[str, str | float]]: try: - data: dict[str, dict[str, str | float]] = json.loads( - file.read_text(encoding="utf-8") - ) + data = json.loads(file.read_text(encoding="utf-8")) - return { - k: v - for k, v in data.items() - if now.timestamp() - v.get("timestamp", 0) < cache_exp - } + if exp is not None: + return { + k: v + for k, v in data.items() + if now.timestamp() - v.get("timestamp", 0) < exp + } + + elif hour is not None: + return {} if now.hour <= hour else data except (FileNotFoundError, json.JSONDecodeError): return {}