From 00000d9a4d3159722b0abf242ec643684592e906 Mon Sep 17 00:00:00 2001
From: doms9 <96013514+doms9@users.noreply.github.com>
Date: Tue, 2 Dec 2025 15:26:13 -0500
Subject: [PATCH] scrapers: collapse wrapped calls and clarify names

Collapse needlessly wrapped calls onto single lines, rename
valid_sports to SPORT_ENDPOINTS and skip_sources to skip_types,
hoist stream_key into a local in streamfree, and iterate dict
values directly instead of unpacking unused keys.
---
 M3U8/scrapers/pixel.py      | 5 +----
 M3U8/scrapers/ppv.py        | 8 +-------
 M3U8/scrapers/roxie.py      | 6 +++---
 M3U8/scrapers/streamfree.py | 6 ++++--
 M3U8/scrapers/strmd.py      | 7 +++----
 M3U8/scrapers/watchfooty.py | 5 +----
 6 files changed, 13 insertions(+), 24 deletions(-)

diff --git a/M3U8/scrapers/pixel.py b/M3U8/scrapers/pixel.py
index 9338317..0e6944c 100644
--- a/M3U8/scrapers/pixel.py
+++ b/M3U8/scrapers/pixel.py
@@ -49,10 +49,7 @@ async def get_events() -> dict[str, dict[str, str | float]]:
 
     events = {}
 
-    pattern = re.compile(
-        r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?",
-        re.IGNORECASE,
-    )
+    pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)
 
     for event in api_data.get("events", []):
         event_dt = Time.from_str(event["date"], timezone="UTC")
diff --git a/M3U8/scrapers/ppv.py b/M3U8/scrapers/ppv.py
index 2e0faeb..e9a6251 100644
--- a/M3U8/scrapers/ppv.py
+++ b/M3U8/scrapers/ppv.py
@@ -41,13 +41,7 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False)):
-        api_data = await refresh_api_cache(
-            client,
-            urljoin(
-                BASE_URL,
-                "api/streams",
-            ),
-        )
+        api_data = await refresh_api_cache(client, urljoin(BASE_URL, "api/streams"))
 
         API_FILE.write(api_data)
 
diff --git a/M3U8/scrapers/roxie.py b/M3U8/scrapers/roxie.py
index 8540394..2b47e1f 100644
--- a/M3U8/scrapers/roxie.py
+++ b/M3U8/scrapers/roxie.py
@@ -18,7 +18,7 @@ HTML_CACHE = Cache("roxie-html.json", exp=19_800)
 
 BASE_URL = "https://roxiestreams.live"
 
-valid_sports = {
+SPORT_ENDPOINTS = {
     "fighting": "Fighting",
     "mlb": "MLB",
     "motorsports": "Racing",
@@ -90,7 +90,7 @@ async def refresh_html_cache(
 
         event_dt = Time.from_str(data_start, timezone="PST")
 
-        event_sport = valid_sports[sport]
+        event_sport = SPORT_ENDPOINTS[sport]
 
         key = f"[{event_sport}] {event} ({TAG})"
 
@@ -156,7 +156,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in valid_sports}
+    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}
 
     events = await get_events(
         client,
diff --git a/M3U8/scrapers/streamfree.py b/M3U8/scrapers/streamfree.py
index f34b9e9..fcdc6c9 100644
--- a/M3U8/scrapers/streamfree.py
+++ b/M3U8/scrapers/streamfree.py
@@ -35,19 +35,21 @@ async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | flo
 
     events = {}
 
-    for _, streams in api_data.get("streams", {}).items():
+    for streams in api_data.get("streams", {}).values():
         if not streams:
             continue
 
         for stream in streams:
             sport, name = stream["league"], stream["name"]
 
+            stream_key = stream["stream_key"]
+
             key = f"[{sport}] {name} ({TAG})"
 
             tvg_id, logo = leagues.get_tvg_info(sport, name)
 
             events[key] = {
-                "url": urljoin(BASE_URL, f"live/{stream['stream_key']}720p/index.m3u8"),
+                "url": urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8"),
                 "logo": logo,
                 "base": BASE_URL,
                 "timestamp": Time.now().timestamp(),
diff --git a/M3U8/scrapers/strmd.py b/M3U8/scrapers/strmd.py
index ac192de..70ab1a2 100644
--- a/M3U8/scrapers/strmd.py
+++ b/M3U8/scrapers/strmd.py
@@ -128,8 +128,7 @@ async def get_events(
 
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         api_data = await refresh_api_cache(
-            client,
-            urljoin(url, "api/matches/all-today"),
+            client, urljoin(url, "api/matches/all-today")
         )
 
         API_FILE.write(api_data)
 
@@ -172,8 +171,8 @@ async def get_events(
         if not sources:
             continue
 
-        skip_sources = {"admin", "alpha", "bravo"}
-        valid_sources = [d for d in sources if d.get("source") not in skip_sources]
+        skip_types = {"admin", "alpha", "bravo"}
+        valid_sources = [d for d in sources if d.get("source") not in skip_types]
 
         if not valid_sources:
             continue
diff --git a/M3U8/scrapers/watchfooty.py b/M3U8/scrapers/watchfooty.py
index eb0e689..dd14179 100644
--- a/M3U8/scrapers/watchfooty.py
+++ b/M3U8/scrapers/watchfooty.py
@@ -59,10 +59,7 @@ async def refresh_api_cache(
 
     log.info("Refreshing API cache")
 
     tasks = [
-        get_api_data(
-            client,
-            urljoin(url, f"api/v1/matches/{sport}"),
-        )
+        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
         for sport in SPORT_ENDPOINTS
     ]
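
---

Notes (appended after the diff; not part of the patch):

A quick standalone sanity check of the m3u8 regex the patch collapses onto
one line in pixel.py. The pattern is taken verbatim from the diff; the
sample text and URLs below are invented for illustration.

    import re

    # Absolute http(s) URL, matched lazily, ending in ".m3u8" with an
    # optional query string; whitespace and quotes terminate the match.
    pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)

    # Hypothetical scraped markup -- these URLs are made up.
    blob = """
    src="https://cdn.example.com/live/abc/index.m3u8?token=123"
    href='HTTP://example.net/feed.M3U8'
    not a stream: https://example.org/video.mp4
    """

    print(pattern.findall(blob))
    # ['https://cdn.example.com/live/abc/index.m3u8?token=123',
    #  'HTTP://example.net/feed.M3U8']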
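The collapsed urljoin calls keep their original semantics, but urljoin only
behaves this way because each BASE_URL is a bare origin. A sketch of the
edge case to watch if a base ever gains a path component (the example.com
bases are hypothetical; BASE_URL is the real value from roxie.py):

    from urllib.parse import urljoin

    BASE_URL = "https://roxiestreams.live"

    # A bare origin resolves relative paths against "/":
    print(urljoin(BASE_URL, "api/streams"))
    # https://roxiestreams.live/api/streams

    # Without a trailing slash, the last path segment is replaced:
    print(urljoin("https://example.com/v1", "api/streams"))
    # https://example.com/api/streams

    # With a trailing slash, the relative path is appended:
    print(urljoin("https://example.com/v1/", "api/streams"))
    # https://example.com/v1/api/streams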