From 00000d91340120a23d7b2d09da8b743c9f4adf7b Mon Sep 17 00:00:00 2001
From: doms9 <96013514+doms9@users.noreply.github.com>
Date: Sun, 19 Oct 2025 11:38:06 -0400
Subject: [PATCH] Re-enable ppv scraper

---
 M3U8/fetch.py                  | 13 ++++++-
 M3U8/scrapers/old/livetvsx.py  |  1 -
 M3U8/scrapers/{old => }/ppv.py | 70 +++++++++-------------------------
 M3U8/scrapers/streameast.py    |  1 -
 M3U8/scrapers/streamed.py      |  1 -
 M3U8/scrapers/strmd.py         |  5 +--
 M3U8/scrapers/watchfooty.py    |  1 -
 7 files changed, 33 insertions(+), 59 deletions(-)
 rename M3U8/scrapers/{old => }/ppv.py (75%)

diff --git a/M3U8/fetch.py b/M3U8/fetch.py
index 4d7f4df..4323869 100644
--- a/M3U8/fetch.py
+++ b/M3U8/fetch.py
@@ -2,7 +2,16 @@ import asyncio
 
 from pathlib import Path
 
-from scrapers import fstv, streambtw, streameast, streamed, strmd, tvpass, watchfooty
+from scrapers import (
+    fstv,
+    ppv,
+    streambtw,
+    streameast,
+    streamed,
+    strmd,
+    tvpass,
+    watchfooty,
+)
 from scrapers.utils import get_logger, network
 
 log = get_logger(__name__)
@@ -29,6 +38,7 @@ async def main() -> None:
 
     tasks = [
         asyncio.create_task(fstv.scrape(network.client)),
+        asyncio.create_task(ppv.scrape(network.client)),
         asyncio.create_task(streambtw.scrape(network.client)),
         asyncio.create_task(streameast.scrape(network.client)),
         asyncio.create_task(streamed.scrape(network.client)),
@@ -41,6 +51,7 @@ async def main() -> None:
 
     additions = (
         fstv.urls
+        | ppv.urls
         | streambtw.urls
         | streameast.urls
         | streamed.urls
diff --git a/M3U8/scrapers/old/livetvsx.py b/M3U8/scrapers/old/livetvsx.py
index de217c6..ce9bca4 100644
--- a/M3U8/scrapers/old/livetvsx.py
+++ b/M3U8/scrapers/old/livetvsx.py
@@ -182,7 +182,6 @@ async def process_event(
 
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
-
         return captured[-1]
 
     log.warning(f"URL {url_num}) No M3U8 captured")
diff --git a/M3U8/scrapers/old/ppv.py b/M3U8/scrapers/ppv.py
similarity index 75%
rename from M3U8/scrapers/old/ppv.py
rename to M3U8/scrapers/ppv.py
index 4844260..056639b 100644
--- a/M3U8/scrapers/old/ppv.py
+++ b/M3U8/scrapers/ppv.py
@@ -25,36 +25,9 @@ MIRRORS = [
 ]
 
 
-def get_tvg(sport: str, event: str) -> str:
-    match sport:
-        case "American Football":
-            if leagues.is_valid(event, "NFL"):
-                return "NFL.Dummy.us"
-
-            else:
-                return "NCAA.Sports.Dummy.us"
-
-        case "Basketball":
-            if leagues.is_valid(event, "NBA"):
-                return "NBA.Basketball.Dummy.us"
-
-            elif leagues.is_valid(event, "WNBA"):
-                return "WNBA.dummy.us"
-
-            # NCAA
-
-            else:
-                return "Basketball.Dummy.us"
-
-        case "Ice Hockey":
-            return "NHL.Hockey.Dummy.us"
-
-        case _:
-            return leagues.info(sport)[0]
-
-
 async def refresh_api_cache(
-    client: httpx.AsyncClient, url: str
+    client: httpx.AsyncClient,
+    url: str,
 ) -> dict[str, dict[str, str]]:
 
     log.info("Refreshing API cache")
@@ -73,6 +46,7 @@ async def process_event(
     url_num: int,
     context: BrowserContext,
 ) -> str | None:
+
     page = await context.new_page()
 
     captured: list[str] = []
@@ -93,7 +67,7 @@ async def process_event(
     wait_task = asyncio.create_task(got_one.wait())
 
     try:
-        await asyncio.wait_for(wait_task, timeout=10)
+        await asyncio.wait_for(wait_task, timeout=6)
     except asyncio.TimeoutError:
         log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
         return
@@ -109,7 +83,6 @@ async def process_event(
 
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
-
         return captured[-1]
 
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
@@ -129,6 +102,7 @@ async def get_events(
     base_url: str,
    cached_keys: set[str],
 ) -> list[dict[str, str]]:
+
     if not (api_data := API_FILE.load(per_entry=False)):
         api_data = await refresh_api_cache(
             client,
@@ -153,12 +127,13 @@ async def get_events(
             continue
 
         for event in stream_group["streams"]:
-            name, start_ts, logo, iframe = (
-                event["name"],
-                event["starts_at"],
-                event["poster"],
-                event["iframe"],
-            )
+            name = event.get("name")
+            start_ts = event.get("starts_at")
+            logo = event.get("poster")
+            iframe = event.get("iframe")
+
+            if not (name and start_ts and iframe):
+                continue
 
             key = f"[{sport}] {name} (PPV)"
 
@@ -209,15 +184,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
         browser, context = await network.browser(p)
 
         for i, ev in enumerate(events, start=1):
-            url = await network.safe_process(
-                lambda: process_event(
-                    ev["link"],
-                    url_num=i,
-                    context=context,
-                ),
-                url_num=i,
-                log=log,
-            )
+            handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+
+            url = await network.safe_process(handler, url_num=i, log=log)
 
             if url:
                 sport, event, logo, ts = (
@@ -229,12 +198,14 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
                 key = f"[{sport}] {event} (PPV)"
 
+                tvg_id, pic = leagues.get_tvg_info(sport, event)
+
                 entry = {
                     "url": url,
-                    "logo": logo,
+                    "logo": logo or pic,
                     "base": base_url,
                     "timestamp": ts,
-                    "id": get_tvg(sport, event) or "Live.Event.us",
+                    "id": tvg_id or "Live.Event.us",
                 }
 
                 urls[key] = cached_urls[key] = entry
@@ -247,6 +218,3 @@ async def scrape(client: httpx.AsyncClient) -> None:
         log.info("No new events found")
 
     CACHE_FILE.write(cached_urls)
-
-
-# same provider as strmd.py
diff --git a/M3U8/scrapers/streameast.py b/M3U8/scrapers/streameast.py
index f95d080..5c70abf 100644
--- a/M3U8/scrapers/streameast.py
+++ b/M3U8/scrapers/streameast.py
@@ -71,7 +71,6 @@ async def process_event(
 
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
-
         return captured[-1]
 
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
diff --git a/M3U8/scrapers/streamed.py b/M3U8/scrapers/streamed.py
index 67a01ad..dddefb4 100644
--- a/M3U8/scrapers/streamed.py
+++ b/M3U8/scrapers/streamed.py
@@ -58,7 +58,6 @@ async def process_event(
 
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
-
         return captured[-1]
 
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
diff --git a/M3U8/scrapers/strmd.py b/M3U8/scrapers/strmd.py
index 2fc204c..a6a0dff 100644
--- a/M3U8/scrapers/strmd.py
+++ b/M3U8/scrapers/strmd.py
@@ -92,7 +92,6 @@ async def process_event(
 
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
-
         return captured[-1]
 
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
@@ -164,8 +163,8 @@ async def get_events(
         if not sources:
             continue
 
-        source = sources[0]
-        # source = sources[1] if len(sources) > 1 else sources[0]
+        # source = sources[0]
+        source = sources[1] if len(sources) > 1 else sources[0]
 
         source_type = source.get("source")
         stream_id = source.get("id")
diff --git a/M3U8/scrapers/watchfooty.py b/M3U8/scrapers/watchfooty.py
index 330a572..50f5e62 100644
--- a/M3U8/scrapers/watchfooty.py
+++ b/M3U8/scrapers/watchfooty.py
@@ -131,7 +131,6 @@ async def process_event(
 
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
-
         return captured[-1]
 
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")