From 00000d99244bcb8139d1783321e6aef3b7b28b1a Mon Sep 17 00:00:00 2001 From: doms9 <96013514+doms9@users.noreply.github.com> Date: Fri, 19 Dec 2025 17:58:37 -0500 Subject: [PATCH] Use external CDP browser in scrapers, simplify Network.browser, rename TVP tag to TVPASS --- M3U8/scrapers/ppv.py | 2 +- M3U8/scrapers/sport9.py | 2 +- M3U8/scrapers/streamcenter.py | 2 +- M3U8/scrapers/streamhub.py | 2 +- M3U8/scrapers/streamsgate.py | 2 +- M3U8/scrapers/strmd.py | 2 +- M3U8/scrapers/tvpass.py | 2 +- M3U8/scrapers/utils/webwork.py | 19 +++++++------------ M3U8/scrapers/watchfooty.py | 2 +- 9 files changed, 15 insertions(+), 20 deletions(-) diff --git a/M3U8/scrapers/ppv.py b/M3U8/scrapers/ppv.py index 5a0ca62..d4ad414 100644 --- a/M3U8/scrapers/ppv.py +++ b/M3U8/scrapers/ppv.py @@ -108,7 +108,7 @@ async def scrape() -> None: if events: async with async_playwright() as p: - browser, context = await network.browser(p, browser="brave") + browser, context = await network.browser(p, browser="external") for i, ev in enumerate(events, start=1): handler = partial( diff --git a/M3U8/scrapers/sport9.py b/M3U8/scrapers/sport9.py index c8734f9..f3b7ab6 100644 --- a/M3U8/scrapers/sport9.py +++ b/M3U8/scrapers/sport9.py @@ -107,7 +107,7 @@ async def scrape() -> None: now = Time.clean(Time.now()).timestamp() async with async_playwright() as p: - browser, context = await network.browser(p, browser="brave") + browser, context = await network.browser(p, browser="external") for i, ev in enumerate(events, start=1): handler = partial( diff --git a/M3U8/scrapers/streamcenter.py b/M3U8/scrapers/streamcenter.py index d01e8c3..86719db 100644 --- a/M3U8/scrapers/streamcenter.py +++ b/M3U8/scrapers/streamcenter.py @@ -105,7 +105,7 @@ async def scrape() -> None: if events: async with async_playwright() as p: - browser, context = await network.browser(p, browser="brave") + browser, context = await network.browser(p, browser="external") for i, ev in enumerate(events, start=1): handler = partial( diff --git a/M3U8/scrapers/streamhub.py b/M3U8/scrapers/streamhub.py index 1cdc6d6..ad051ad 
100644 --- a/M3U8/scrapers/streamhub.py +++ b/M3U8/scrapers/streamhub.py @@ -149,7 +149,7 @@ async def scrape() -> None: if events: async with async_playwright() as p: - browser, context = await network.browser(p, browser="brave") + browser, context = await network.browser(p, browser="external") for i, ev in enumerate(events, start=1): handler = partial( diff --git a/M3U8/scrapers/streamsgate.py b/M3U8/scrapers/streamsgate.py index d83861e..a6519ad 100644 --- a/M3U8/scrapers/streamsgate.py +++ b/M3U8/scrapers/streamsgate.py @@ -140,7 +140,7 @@ async def scrape() -> None: if events: async with async_playwright() as p: - browser, context = await network.browser(p, browser="brave") + browser, context = await network.browser(p, browser="external") for i, ev in enumerate(events, start=1): handler = partial( diff --git a/M3U8/scrapers/strmd.py b/M3U8/scrapers/strmd.py index c78b2d4..a882152 100644 --- a/M3U8/scrapers/strmd.py +++ b/M3U8/scrapers/strmd.py @@ -137,7 +137,7 @@ async def scrape() -> None: if events: async with async_playwright() as p: - browser, context = await network.browser(p, browser="brave") + browser, context = await network.browser(p, browser="external") for i, ev in enumerate(events, start=1): handler = partial( diff --git a/M3U8/scrapers/tvpass.py b/M3U8/scrapers/tvpass.py index 85c15f6..560734c 100644 --- a/M3U8/scrapers/tvpass.py +++ b/M3U8/scrapers/tvpass.py @@ -6,7 +6,7 @@ log = get_logger(__name__) urls: dict[str, dict[str, str | float]] = {} -TAG = "TVP" +TAG = "TVPASS" CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=86_400) diff --git a/M3U8/scrapers/utils/webwork.py b/M3U8/scrapers/utils/webwork.py index d3c00db..942e0ef 100644 --- a/M3U8/scrapers/utils/webwork.py +++ b/M3U8/scrapers/utils/webwork.py @@ -18,14 +18,14 @@ T = TypeVar("T") class Network: - proxy_base = "https://stream.nvrmind.xyz" - UA = ( "Mozilla/5.0 (Windows NT 10.0; Win64; x64) " "AppleWebKit/537.36 (KHTML, like Gecko) " "Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0" ) + 
proxy_base = "https://stream.nvrmind.xyz" + def __init__(self) -> None: self.client = httpx.AsyncClient( timeout=5, @@ -41,14 +41,12 @@ class Network: query: dict | None = None, ) -> str: - base = network.proxy_base - tag = tag.lower() return ( - f"{urljoin(base, f'{tag}/{path}')}?{urlencode(query)}" + f"{urljoin(network.proxy_base, f'{tag}/{path}')}?{urlencode(query)}" if query - else urljoin(base, f"{tag}/{path}") + else urljoin(network.proxy_base, f"{tag}/{path}") ) async def request( @@ -207,12 +205,9 @@ class Network: @staticmethod async def browser( - playwright: Playwright, - browser: str = "firefox", - ignore_https_errors: bool = False, + playwright: Playwright, browser: str = "internal" ) -> tuple[Browser, BrowserContext]: - - if browser == "brave": + if browser == "external": brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222") context = brwsr.contexts[0] @@ -222,7 +217,7 @@ class Network: context = await brwsr.new_context( user_agent=Network.UA, - ignore_https_errors=ignore_https_errors, + ignore_https_errors=False, viewport={"width": 1366, "height": 768}, device_scale_factor=1, locale="en-US", diff --git a/M3U8/scrapers/watchfooty.py b/M3U8/scrapers/watchfooty.py index 18aa7e7..5b0a3df 100644 --- a/M3U8/scrapers/watchfooty.py +++ b/M3U8/scrapers/watchfooty.py @@ -261,7 +261,7 @@ async def scrape() -> None: if events: async with async_playwright() as p: - browser, context = await network.browser(p, browser="brave") + browser, context = await network.browser(p, browser="external") for i, ev in enumerate(events, start=1): handler = partial(