Compare commits

..

No commits in common. "f8603e08d5232d11e5b260c4683980ef0cc3cd02" and "13e9afae400e8e1a24bdd7fe70c2d8c39ad0807c" have entirely different histories.

14 changed files with 90820 additions and 90154 deletions

175530
EPG/TV.xml

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@@ -62,7 +62,7 @@ async def main() -> None:
asyncio.create_task(streamsgate.scrape()), asyncio.create_task(streamsgate.scrape()),
asyncio.create_task(strmd.scrape()), asyncio.create_task(strmd.scrape()),
asyncio.create_task(tvpass.scrape()), asyncio.create_task(tvpass.scrape()),
# asyncio.create_task(watchfooty.scrape()), asyncio.create_task(watchfooty.scrape()),
asyncio.create_task(webcast.scrape()), asyncio.create_task(webcast.scrape()),
] ]

View file

@@ -108,7 +108,7 @@ async def scrape() -> None:
if events: if events:
async with async_playwright() as p: async with async_playwright() as p:
browser, context = await network.browser(p, browser="external") browser, context = await network.browser(p, browser="brave")
for i, ev in enumerate(events, start=1): for i, ev in enumerate(events, start=1):
handler = partial( handler = partial(

View file

@@ -107,7 +107,7 @@ async def scrape() -> None:
now = Time.clean(Time.now()).timestamp() now = Time.clean(Time.now()).timestamp()
async with async_playwright() as p: async with async_playwright() as p:
browser, context = await network.browser(p, browser="external") browser, context = await network.browser(p, browser="brave")
for i, ev in enumerate(events, start=1): for i, ev in enumerate(events, start=1):
handler = partial( handler = partial(

View file

@@ -105,7 +105,7 @@ async def scrape() -> None:
if events: if events:
async with async_playwright() as p: async with async_playwright() as p:
browser, context = await network.browser(p, browser="external") browser, context = await network.browser(p, browser="brave")
for i, ev in enumerate(events, start=1): for i, ev in enumerate(events, start=1):
handler = partial( handler = partial(

View file

@@ -149,7 +149,7 @@ async def scrape() -> None:
if events: if events:
async with async_playwright() as p: async with async_playwright() as p:
browser, context = await network.browser(p, browser="external") browser, context = await network.browser(p, browser="brave")
for i, ev in enumerate(events, start=1): for i, ev in enumerate(events, start=1):
handler = partial( handler = partial(

View file

@@ -140,7 +140,7 @@ async def scrape() -> None:
if events: if events:
async with async_playwright() as p: async with async_playwright() as p:
browser, context = await network.browser(p, browser="external") browser, context = await network.browser(p, browser="brave")
for i, ev in enumerate(events, start=1): for i, ev in enumerate(events, start=1):
handler = partial( handler = partial(

View file

@@ -137,7 +137,7 @@ async def scrape() -> None:
if events: if events:
async with async_playwright() as p: async with async_playwright() as p:
browser, context = await network.browser(p, browser="external") browser, context = await network.browser(p, browser="brave")
for i, ev in enumerate(events, start=1): for i, ev in enumerate(events, start=1):
handler = partial( handler = partial(

View file

@@ -6,7 +6,7 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {} urls: dict[str, dict[str, str | float]] = {}
TAG = "TVPASS" TAG = "TVP"
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=86_400) CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=86_400)

View file

@@ -18,14 +18,14 @@ T = TypeVar("T")
class Network: class Network:
proxy_base = "https://stream.nvrmind.xyz"
UA = ( UA = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) " "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) " "AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0" "Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
) )
proxy_base = "https://stream.nvrmind.xyz"
def __init__(self) -> None: def __init__(self) -> None:
self.client = httpx.AsyncClient( self.client = httpx.AsyncClient(
timeout=5, timeout=5,
@@ -41,12 +41,14 @@ class Network:
query: dict | None = None, query: dict | None = None,
) -> str: ) -> str:
base = network.proxy_base
tag = tag.lower() tag = tag.lower()
return ( return (
f"{urljoin(network.proxy_base, f'{tag}/{path}')}?{urlencode(query)}" f"{urljoin(base, f'{tag}/{path}')}?{urlencode(query)}"
if query if query
else urljoin(network.proxy_base, f"{tag}/{path}") else urljoin(base, f"{tag}/{path}")
) )
async def request( async def request(
@@ -205,9 +207,12 @@ class Network:
@staticmethod @staticmethod
async def browser( async def browser(
playwright: Playwright, browser: str = "internal" playwright: Playwright,
browser: str = "firefox",
ignore_https_errors: bool = False,
) -> tuple[Browser, BrowserContext]: ) -> tuple[Browser, BrowserContext]:
if browser == "external":
if browser == "brave":
brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222") brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222")
context = brwsr.contexts[0] context = brwsr.contexts[0]
@@ -217,7 +222,7 @@ class Network:
context = await brwsr.new_context( context = await brwsr.new_context(
user_agent=Network.UA, user_agent=Network.UA,
ignore_https_errors=False, ignore_https_errors=ignore_https_errors,
viewport={"width": 1366, "height": 768}, viewport={"width": 1366, "height": 768},
device_scale_factor=1, device_scale_factor=1,
locale="en-US", locale="en-US",

View file

@@ -261,7 +261,7 @@ async def scrape() -> None:
if events: if events:
async with async_playwright() as p: async with async_playwright() as p:
browser, context = await network.browser(p, browser="external") browser, context = await network.browser(p, browser="brave")
for i, ev in enumerate(events, start=1): for i, ev in enumerate(events, start=1):
handler = partial( handler = partial(

View file

@@ -1,10 +1,7 @@
## Base Log @ 2025-12-20 20:39 UTC ## Base Log @ 2025-12-19 20:41 UTC
### ✅ Working Streams: 145<br>❌ Dead Streams: 1 ### ✅ Working Streams: 146<br>❌ Dead Streams: 0
| Channel | Error (Code) | Link |
| ------- | ------------ | ---- |
| FDSN Florida | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/46794` |
--- ---
#### Base Channels URL #### Base Channels URL
``` ```