doms9 2025-10-30 15:38:34 -04:00
parent 566f5c2c80
commit 00000d980a
12 changed files with 337 additions and 547 deletions

View file

@ -1,10 +1,9 @@
import asyncio
from functools import partial
from pathlib import Path
from urllib.parse import urljoin
import httpx
from playwright.async_api import BrowserContext, async_playwright
from playwright.async_api import async_playwright
from .utils import Cache, Time, get_logger, leagues, network
@ -41,62 +40,6 @@ async def refresh_api_cache(
return r.json()
async def process_event(
url: str,
url_num: int,
context: BrowserContext,
) -> str | None:
page = await context.new_page()
captured: list[str] = []
got_one = asyncio.Event()
handler = partial(network.capture_req, captured=captured, got_one=got_one)
page.on("request", handler)
try:
await page.goto(
url,
wait_until="domcontentloaded",
timeout=15_000,
)
wait_task = asyncio.create_task(got_one.wait())
try:
await asyncio.wait_for(wait_task, timeout=6)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
return
finally:
if not wait_task.done():
wait_task.cancel()
try:
await wait_task
except asyncio.CancelledError:
pass
if captured:
log.info(f"URL {url_num}) Captured M3U8")
return captured[-1]
log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
return
except Exception as e:
log.warning(f"URL {url_num}) Exception while processing: {e}")
return
finally:
page.remove_listener("request", handler)
await page.close()
async def get_events(
client: httpx.AsyncClient,
base_url: str,
@ -186,10 +129,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
for i, ev in enumerate(events, start=1):
handler = partial(
process_event,
network.process_event,
url=ev["link"],
url_num=i,
context=context,
timeout=6,
log=log,
)
url = await network.safe_process(
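The hunk is cut off at the safe_process call. Judging from safe_process's signature in the new network module at the end of this diff (fn, url_num, timeout=15, log=None), the presumed completion is the line below; the outer timeout value is an assumption, chosen because safe_process's 15 s default is shorter than process_event's own 15 s page-load budget plus the 6 s M3U8 wait.

    # hypothetical completion of the truncated call above, not verbatim from
    # the diff; the outer timeout is an assumed value sized to outlast
    # process_event's internal 15 s goto timeout plus its 6 s M3U8 wait
    url = await network.safe_process(handler, url_num=i, timeout=30, log=log)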

View file

@ -4,7 +4,7 @@ from pathlib import Path
from urllib.parse import urljoin
import httpx
from playwright.async_api import BrowserContext, async_playwright
from playwright.async_api import async_playwright
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network
@ -25,62 +25,6 @@ CACHE_FILE = Cache(Path(__file__).parent / "caches" / "roxie.json", exp=10_800)
HTML_CACHE = Cache(Path(__file__).parent / "caches" / "roxie_html.json", exp=28_800)
async def process_event(
url: str,
url_num: int,
context: BrowserContext,
) -> str | None:
page = await context.new_page()
captured: list[str] = []
got_one = asyncio.Event()
handler = partial(network.capture_req, captured=captured, got_one=got_one)
page.on("request", handler)
try:
await page.goto(
url,
wait_until="domcontentloaded",
timeout=15_000,
)
wait_task = asyncio.create_task(got_one.wait())
try:
await asyncio.wait_for(wait_task, timeout=15)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
return
finally:
if not wait_task.done():
wait_task.cancel()
try:
await wait_task
except asyncio.CancelledError:
pass
if captured:
log.info(f"URL {url_num}) Captured M3U8")
return captured[-1]
log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
return
except Exception as e:
log.warning(f"URL {url_num}) Exception while processing: {e}")
return
finally:
page.remove_listener("request", handler)
await page.close()
async def refresh_html_cache(
client: httpx.AsyncClient,
url: str,
@ -196,10 +140,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
for i, ev in enumerate(events, start=1):
handler = partial(
process_event,
network.process_event,
url=ev["link"],
url_num=i,
context=context,
timeout=15,
log=log,
)
url = await network.safe_process(

View file

@ -1,10 +1,9 @@
import asyncio
from functools import partial
from pathlib import Path
from urllib.parse import urljoin
import httpx
from playwright.async_api import BrowserContext, async_playwright
from playwright.async_api import async_playwright
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network
@ -33,57 +32,6 @@ MIRRORS = [
]
async def process_event(
url: str,
url_num: int,
context: BrowserContext,
) -> str | None:
page = await context.new_page()
captured: list[str] = []
got_one = asyncio.Event()
handler = partial(network.capture_req, captured=captured, got_one=got_one)
page.on("request", handler)
try:
await page.goto(url, wait_until="domcontentloaded", timeout=15_000)
wait_task = asyncio.create_task(got_one.wait())
try:
await asyncio.wait_for(wait_task, timeout=10)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
return
finally:
if not wait_task.done():
wait_task.cancel()
try:
await wait_task
except asyncio.CancelledError:
pass
if captured:
log.info(f"URL {url_num}) Captured M3U8")
return captured[-1]
log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
return
except Exception as e:
log.warning(f"URL {url_num}) Exception while processing: {e}")
return
finally:
page.remove_listener("request", handler)
await page.close()
async def get_events(
client: httpx.AsyncClient,
url: str,
@ -176,14 +124,18 @@ async def scrape(client: httpx.AsyncClient) -> None:
if events:
async with async_playwright() as p:
browser, context = await network.browser(p, browser="brave")
try:
browser, context = await network.browser(p, browser="brave")
except Exception:
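# the "brave" path needs an already-running browser exposing a CDP endpoint
# on localhost:9222; fall back to the default headless Firefox launch otherwise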
browser, context = await network.browser(p)
for i, ev in enumerate(events, start=1):
handler = partial(
process_event,
network.process_event,
url=ev["link"],
url_num=i,
context=context,
log=log,
)
url = await network.safe_process(
@ -195,11 +147,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
if url:
sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
tvg_id, logo = leagues.info(sport)
if sport == "NBA" and leagues.is_valid(event, "WNBA"):
sport = "WNBA"
tvg_id, logo = leagues.info("WNBA")
tvg_id, logo = leagues.get_tvg_info(sport, event)
key = f"[{sport}] {event} (SEAST)"

View file

@ -1,10 +1,9 @@
import asyncio
import re
from functools import partial
from pathlib import Path
import httpx
from playwright.async_api import BrowserContext, async_playwright
from playwright.async_api import async_playwright
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network
@ -20,58 +19,6 @@ HTML_CACHE = Cache(Path(__file__).parent / "caches" / "streamed_php.json", exp=2
BASE_URL = "https://streamed.site/webmaster.php"
async def process_event(
url: str,
url_num: int,
context: BrowserContext,
) -> str | None:
page = await context.new_page()
captured: list[str] = []
got_one = asyncio.Event()
handler = partial(network.capture_req, captured=captured, got_one=got_one)
page.on("request", handler)
try:
await page.goto(url, wait_until="domcontentloaded", timeout=15_000)
wait_task = asyncio.create_task(got_one.wait())
try:
await asyncio.wait_for(wait_task, timeout=10)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
return
finally:
if not wait_task.done():
wait_task.cancel()
try:
await wait_task
except asyncio.CancelledError:
pass
if captured:
log.info(f"URL {url_num}) Captured M3U8")
return captured[-1]
log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
return
except Exception as e:
log.warning(f"URL {url_num}) Exception while processing: {e}")
return
finally:
page.remove_listener("request", handler)
await page.close()
async def refresh_html_cache(
client: httpx.AsyncClient, url: str
) -> dict[str, str | float]:
@ -180,10 +127,11 @@ async def scrape(client: httpx.AsyncClient) -> None:
for i, ev in enumerate(events, start=1):
handler = partial(
process_event,
network.process_event,
url=ev["link"],
url_num=i,
context=context,
log=log,
)
url = await network.safe_process(

View file

@ -1,4 +1,3 @@
import asyncio
import re
from functools import partial
from pathlib import Path
@ -6,7 +5,7 @@ from typing import Any
from urllib.parse import urljoin
import httpx
from playwright.async_api import BrowserContext, async_playwright
from playwright.async_api import async_playwright
from .utils import Cache, Time, get_logger, leagues, network
@ -50,62 +49,6 @@ async def refresh_api_cache(
return data
async def process_event(
url: str,
url_num: int,
context: BrowserContext,
) -> str | None:
page = await context.new_page()
captured: list[str] = []
got_one = asyncio.Event()
handler = partial(network.capture_req, captured=captured, got_one=got_one)
page.on("request", handler)
try:
await page.goto(
url,
wait_until="domcontentloaded",
timeout=15_000,
)
wait_task = asyncio.create_task(got_one.wait())
try:
await asyncio.wait_for(wait_task, timeout=10)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
return
finally:
if not wait_task.done():
wait_task.cancel()
try:
await wait_task
except asyncio.CancelledError:
pass
if captured:
log.info(f"URL {url_num}) Captured M3U8")
return captured[-1]
log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
return
except Exception as e:
log.warning(f"URL {url_num}) Exception while processing: {e}")
return
finally:
page.remove_listener("request", handler)
await page.close()
async def get_events(
client: httpx.AsyncClient,
base_url: str,
@ -211,10 +154,11 @@ async def scrape(client: httpx.AsyncClient) -> None:
for i, ev in enumerate(events, start=1):
handler = partial(
process_event,
network.process_event,
url=ev["link"],
url_num=i,
context=context,
log=log,
)
url = await network.safe_process(

View file

@ -1,10 +1,9 @@
import asyncio
from functools import partial
from pathlib import Path
from urllib.parse import urljoin
import httpx
from playwright.async_api import BrowserContext, async_playwright
from playwright.async_api import async_playwright
from .utils import Cache, Time, get_logger, leagues, network
@ -39,62 +38,6 @@ async def refresh_api_cache(
return data
async def process_event(
url: str,
url_num: int,
context: BrowserContext,
) -> str | None:
page = await context.new_page()
captured: list[str] = []
got_one = asyncio.Event()
handler = partial(network.capture_req, captured=captured, got_one=got_one)
page.on("request", handler)
try:
await page.goto(
url,
wait_until="domcontentloaded",
timeout=15_000,
)
wait_task = asyncio.create_task(got_one.wait())
try:
await asyncio.wait_for(wait_task, timeout=6)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
return
finally:
if not wait_task.done():
wait_task.cancel()
try:
await wait_task
except asyncio.CancelledError:
pass
if captured:
log.info(f"URL {url_num}) Captured M3U8")
return captured[-1]
log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
return
except Exception as e:
log.warning(f"URL {url_num}) Exception while processing: {e}")
return
finally:
page.remove_listener("request", handler)
await page.close()
async def get_events(
client: httpx.AsyncClient,
url: str,
@ -172,10 +115,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
for i, ev in enumerate(events, start=1):
handler = partial(
process_event,
network.process_event,
url=ev["link"],
url_num=i,
context=context,
timeout=6,
log=log,
)
url = await network.safe_process(

View file

@ -151,7 +151,7 @@ class Leagues:
else self.info("NCAA")
)
case "Basketball":
case "Basketball" | "NBA":
if self.is_valid(event, "NBA"):
return self.info("NBA")

View file

@ -1,186 +1,250 @@
import asyncio
import logging
import re
from collections.abc import Callable
from typing import Any
import httpx
from playwright.async_api import Browser, BrowserContext, Playwright, Request
from .logger import get_logger
class Network:
UA = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
)
def __init__(self) -> None:
self.client = httpx.AsyncClient(
timeout=5,
follow_redirects=True,
headers={"User-Agent": Network.UA},
)
self._logger = get_logger("network")
async def check_status(self, url: str) -> bool:
try:
r = await self.client.get(url)
r.raise_for_status()
return r.status_code == 200
except (httpx.HTTPError, httpx.TimeoutException) as e:
self._logger.debug(f"Status check failed for {url}: {e}")
return False
async def get_base(self, mirrors: list[str]) -> str | None:
tasks = [self.check_status(link) for link in mirrors]
results = await asyncio.gather(*tasks, return_exceptions=True)
working_mirrors = [
# "is True" guards against Exception results from return_exceptions=True,
# which are truthy and would otherwise count a failed check as a working mirror
mirror for mirror, success in zip(mirrors, results) if success is True
]
return working_mirrors[0] if working_mirrors else None
@staticmethod
async def safe_process(
fn: Callable,
url_num: int,
timeout: int | float = 15,
log: logging.Logger | None = None,
) -> Any | None:
if not log:
log = logging.getLogger(__name__)
task = asyncio.create_task(fn())
try:
return await asyncio.wait_for(task, timeout=timeout)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
task.cancel()
try:
await task
except asyncio.CancelledError:
pass
except Exception as e:
log.debug(f"URL {url_num}) Ignore exception after timeout: {e}")
return None
except Exception as e:
log.error(f"URL {url_num}) Unexpected error: {e}")
return None
@staticmethod
def capture_req(
req: Request,
captured: list[str],
got_one: asyncio.Event,
patterns: list[str] | None = None,
) -> None:
if not patterns:
patterns = ["amazonaws", "knitcdn"]
pattern = re.compile(
rf"^(?!.*({'|'.join(patterns)})).*\.m3u8",
re.IGNORECASE,
)
if pattern.search(req.url):
captured.append(req.url)
got_one.set()
@staticmethod
async def browser(
playwright: Playwright,
browser: str = "firefox",
ignore_https_errors: bool = False,
) -> tuple[Browser, BrowserContext]:
if browser == "brave":
brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222")
context = brwsr.contexts[0]
else:
brwsr = await playwright.firefox.launch(headless=True)
context = await brwsr.new_context(
user_agent=Network.UA,
ignore_https_errors=ignore_https_errors,
viewport={"width": 1366, "height": 768},
device_scale_factor=1,
locale="en-US",
timezone_id="America/New_York",
color_scheme="dark",
permissions=["geolocation"],
extra_http_headers={
"Accept-Language": "en-US,en;q=0.9",
"Upgrade-Insecure-Requests": "1",
},
)
await context.add_init_script(
"""
Object.defineProperty(navigator, "webdriver", { get: () => undefined });
Object.defineProperty(navigator, "languages", {
get: () => ["en-US", "en"],
});
Object.defineProperty(navigator, "plugins", {
get: () => [1, 2, 3, 4],
});
const elementDescriptor = Object.getOwnPropertyDescriptor(
HTMLElement.prototype,
"offsetHeight"
);
Object.defineProperty(HTMLDivElement.prototype, "offsetHeight", {
...elementDescriptor,
get: function () {
if (this.id === "modernizr") {
return 24;
}
return elementDescriptor.get.apply(this);
},
});
Object.defineProperty(window.screen, "width", { get: () => 1366 });
Object.defineProperty(window.screen, "height", { get: () => 768 });
const getParameter = WebGLRenderingContext.prototype.getParameter;
WebGLRenderingContext.prototype.getParameter = function (param) {
if (param === 37445) return "Intel Inc."; // UNMASKED_VENDOR_WEBGL
if (param === 37446) return "Intel Iris OpenGL Engine"; // UNMASKED_RENDERER_WEBGL
return getParameter.apply(this, [param]);
};
const observer = new MutationObserver((mutations) => {
mutations.forEach((mutation) => {
mutation.addedNodes.forEach((node) => {
if (node.tagName === "IFRAME" && node.hasAttribute("sandbox")) {
node.removeAttribute("sandbox");
}
});
});
});
observer.observe(document.documentElement, { childList: true, subtree: true });
"""
)
return brwsr, context
network = Network()
__all__ = ["network"]
import asyncio
import logging
import re
from collections.abc import Callable
from functools import partial
from typing import Any
import httpx
from playwright.async_api import Browser, BrowserContext, Playwright, Request
from .logger import get_logger
class Network:
UA = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
)
def __init__(self) -> None:
self.client = httpx.AsyncClient(
timeout=5,
follow_redirects=True,
headers={"User-Agent": Network.UA},
http2=True,
)
self._logger = get_logger("network")
async def check_status(self, url: str) -> bool:
try:
r = await self.client.get(url)
r.raise_for_status()
return r.status_code == 200
except (httpx.HTTPError, httpx.TimeoutException) as e:
self._logger.debug(f"Status check failed for {url}: {e}")
return False
async def get_base(self, mirrors: list[str]) -> str | None:
tasks = [self.check_status(link) for link in mirrors]
results = await asyncio.gather(*tasks, return_exceptions=True)
working_mirrors = [
# "is True" guards against Exception results from return_exceptions=True,
# which are truthy and would otherwise count a failed check as a working mirror
mirror for mirror, success in zip(mirrors, results) if success is True
]
return working_mirrors[0] if working_mirrors else None
@staticmethod
async def safe_process(
fn: Callable,
url_num: int,
timeout: int | float = 15,
log: logging.Logger | None = None,
) -> Any | None:
if not log:
log = logging.getLogger(__name__)
task = asyncio.create_task(fn())
try:
return await asyncio.wait_for(task, timeout=timeout)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
task.cancel()
try:
await task
except asyncio.CancelledError:
pass
except Exception as e:
log.debug(f"URL {url_num}) Ignore exception after timeout: {e}")
return None
except Exception as e:
log.error(f"URL {url_num}) Unexpected error: {e}")
return None
@staticmethod
def capture_req(
req: Request,
captured: list[str],
got_one: asyncio.Event,
) -> None:
invalids = ["amazonaws", "knitcdn"]
escaped = [re.escape(i) for i in invalids]
pattern = re.compile(
rf"^(?!.*({'|'.join(escaped)})).*\.m3u8",
re.IGNORECASE,
)
if pattern.search(req.url):
captured.append(req.url)
got_one.set()
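The rewritten capture_req now escapes the blocklist entries before joining them into the negative lookahead. A minimal standalone check of the resulting filter, using made-up sample URLs:

    import re

    invalids = ["amazonaws", "knitcdn"]
    escaped = [re.escape(i) for i in invalids]
    pattern = re.compile(
        rf"^(?!.*({'|'.join(escaped)})).*\.m3u8",
        re.IGNORECASE,
    )

    # any URL containing .m3u8 passes...
    assert pattern.search("https://cdn.example.com/live/stream.m3u8")
    # ...unless a blocked substring appears anywhere in it (case-insensitive)
    assert not pattern.search("https://foo.amazonaws.com/stream.m3u8")
    assert not pattern.search("https://KnitCDN.net/x/chunklist.m3u8")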
async def process_event(
self,
url: str,
url_num: int,
context: BrowserContext,
timeout: int | float = 10,
log: logging.Logger | None = None,
) -> str | None:
# log is optional in the signature but used unconditionally below;
# fall back to the instance logger instead of raising AttributeError
log = log or self._logger
page = await context.new_page()
captured: list[str] = []
got_one = asyncio.Event()
handler = partial(
self.capture_req,
captured=captured,
got_one=got_one,
)
page.on("request", handler)
try:
await page.goto(
url,
wait_until="domcontentloaded",
timeout=15_000,
)
wait_task = asyncio.create_task(got_one.wait())
try:
await asyncio.wait_for(wait_task, timeout=timeout)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
return
finally:
if not wait_task.done():
wait_task.cancel()
try:
await wait_task
except asyncio.CancelledError:
pass
if captured:
log.info(f"URL {url_num}) Captured M3U8")
return captured[0]
log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
return
except Exception as e:
log.warning(f"URL {url_num}) Exception while processing: {e}")
return
finally:
page.remove_listener("request", handler)
await page.close()
@staticmethod
async def browser(
playwright: Playwright,
browser: str = "firefox",
ignore_https_errors: bool = False,
) -> tuple[Browser, BrowserContext]:
if browser == "brave":
brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222")
context = brwsr.contexts[0]
else:
brwsr = await playwright.firefox.launch(headless=True)
context = await brwsr.new_context(
user_agent=Network.UA,
ignore_https_errors=ignore_https_errors,
viewport={"width": 1366, "height": 768},
device_scale_factor=1,
locale="en-US",
timezone_id="America/New_York",
color_scheme="dark",
permissions=["geolocation"],
extra_http_headers={
"Accept-Language": "en-US,en;q=0.9",
"Upgrade-Insecure-Requests": "1",
},
)
await context.add_init_script(
"""
Object.defineProperty(navigator, "webdriver", { get: () => undefined });
Object.defineProperty(navigator, "languages", {
get: () => ["en-US", "en"],
});
Object.defineProperty(navigator, "plugins", {
get: () => [1, 2, 3, 4],
});
const elementDescriptor = Object.getOwnPropertyDescriptor(
HTMLElement.prototype,
"offsetHeight"
);
Object.defineProperty(HTMLDivElement.prototype, "offsetHeight", {
...elementDescriptor,
get: function () {
if (this.id === "modernizr") {
return 24;
}
return elementDescriptor.get.apply(this);
},
});
Object.defineProperty(window.screen, "width", { get: () => 1366 });
Object.defineProperty(window.screen, "height", { get: () => 768 });
const getParameter = WebGLRenderingContext.prototype.getParameter;
WebGLRenderingContext.prototype.getParameter = function (param) {
if (param === 37445) return "Intel Inc."; // UNMASKED_VENDOR_WEBGL
if (param === 37446) return "Intel Iris OpenGL Engine"; // UNMASKED_RENDERER_WEBGL
return getParameter.apply(this, [param]);
};
const observer = new MutationObserver((mutations) => {
mutations.forEach((mutation) => {
mutation.addedNodes.forEach((node) => {
if (node.tagName === "IFRAME" && node.hasAttribute("sandbox")) {
node.removeAttribute("sandbox");
}
});
});
});
observer.observe(document.documentElement, { childList: true, subtree: true });
"""
)
return brwsr, context
network = Network()
__all__ = ["network"]