doms9 2025-10-09 20:18:37 -04:00
parent cc38eca60b
commit 00000d9383
4 changed files with 333 additions and 298 deletions


@@ -4,7 +4,7 @@ from pathlib import Path
 from urllib.parse import urljoin
 import httpx
-from playwright.async_api import async_playwright
+from playwright.async_api import BrowserContext, async_playwright
 from .utils import Cache, Time, get_logger, leagues, network
@@ -68,60 +68,60 @@ async def refresh_api_cache(
     return r.json()

-async def process_event(url: str, url_num: int) -> str | None:
-    async with async_playwright() as p:
-        browser, context = await network.browser(p)
-        page = await context.new_page()
-        captured: list[str] = []
-        got_one = asyncio.Event()
-        handler = partial(network.capture_req, captured=captured, got_one=got_one)
-        page.on("request", handler)
-        try:
-            await page.goto(
-                url,
-                wait_until="domcontentloaded",
-                timeout=15_000,
-            )
-            wait_task = asyncio.create_task(got_one.wait())
-            try:
-                await asyncio.wait_for(wait_task, timeout=10)
-            except asyncio.TimeoutError:
-                log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
-                return
-            finally:
-                if not wait_task.done():
-                    wait_task.cancel()
-                    try:
-                        await wait_task
-                    except asyncio.CancelledError:
-                        pass
-            if captured:
-                log.info(f"URL {url_num}) Captured M3U8")
-                return captured[-1]
-            log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
-            return
-        except Exception as e:
-            log.warning(f"URL {url_num}) Exception while processing: {e}")
-            return
-        finally:
-            page.remove_listener("request", handler)
-            await page.close()
-            await browser.close()
+async def process_event(
+    url: str,
+    url_num: int,
+    context: BrowserContext,
+) -> str | None:
+    page = await context.new_page()
+    captured: list[str] = []
+    got_one = asyncio.Event()
+    handler = partial(network.capture_req, captured=captured, got_one=got_one)
+    page.on("request", handler)
+    try:
+        await page.goto(
+            url,
+            wait_until="domcontentloaded",
+            timeout=15_000,
+        )
+        wait_task = asyncio.create_task(got_one.wait())
+        try:
+            await asyncio.wait_for(wait_task, timeout=10)
+        except asyncio.TimeoutError:
+            log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
+            return
+        finally:
+            if not wait_task.done():
+                wait_task.cancel()
+                try:
+                    await wait_task
+                except asyncio.CancelledError:
+                    pass
+        if captured:
+            log.info(f"URL {url_num}) Captured M3U8")
+            return captured[-1]
+        log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
+        return
+    except Exception as e:
+        log.warning(f"URL {url_num}) Exception while processing: {e}")
+        return
+    finally:
+        page.remove_listener("request", handler)
+        await page.close()

 async def get_events(
@@ -199,32 +199,41 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

-    for i, ev in enumerate(events, start=1):
-        url = await network.safe_process(
-            lambda: process_event(ev["link"], url_num=i),
-            url_num=i,
-            log=log,
-        )
-        if url:
-            sport, event, logo, ts = (
-                ev["sport"],
-                ev["event"],
-                ev["logo"],
-                ev["timestamp"],
-            )
-            key = f"[{sport}] {event} (PPV)"
-            entry = {
-                "url": url,
-                "logo": logo,
-                "base": base_url,
-                "timestamp": ts,
-                "id": get_tvg(sport, event) or "Live.Event.us",
-            }
-            urls[key] = cached_urls[key] = entry
+    async with async_playwright() as p:
+        browser, context = await network.browser(p)
+        for i, ev in enumerate(events, start=1):
+            url = await network.safe_process(
+                lambda: process_event(
+                    ev["link"],
+                    url_num=i,
+                    context=context,
+                ),
+                url_num=i,
+                log=log,
+            )
+            if url:
+                sport, event, logo, ts = (
+                    ev["sport"],
+                    ev["event"],
+                    ev["logo"],
+                    ev["timestamp"],
+                )
+                key = f"[{sport}] {event} (PPV)"
+                entry = {
+                    "url": url,
+                    "logo": logo,
+                    "base": base_url,
+                    "timestamp": ts,
+                    "id": get_tvg(sport, event) or "Live.Event.us",
+                }
+                urls[key] = cached_urls[key] = entry
+        await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")