This commit is contained in:
doms9 2025-09-03 15:00:17 -04:00
parent dece752803
commit 00000d9440
12 changed files with 371 additions and 133 deletions

@@ -5,20 +5,16 @@ import ssl
 import xml.etree.ElementTree as ET
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import Any

 import httpx
 from playwright.async_api import Request, async_playwright

-from .tvpass import TZ, logos
-from .utils.logger import get_logger
+from .utils import LOGOS, TZ, get_logger, safe_process_event

 log = get_logger(__name__)

 urls: dict[str, dict[str, str]] = {}
-tvp_sports = set(logos.keys())

 BASE_URL = "https://cdn.livetv861.me/rss/upcoming_en.xml"
 CERT_BUNDL_URLS = [
@@ -32,23 +28,7 @@ CERT_FILE = Path(__file__).parent / "utils" / "cached-ca.pem"
 CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"

-async def safe_process_event(fn, url_num: int, timeout=20) -> Any | None:
-    task = asyncio.create_task(fn())
-
-    try:
-        return await asyncio.wait_for(task, timeout=timeout)
-    except asyncio.TimeoutError:
-        log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
-        task.cancel()
-
-        try:
-            await task
-        except asyncio.CancelledError:
-            pass
-        except Exception as e:
-            log.debug(f"URL {url_num}) Ignore exception after timeout: {e}")
+exist_sprts = set(LOGOS.keys())

 async def write_to_cert(client: httpx.AsyncClient, url: str, cert: Path) -> None:
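
The timeout wrapper now lives in .utils and, judging by the new call site in main() further down, takes the logger explicitly instead of relying on a module-level log. A minimal sketch of the relocated helper under that assumption:

# Plausible shape of the relocated .utils helper; the explicit `log`
# parameter is inferred from the `log=log` call site below, everything
# else mirrors the removed function.
import asyncio
import logging
from collections.abc import Awaitable, Callable
from typing import Any


async def safe_process_event(
    fn: Callable[[], Awaitable[Any]],
    url_num: int,
    log: logging.Logger,
    timeout: int = 20,
) -> Any | None:
    # Create the task here so this helper owns cancellation on timeout.
    task = asyncio.create_task(fn())

    try:
        return await asyncio.wait_for(task, timeout=timeout)
    except asyncio.TimeoutError:
        log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
        task.cancel()

        try:
            await task
        except asyncio.CancelledError:
            pass
        except Exception as e:
            log.debug(f"URL {url_num}) Ignoring exception after timeout: {e}")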
@@ -86,15 +66,13 @@ async def get_cert(client: httpx.AsyncClient) -> ssl.SSLContext:
 def load_cache() -> dict[str, dict[str, str | str]]:
     try:
-        data = json.loads(CACHE_FILE.read_text(encoding="utf-8"))
+        data: dict = json.loads(CACHE_FILE.read_text(encoding="utf-8"))
         now = datetime.now(TZ).timestamp()
-        return {
-            k: v
-            for k, v in data.items()
-            if now - v.get("timestamp", 0) < timedelta(hours=4).total_seconds()
-        }
+        age: float = now - data.get("timestamp", 0)
+
+        return {k: v for k, v in data.items() if age < 14400}  # 4 hours
     except (FileNotFoundError, json.JSONDecodeError):
         return {}
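
The hard-coded 14400 is the old timedelta-based window; note the age is now computed once from a top-level "timestamp" key rather than per entry. A quick check of the constant:

# 14400 s is exactly the old four-hour window.
from datetime import timedelta

assert timedelta(hours=4).total_seconds() == 14400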
@@ -163,15 +141,17 @@ async def parse_feed(
             elem.clear()
             continue

-        elif not tvp_sports & {sport, event}:
-            events.append(
-                {
-                    "sport": sport,
-                    "event": event,
-                    "title": title,
-                    "link": link,
-                }
-            )
+        if not exist_sprts & {sport, event}:
+            continue
+
+        events.append(
+            {
+                "sport": sport,
+                "event": event,
+                "title": title,
+                "link": link,
+            }
+        )

         elem.clear()
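
exist_sprts & {sport, event} is truthy when either name has a known logo; the rewrite turns the filter into a guard clause so the append runs unconditionally afterwards. A toy illustration, with made-up sport names:

# Toy illustration of the set-intersection guard (sport names are made up).
exist_sprts = {"Soccer", "Tennis"}

for sport, event in [("Soccer", "Premier League"), ("Chess", "Blitz")]:
    if not exist_sprts & {sport, event}:
        continue  # neither name has a logo: skip, as in parse_feed
    print(sport)  # only "Soccer" is printed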
@@ -288,7 +268,7 @@ async def process_event(url: str, url_num: int) -> str | None:
             log.warning(f"URL {url_num}) No m3u8 captured in popup or inline playback.")
             return

-    except Exception as e:
+    except Exception:
         try:
             ev_page.remove_listener("request", capture_req)
@@ -310,10 +290,9 @@ async def main(client: httpx.AsyncClient) -> None:
     cert = await get_cert(client)
     cached_urls = load_cache()
-    cached_keys = set(cached_urls.keys())
     cached_count = len(cached_urls)

-    events = await parse_feed(BASE_URL, cert, cached_keys)
+    events = await parse_feed(BASE_URL, cert, set(cached_urls.keys()))

     log.info(f"Processing {len(events)} URLs")
@@ -328,13 +307,15 @@ async def main(client: httpx.AsyncClient) -> None:
         key = f"[{sport}: {event}] {title}"

         url = await safe_process_event(
-            lambda: process_event(link, url_num=num), url_num=num
+            lambda: process_event(link, url_num=num),
+            url_num=num,
+            log=log,
         )

         if url:
             entry = {
                 "url": url,
-                "logo": logos.get(
+                "logo": LOGOS.get(
                     sport,
                     "https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png",
                 ),
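
The helper receives a zero-argument factory rather than a live coroutine, so task creation, and cancellation on timeout, stays inside safe_process_event. A small demo of that behaviour, reusing the helper sketched above; slow is a made-up stand-in for process_event:

# The 5 s coroutine is created inside the helper and cancelled when the
# 1 s timeout fires, so this prints None.
import asyncio
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)


async def slow() -> str:
    await asyncio.sleep(5)
    return "https://example.com/stream.m3u8"


async def demo() -> None:
    print(await safe_process_event(slow, url_num=1, log=log, timeout=1))


asyncio.run(demo())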
@@ -349,4 +330,4 @@ async def main(client: httpx.AsyncClient) -> None:
     log.info(f"Cached {cached_count} event(s)")
-    log.info(f"Collected {new_count} new event(s)")
+    log.info(f"Collected {new_count} event(s)")