iptv/M3U8/scrapers/utils/config.py

165 lines
4.1 KiB
Python
Raw Normal View History

2025-09-03 15:00:17 -04:00
import asyncio
2025-09-04 19:53:27 -04:00
import json
2025-09-03 15:00:17 -04:00
import logging
2025-09-04 19:53:27 -04:00
import re
2025-09-08 12:02:36 -04:00
from collections.abc import Callable
2025-09-04 09:59:19 -04:00
from datetime import datetime
2025-09-03 15:00:17 -04:00
from pathlib import Path
from typing import Any
import httpx
import pytz
2025-09-04 19:53:27 -04:00
from playwright.async_api import Request
2025-09-03 15:00:17 -04:00
# All scraper timestamps/logging are expressed in US Eastern time.
TZ = pytz.timezone("America/New_York")
2025-09-04 09:59:19 -04:00
# NOTE(review): "now" is a snapshot taken once at module import time — any
# freshness comparison against it drifts in a long-running process; confirm
# callers are short-lived scripts.
now = datetime.now(TZ)
2025-09-03 15:00:17 -04:00
# League abbreviation -> logo image URL, used as channel artwork metadata.
LOGOS = {
    "MLB": "https://i.gyazo.com/0fe7865ef2f06c9507791b24f04dbca8.png",
    "NBA": "https://i.gyazo.com/773c23570f095a5d549c23b9401d83f4.png",
    "NCAAF": "https://i.gyazo.com/ca63b40c86e757436de9d34d369b24f8.png",
    "NCAAB": "https://i.gyazo.com/ca63b40c86e757436de9d34d369b24f8.png",
    "NFL": "https://i.gyazo.com/fb4956d7a2fe54a1bac54cd81e1b3f11.png",
    "NHL": "https://i.gyazo.com/526607d4e886d5ed1fecca4bff3115e2.png",
    "WNBA": "https://i.gyazo.com/02d665a5704118d195dbcd5fa20d5462.png",
}
# Shared console log layout: timestamp, padded level, logger name, padded
# message, then the source file:line it came from.
LOG_FMT = (
    "[%(asctime)s] "
    "%(levelname)-8s "
    "[%(name)s] "
    "%(message)-70s "
    "(%(filename)s:%(lineno)d)"
)
# ANSI escape codes per log level, plus the sequence that clears them.
COLORS = {
    "DEBUG": "\033[37m",
    "INFO": "\033[32m",
    "WARNING": "\033[33m",
    "ERROR": "\033[31m",
    "CRITICAL": "\033[41m",
    "reset": "\033[0m",
}


class ColorFormatter(logging.Formatter):
    """Formatter that wraps the level name in a per-level ANSI color."""

    def format(self, record) -> str:
        """Render *record* with a colorized level name, then restore it."""
        original_level = record.levelname
        tint = COLORS.get(original_level, "")
        record.levelname = f"{tint}{original_level}{COLORS['reset']}"
        rendered = super().format(record)
        # Undo the mutation so other handlers see the plain level name.
        record.levelname = original_level
        return rendered
def get_logger(name: str | None = None) -> logging.Logger:
    """Return a logger wired with the colored console formatter.

    Falls back to this module's filename as the logger name. The stream
    handler and INFO level are attached only when no handler is already
    reachable, so repeat calls don't duplicate output.
    """
    logger = logging.getLogger(name or Path(__file__).stem)
    if not logger.hasHandlers():
        stream = logging.StreamHandler()
        stream.setFormatter(ColorFormatter(LOG_FMT, datefmt="%Y-%m-%d | %H:%M:%S"))
        logger.addHandler(stream)
        logger.setLevel(logging.INFO)
    return logger
2025-09-06 16:16:36 -04:00
def near_hr(dt: datetime) -> float:
    """Return the epoch timestamp of *dt* floored to the start of its hour."""
    top_of_hour = dt.replace(minute=0, second=0, microsecond=0)
    return top_of_hour.timestamp()
2025-09-09 13:34:16 -04:00
def is_fresh(
    entry: dict,
    nearest_hr: bool,
    exp: int,
) -> bool:
    """Return True if *entry*'s "timestamp" is within *exp* seconds of now.

    Entries without a "timestamp" key fall back to an ancient epoch value
    (mid-1970) and are therefore always treated as stale.

    Args:
        entry: cache entry, expected to carry an epoch float under "timestamp".
        nearest_hr: floor the timestamp to the top of its hour first, so every
            entry cached within the same hour shares one freshness window.
        exp: expiry window in seconds.
    """
    ts = entry.get("timestamp", 31496400)
    if nearest_hr:
        ts = near_hr(datetime.fromtimestamp(ts))
    # Compare against the current time rather than the module-import snapshot
    # `now`, so freshness stays correct in long-running processes.
    return datetime.now().timestamp() - ts < exp
2025-09-05 10:37:22 -04:00
def load_cache(
    file: Path,
    exp: int | float,
    nearest_hr: bool = False,
    per_entry: bool = True,
) -> dict[str, dict[str, str | float]]:
    """Load a JSON cache file, discarding anything older than *exp* seconds.

    Args:
        file: path to the JSON cache file.
        exp: expiry window in seconds.
        nearest_hr: floor timestamps to the top of their hour (see is_fresh).
        per_entry: when True, each top-level value carries its own
            "timestamp" and is filtered individually; when False, a single
            top-level "timestamp" governs the whole file (all-or-nothing).

    Returns:
        The still-fresh cache contents, or {} when the file is missing,
        unreadable, or not valid JSON.
    """
    try:
        data: dict = json.loads(file.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError):
        # OSError covers FileNotFoundError plus permission/IO failures: a
        # cache that can't be read is simply treated as empty, never fatal.
        return {}
    if per_entry:
        return {k: v for k, v in data.items() if is_fresh(v, nearest_hr, exp)}
    # Whole-file freshness: delegate to is_fresh instead of duplicating its
    # timestamp-fallback and nearest-hour logic inline.
    return data if is_fresh(data, nearest_hr, exp) else {}
2025-09-04 19:53:27 -04:00
2025-09-03 15:00:17 -04:00
async def safe_process_event(
2025-09-08 12:02:36 -04:00
fn: Callable,
2025-09-03 15:00:17 -04:00
url_num: int,
2025-09-08 12:02:36 -04:00
timeout: int | float = 20,
2025-09-03 15:00:17 -04:00
log: logging.Logger | None = None,
) -> Any | None:
if not log:
log = logging.getLogger(__name__)
task = asyncio.create_task(fn())
try:
return await asyncio.wait_for(task, timeout=timeout)
except asyncio.TimeoutError:
log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
task.cancel()
try:
await task
except asyncio.CancelledError:
pass
except Exception as e:
log.debug(f"URL {url_num}) Ignore exception after timeout: {e}")
async def check_status(client: httpx.AsyncClient, url: str) -> bool:
    """Probe *url*; True only for a clean HTTP 200 response.

    Any transport failure or non-success status marks the mirror as down.
    """
    try:
        resp = await client.get(url)
        resp.raise_for_status()
        return resp.status_code == 200
    except Exception:
        return False
async def get_base(client: httpx.AsyncClient, mirrors: list[str]) -> str:
    """Return the first mirror in *mirrors* that responds with HTTP 200.

    All mirrors are probed concurrently; list order decides the winner.

    Raises:
        RuntimeError: when no mirror is reachable — a descriptive error
            instead of the bare IndexError the old `[...][0]` produced.
    """
    checks = [check_status(client, link) for link in mirrors]
    results = await asyncio.gather(*checks)
    for url, ok in zip(mirrors, results):
        if ok:
            return url
    raise RuntimeError(f"No working mirror among: {', '.join(mirrors)}")
2025-09-04 19:53:27 -04:00
# Compiled once at import (instead of per request callback): any URL ending
# in .m3u8 that is NOT served from amazonaws or knitcdn hosts.
_VALID_M3U8 = re.compile(r"^(?!.*(amazonaws|knitcdn)).*\.m3u8")


def capture_req(
    req: "Request",
    captured: list[str],
    got_one: asyncio.Event,
) -> None:
    """Playwright request hook that collects matching .m3u8 stream URLs.

    When the request URL matches _VALID_M3U8, it is appended to *captured*
    and *got_one* is set so a waiting scraper can stop early.
    """
    if _VALID_M3U8.search(req.url):
        captured.append(req.url)
        got_one.set()