iptv/M3U8/scrapers/ppv.py

from functools import partial
from playwright.async_api import async_playwright
from .utils import Cache, Time, get_logger, leagues, network
log = get_logger(__name__)
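
# Resolved events for the current run, keyed as "[sport] event (PPV)"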
urls: dict[str, dict[str, str | float]] = {}
TAG = "PPV"
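
# Separate caches: resolved stream URLs and the raw streams API response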
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
API_FILE = Cache(f"{TAG.lower()}-api.json", exp=19_800)
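
# Candidate API and site mirrors; network.get_base() picks a reachable one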
API_MIRRORS = [
    "https://old.ppv.to/api/streams",
    "https://api.ppvs.su/api/streams",
    "https://api.ppv.to/api/streams",
]
BASE_MIRRORS = [
    "https://old.ppv.to",
    "https://ppvs.su",
    "https://ppv.to",
]
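

# Gather upcoming events from the streams API, skipping entries already cached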
async def get_events(api_url: str, cached_keys: list[str]) -> list[dict[str, str | float]]:
    events = []

    if not (api_data := API_FILE.load(per_entry=False)):
log.info("Refreshing API cache")
        api_data = {}

        if r := await network.request(api_url, log=log):
            api_data: dict = r.json()

        API_FILE.write(api_data)
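
    # Only keep events starting within 30 minutes of now, either side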
    now = Time.clean(Time.now())
    start_dt = now.delta(minutes=-30)
    end_dt = now.delta(minutes=30)

    for stream_group in api_data.get("streams", []):
        sport = stream_group["category"]
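        # Round-the-clock channels are not timed events; skip them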
        if sport == "24/7 Streams":
            continue

        for event in stream_group.get("streams", []):
            name = event.get("name")
            start_ts = event.get("starts_at")
            logo = event.get("poster")
            iframe = event.get("iframe")

            if not (name and start_ts and iframe):
                continue
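
            # Skip events that are already in the URL cache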
if f"[{sport}] {name} ({TAG})" in cached_keys:
                continue

            event_dt = Time.from_ts(start_ts)
            if not start_dt <= event_dt <= end_dt:
                continue

            events.append(
                {
                    "sport": sport,
                    "event": name,
                    "link": iframe,
                    "logo": logo,
                    "timestamp": event_dt.timestamp(),
                }
            )

    return events
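

# Resolve stream URLs for new events in a Playwright browser and update the cache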
async def scrape() -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)
    log.info(f"Loaded {cached_count} event(s) from cache")

    base_url = await network.get_base(BASE_MIRRORS)
    api_url = await network.get_base(API_MIRRORS)

    if not (base_url and api_url):
        log.warning("No working PPV mirrors")
        CACHE_FILE.write(cached_urls)
        return

    log.info(f'Scraping from "{base_url}"')
    events = await get_events(api_url, cached_urls.keys())
log.info(f"Processing {len(events)} new URL(s)")
if events:
async with async_playwright() as p:
            browser, context = await network.browser(p, browser="external")
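
            # Capture each event's stream URL from its iframe page (bounded by network.PW_S)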
            try:
                for i, ev in enumerate(events, start=1):
                    handler = partial(
                        network.process_event,
                        url=ev["link"],
                        url_num=i,
                        context=context,
                        timeout=6,
                        log=log,
                    )
                    url = await network.safe_process(
                        handler,
                        url_num=i,
                        semaphore=network.PW_S,
                        log=log,
                    )
                    if url:
                        sport, event, logo, ts, link = (
                            ev["sport"],
                            ev["event"],
                            ev["logo"],
                            ev["timestamp"],
                            ev["link"],
                        )
                        key = f"[{sport}] {event} ({TAG})"
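
                        # Map the sport/event to a tvg-id and fallback artwork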
                        tvg_id, pic = leagues.get_tvg_info(sport, event)
                        entry = {
                            "url": url,
                            "logo": logo or pic,
                            "base": base_url,
                            "timestamp": ts,
                            "id": tvg_id or "Live.Event.us",
                            "link": link,
                        }
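
                        # Store under the same key format checked against the cache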
                        urls[key] = cached_urls[key] = entry

            finally:
                await browser.close()

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)