M3U8: migrate the webcast scraper from Playwright to plain HTTP requests.

fetch.py: webcast.scrape() moves from the Playwright task group (pw_tasks,
where it received a Browser) to the httpx task group and is now called with
no arguments.

scrapers/webcast.py: drops the playwright import and the Browser parameter;
adds a local process_event() coroutine that fetches the event page, follows
the iframe[name="srcFrame"] source, regex-extracts the Clappr (id, ts, pt)
triple from the embedded script, and resolves the M3U8 URL via
check_stream.php. Cache expiry is lowered from 19_800s to 12_600s. The
scrape() loop is dedented accordingly (browser context/page managers gone).

NOTE(review): the capture group in r'var\s+\w*=\[([^"]*)\];' excludes double
quotes, so ast.literal_eval(match[1]) presumably parses an unquoted (e.g.
numeric) triple -- confirm against the page's actual JS.
NOTE(review): ast.literal_eval can raise SyntaxError as well as ValueError;
only ValueError is caught -- verify malformed-input behavior.
NOTE(review): check_stream.php is hard-coded to mlbwebcast.com while
BASE_URLS carries per-league hosts -- confirm the endpoint is shared.
NOTE(review): the error-JSON branch logs the same "Failed to make php
request." message as the failed-request branch -- possibly intentional.
NOTE(review): this patch's newlines were reconstructed from the hunk
headers (all old/new line counts verified); context-line indentation is
inferred from Python convention -- re-diff against the real tree before
applying.

diff --git a/M3U8/fetch.py b/M3U8/fetch.py
index d90abf73..6a30b287 100644
--- a/M3U8/fetch.py
+++ b/M3U8/fetch.py
@@ -67,7 +67,6 @@ async def main() -> None:
         # asyncio.create_task(fsports.scrape(xtrnl_brwsr)),
         asyncio.create_task(ppv.scrape(xtrnl_brwsr)),
         asyncio.create_task(roxie.scrape(hdl_brwsr)),
-        asyncio.create_task(webcast.scrape(hdl_brwsr)),
     ]
 
     httpx_tasks = [
@@ -84,6 +83,7 @@ async def main() -> None:
         asyncio.create_task(streamtpnew.scrape()),
         asyncio.create_task(totalsportek.scrape()),
         asyncio.create_task(tvapp.scrape()),
+        asyncio.create_task(webcast.scrape()),
     ]
 
     await asyncio.gather(*(pw_tasks + httpx_tasks))
diff --git a/M3U8/scrapers/webcast.py b/M3U8/scrapers/webcast.py
index 6dec453c..3e3de884 100644
--- a/M3U8/scrapers/webcast.py
+++ b/M3U8/scrapers/webcast.py
@@ -1,7 +1,8 @@
+import ast
 import asyncio
+import re
 from functools import partial
 
-from playwright.async_api import Browser
 from selectolax.parser import HTMLParser
 
 from .utils import Cache, Time, get_logger, leagues, network
@@ -12,7 +13,7 @@
 urls: dict[str, dict[str, str | float]] = {}
 
 TAG = "WEBCAST"
-CACHE_FILE = Cache(TAG, exp=19_800)
+CACHE_FILE = Cache(TAG, exp=12_600)
 
 BASE_URLS = {
     "MLB": "https://mlbwebcast.com",
@@ -25,6 +26,65 @@ def fix_event(s: str) -> str:
     return " vs ".join(s.split("@"))
 
 
+async def process_event(url: str, url_num: int) -> str | None:
+    if not (event_data := await network.request(url, log=log)):
+        log.warning(f"URL {url_num}) Failed to load url.")
+        return
+
+    soup = HTMLParser(event_data.content)
+
+    if not (iframe := soup.css_first('iframe[name="srcFrame"]')):
+        log.warning(f"URL {url_num}) No iframe element found.")
+        return
+
+    if not (iframe_src := iframe.attributes.get("src")):
+        log.warning(f"URL {url_num}) No iframe source found.")
+        return
+
+    if not (
+        iframe_src_data := await network.request(
+            iframe_src,
+            headers={"Referer": url},
+            log=log,
+        )
+    ):
+        log.warning(f"URL {url_num}) Failed to load iframe source.")
+        return
+
+    pattern = re.compile(r'var\s+\w*=\[([^"]*)\];', re.I)
+
+    if not (match := pattern.search(iframe_src_data.text)):
+        log.warning(f"URL {url_num}) No Clappr source found.")
+        return
+
+    try:
+        ev_id, ev_ts, ev_pt = ast.literal_eval(match[1])
+    except ValueError:
+        log.warning(f"URL {url_num}) Failed to parse event info.")
+        return
+
+    params: dict[str, int | str] = dict(zip(["id", "ts", "pt"], [ev_id, ev_ts, ev_pt]))
+
+    if not (
+        api_data := await network.request(
+            "https://mlbwebcast.com/stream/check_stream.php",
+            headers={"Referer": iframe_src},
+            params=params,
+            log=log,
+        )
+    ):
+        log.warning(f"URL {url_num}) Failed to make php request.")
+        return
+
+    elif (data := api_data.json()).get("error"):
+        log.warning(f"URL {url_num}) Failed to make php request.")
+        return
+
+    log.info(f"URL {url_num}) Captured M3U8")
+
+    return data.get("url")
+
+
 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     tasks = [network.request(url, log=log) for url in BASE_URLS.values()]
 
@@ -70,7 +130,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     return events
 
 
-async def scrape(browser: Browser) -> None:
+async def scrape() -> None:
     cached_urls = CACHE_FILE.load()
 
     valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
@@ -88,45 +148,41 @@ async def scrape(browser: Browser) -> None:
 
     now = Time.clean(Time.now())
 
-    async with network.event_context(browser) as context:
-        for i, ev in enumerate(events, start=1):
-            async with network.event_page(context) as page:
-                handler = partial(
-                    network.process_event,
-                    url=(link := ev["link"]),
-                    url_num=i,
-                    page=page,
-                    log=log,
-                )
+    for i, ev in enumerate(events, start=1):
+        handler = partial(
+            process_event,
+            url=(link := ev["link"]),
+            url_num=i,
+        )
 
-                url = await network.safe_process(
-                    handler,
-                    url_num=i,
-                    semaphore=network.PW_S,
-                    log=log,
-                )
+        url = await network.safe_process(
+            handler,
+            url_num=i,
+            semaphore=network.PW_S,
+            log=log,
+        )
 
-            sport, event = ev["sport"], ev["event"]
+        sport, event = ev["sport"], ev["event"]
 
-            key = f"[{sport}] {event} ({TAG})"
+        key = f"[{sport}] {event} ({TAG})"
 
-            tvg_id, logo = leagues.get_tvg_info(sport, event)
+        tvg_id, logo = leagues.get_tvg_info(sport, event)
 
-            entry = {
-                "url": url,
-                "logo": logo,
-                "base": BASE_URLS[sport],
-                "timestamp": now.timestamp(),
-                "id": tvg_id or "Live.Event.us",
-                "link": link,
-            }
+        entry = {
+            "url": url,
+            "logo": logo,
+            "base": BASE_URLS[sport],
+            "timestamp": now.timestamp(),
+            "id": tvg_id or "Live.Event.us",
+            "link": link,
+        }
 
-            cached_urls[key] = entry
+        cached_urls[key] = entry
 
-            if url:
-                valid_count += 1
+        if url:
+            valid_count += 1
 
-            urls[key] = entry
+        urls[key] = entry
 
     log.info(f"Collected and cached {valid_count - cached_count} new event(s)")