From 00000d92e48b2794c9b5eff03687f7ddf9103ec7 Mon Sep 17 00:00:00 2001 From: doms9 <96013514+doms9@users.noreply.github.com> Date: Tue, 14 Apr 2026 20:55:38 -0400 Subject: [PATCH] webcast: build check_stream URL from the event's sport base URL --- M3U8/scrapers/webcast.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/M3U8/scrapers/webcast.py b/M3U8/scrapers/webcast.py index 3e3de884..95d6f63f 100644 --- a/M3U8/scrapers/webcast.py +++ b/M3U8/scrapers/webcast.py @@ -2,6 +2,7 @@ import ast import asyncio import re from functools import partial +from urllib.parse import urljoin from selectolax.parser import HTMLParser @@ -26,7 +27,12 @@ def fix_event(s: str) -> str: return " vs ".join(s.split("@")) -async def process_event(url: str, url_num: int) -> str | None: +async def process_event( +    url: str, +    url_num: int, +    sport: str, +) -> str | None: + if not (event_data := await network.request(url, log=log)): log.warning(f"URL {url_num}) Failed to load url.") return @@ -67,7 +73,7 @@ async def process_event(url: str, url_num: int) -> str | None: if not ( api_data := await network.request( - "https://mlbwebcast.com/stream/check_stream.php", + urljoin(BASE_URLS[sport], "stream/check_stream.php"), headers={"Referer": iframe_src}, params=params, log=log, @@ -153,6 +159,7 @@ async def scrape() -> None: process_event, url=(link := ev["link"]), url_num=i, + sport=(sport := ev["sport"]), ) url = await network.safe_process( @@ -162,7 +169,7 @@ async def scrape() -> None: log=log, ) - sport, event = ev["sport"], ev["event"] + event = ev["event"] key = f"[{sport}] {event} ({TAG})"