diff --git a/M3U8/fetch.py b/M3U8/fetch.py
index 9846300..6327601 100644
--- a/M3U8/fetch.py
+++ b/M3U8/fetch.py
@@ -77,7 +77,7 @@ async def main() -> None:
 
     await asyncio.gather(*tasks)
 
-    #await watchfooty.scrape()
+    await watchfooty.scrape()
 
     additions = (
         cdnlivetv.urls
diff --git a/M3U8/scrapers/totalsportek.py b/M3U8/scrapers/totalsportek.py
index 65eb325..2a92dd7 100644
--- a/M3U8/scrapers/totalsportek.py
+++ b/M3U8/scrapers/totalsportek.py
@@ -14,7 +14,7 @@ TAG = "TOTALSPRTK"
 
 CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=28_800)
 
-BASE_URL = "https://live2.totalsportek777.com/"
+BASE_URL = "https://live.totalsportek777.com/"
 
 
 def fix_league(s: str) -> str:
@@ -29,7 +29,7 @@ async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]
 
     soup = HTMLParser(html_data.content)
 
-    if not (iframe := soup.css_first(".box iframe")):
+    if not (iframe := soup.css_first("iframe")):
         log.warning(f"URL {url_num}) No iframe element found.")
         return None, None
 
@@ -53,7 +53,7 @@ async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]
 
     log.info(f"URL {url_num}) Captured M3U8")
 
-    return match[2], iframe_src
+    return bytes.fromhex(match[2]).decode("utf-8"), iframe_src
 
 
 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
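
For context on the last hunk, here is a minimal standalone sketch of what the new return expression does, assuming match[2] captures a hex-encoded URL string; the sample value below is hypothetical and not taken from the scraper.

# Minimal sketch (not part of the patch): demonstrates bytes.fromhex(...).decode("utf-8")
# on a hypothetical hex-encoded M3U8 URL, standing in for match[2].
hex_payload = "68747470733a2f2f6578616d706c652e636f6d2f73747265616d2e6d337538"

# bytes.fromhex() converts the hex string to raw bytes; .decode("utf-8") yields the text URL.
url = bytes.fromhex(hex_payload).decode("utf-8")
print(url)  # https://example.com/stream.m3u8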