From 00000d9a6c6503d0da3188497f2f0652c1402c72 Mon Sep 17 00:00:00 2001
From: doms9 <96013514+doms9@users.noreply.github.com>
Date: Wed, 11 Feb 2026 20:29:22 -0500
Subject: [PATCH] edit scraping method for roxie.py

Fetch roxie event pages with a plain HTTP request and pull the M3U8
out of the embedded clappr player source with a regex instead of
driving a Playwright page. scrape() no longer takes a browser, the
live-event window is tightened to +/-30 minutes, and only events that
yield an M3U8 are cached.
---
 M3U8/fetch.py          |   2 +-
 M3U8/scrapers/roxie.py | 100 ++++++++++++++++++++++-------------------
 2 files changed, 54 insertions(+), 48 deletions(-)

diff --git a/M3U8/fetch.py b/M3U8/fetch.py
index ed7b4f44..d53f6d96 100644
--- a/M3U8/fetch.py
+++ b/M3U8/fetch.py
@@ -65,7 +65,6 @@ async def main() -> None:
         asyncio.create_task(embedhd.scrape(hdl_brwsr)),
         asyncio.create_task(pixel.scrape(hdl_brwsr)),
         asyncio.create_task(ppv.scrape(xtrnl_brwsr)),
-        asyncio.create_task(roxie.scrape(hdl_brwsr)),
         asyncio.create_task(sport9.scrape(xtrnl_brwsr)),
         asyncio.create_task(streamcenter.scrape(xtrnl_brwsr)),
         # asyncio.create_task(streamhub.scrape(xtrnl_brwsr)),
@@ -79,6 +78,7 @@ async def main() -> None:
         asyncio.create_task(fawa.scrape()),
         asyncio.create_task(istreameast.scrape()),
         asyncio.create_task(pawa.scrape()),
+        asyncio.create_task(roxie.scrape()),
         asyncio.create_task(shark.scrape()),
         asyncio.create_task(streambtw.scrape()),
         asyncio.create_task(xstreameast.scrape()),
diff --git a/M3U8/scrapers/roxie.py b/M3U8/scrapers/roxie.py
index 21aeecf8..0808f56a 100644
--- a/M3U8/scrapers/roxie.py
+++ b/M3U8/scrapers/roxie.py
@@ -1,8 +1,8 @@
 import asyncio
+import re
 from functools import partial
 from urllib.parse import urljoin
 
-from playwright.async_api import Browser
 from selectolax.parser import HTMLParser
 
 from .utils import Cache, Time, get_logger, leagues, network
@@ -30,6 +30,22 @@ SPORT_ENDPOINTS = {
 }
 
 
+async def process_event(url: str, url_num: int) -> str | None:
+    if not (html_data := await network.request(url, log=log)):
+        return
+
+    valid_m3u8 = re.compile(r"'clappr',\s+'([^\"]*)'", re.IGNORECASE)
+
+    if not (match := valid_m3u8.search(html_data.text)):
+        log.info(f"URL {url_num}) No M3U8 found")
+
+        return
+
+    log.info(f"URL {url_num}) Captured M3U8")
+
+    return match[1]
+
+
 async def refresh_html_cache(
     url: str,
     sport: str,
@@ -99,8 +115,8 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 
     live = []
 
-    start_ts = now.delta(hours=-1).timestamp()
-    end_ts = now.delta(minutes=5).timestamp()
+    start_ts = now.delta(minutes=-30).timestamp()
+    end_ts = now.delta(minutes=30).timestamp()
 
     for k, v in events.items():
         if k in cached_keys:
@@ -114,14 +130,12 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     return live
 
 
-async def scrape(browser: Browser) -> None:
+async def scrape() -> None:
     cached_urls = CACHE_FILE.load()
 
-    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
+    cached_count = len(cached_urls)
 
-    valid_count = cached_count = len(valid_urls)
-
-    urls.update(valid_urls)
+    urls.update(cached_urls)
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
@@ -132,52 +146,44 @@ async def scrape(browser: Browser) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
     if events:
-        async with network.event_context(browser) as context:
-            for i, ev in enumerate(events, start=1):
-                async with network.event_page(context) as page:
-                    handler = partial(
-                        network.process_event,
-                        url=ev["link"],
-                        url_num=i,
-                        page=page,
-                        log=log,
-                    )
+        for i, ev in enumerate(events, start=1):
+            handler = partial(
+                process_event,
+                url=ev["link"],
+                url_num=i,
+            )
 
-                    url = await network.safe_process(
-                        handler,
-                        url_num=i,
-                        semaphore=network.PW_S,
-                        log=log,
-                    )
+            url = await network.safe_process(
+                handler,
+                url_num=i,
+                semaphore=network.HTTP_S,
+                log=log,
+            )
 
-                    sport, event, ts, link = (
-                        ev["sport"],
-                        ev["event"],
-                        ev["event_ts"],
-                        ev["link"],
-                    )
+            if url:
+                sport, event, ts, link = (
+                    ev["sport"],
+                    ev["event"],
+                    ev["event_ts"],
+                    ev["link"],
+                )
 
-                    tvg_id, logo = leagues.get_tvg_info(sport, event)
+                tvg_id, logo = leagues.get_tvg_info(sport, event)
 
-                    key = f"[{sport}] {event} ({TAG})"
+                key = f"[{sport}] {event} ({TAG})"
 
-                    entry = {
-                        "url": url,
-                        "logo": logo,
-                        "base": BASE_URL,
-                        "timestamp": ts,
-                        "id": tvg_id or "Live.Event.us",
-                        "link": link,
-                    }
+                entry = {
+                    "url": url,
+                    "logo": logo,
+                    "base": BASE_URL,
+                    "timestamp": ts,
+                    "id": tvg_id or "Live.Event.us",
+                    "link": link,
+                }
 
-                    cached_urls[key] = entry
+                urls[key] = cached_urls[key] = entry
 
-                    if url:
-                        valid_count += 1
-
-                        urls[key] = entry
-
-    if new_count := valid_count - cached_count:
+    if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
     else: