diff --git a/M3U8/scrapers/webcast.py b/M3U8/scrapers/webcast.py
index cc3f7f5..92e06f5 100644
--- a/M3U8/scrapers/webcast.py
+++ b/M3U8/scrapers/webcast.py
@@ -16,17 +16,17 @@
 CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=86_400)
 
-BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}
+BASE_URL = "https://slapstreams.com"
 
 
 def fix_event(s: str) -> str:
     return " vs ".join(s.split("@"))
 
 
-async def refresh_html_cache(url: str) -> dict[str, dict[str, str | float]]:
+async def refresh_html_cache() -> dict[str, dict[str, str | float]]:
     events = {}
 
-    if not (html_data := await network.request(url, log=log)):
+    if not (html_data := await network.request(BASE_URL, log=log)):
         return events
 
     now = Time.clean(Time.now())
@@ -85,11 +85,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 
     if not (events := HTML_CACHE.load()):
         log.info("Refreshing HTML cache")
-        tasks = [refresh_html_cache(url) for url in BASE_URLS.values()]
-
-        results = await asyncio.gather(*tasks)
-
-        events = {k: v for data in results for k, v in data.items()}
+        events = await refresh_html_cache()
 
         HTML_CACHE.write(events)
 
@@ -119,7 +115,7 @@ async def scrape() -> None:
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
-    log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
+    log.info(f'Scraping from "{BASE_URL}"')
 
     events = await get_events(cached_urls.keys())
 
@@ -161,7 +157,7 @@ async def scrape() -> None:
             entry = {
                 "url": url,
                 "logo": logo,
-                "base": BASE_URLS[sport],
+                "base": BASE_URL,
                 "timestamp": ts,
                 "id": tvg_id or "Live.Event.us",
                 "link": link,