diff --git a/M3U8/scrapers/ppv.py b/M3U8/scrapers/ppv.py
index 34ef415..720a34f 100644
--- a/M3U8/scrapers/ppv.py
+++ b/M3U8/scrapers/ppv.py
@@ -15,13 +15,12 @@
 API_FILE = Cache(Path(__file__).parent / "caches" / "ppv_api.json", exp=28_800)
 CACHE_FILE = Cache(Path(__file__).parent / "caches" / "ppv.json", exp=10_800)
 
-MIRRORS = [
-    "https://ppvs.su",
-    "https://ppv.to",
-    "https://ppv.wtf",
-    "https://ppv.land",
-    "https://freeppv.fun",
-]
+# MIRRORS = [
+#     "https://ppv.to",
+#     "https://ppvs.su",
+# ]
+
+BASE_URL = "https://ppv.to"
 
 
 async def refresh_api_cache(
@@ -108,16 +107,16 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
         log.info(f"Loaded {cached_count} event(s) from cache")
 
-    if not (base_url := await network.get_base(MIRRORS)):
-        log.warning("No working PPV mirrors")
-        CACHE_FILE.write(cached_urls)
-        return
+    # if not (base_url := await network.get_base(MIRRORS)):
+    #     log.warning("No working PPV mirrors")
+    #     CACHE_FILE.write(cached_urls)
+    #     return
 
-    log.info(f'Scraping from "{base_url}"')
+    log.info(f'Scraping from "{BASE_URL}"')
 
     events = await get_events(
         client,
-        base_url,
+        BASE_URL,
         set(cached_urls.keys()),
     )
 
@@ -158,7 +157,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
         entry = {
             "url": url,
             "logo": logo or pic,
-            "base": base_url,
+            "base": BASE_URL,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
         }
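
Context for the removed guard: the old code let network.get_base(MIRRORS) pick a live mirror and bailed out early when none responded, whereas the patch pins BASE_URL to https://ppv.to and skips probing entirely. The implementation of network.get_base is not part of this diff, so the sketch below is only an assumption about what such a mirror probe typically looks like; the name first_reachable and the non-5xx acceptance rule are hypothetical.

import asyncio
import httpx

async def first_reachable(mirrors: list[str], timeout: float = 5.0) -> str | None:
    """Return the first mirror that answers with a non-5xx response, else None."""
    async with httpx.AsyncClient(timeout=timeout, follow_redirects=True) as client:
        for base in mirrors:
            try:
                resp = await client.get(base)
            except httpx.HTTPError:
                # Connection errors, timeouts, etc.: try the next mirror.
                continue
            if resp.status_code < 500:
                return base
    return None

if __name__ == "__main__":
    # Example run against the two mirrors kept (commented out) in the patch.
    print(asyncio.run(first_reachable(["https://ppv.to", "https://ppvs.su"])))

With the hardcoded BASE_URL, a dead host no longer triggers the "No working PPV mirrors" early return; failures would instead surface from the requests made inside get_events.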