From 00000d9bd819dfa05ae3024c08dcc56bb45d5aef Mon Sep 17 00:00:00 2001
From: doms9 <96013514+doms9@users.noreply.github.com>
Date: Sun, 11 Jan 2026 12:26:52 -0500
Subject: [PATCH] edit window for embedhd.py, fix links from ppv.py

---
 M3U8/scrapers/embedhd.py |  2 +-
 M3U8/scrapers/ppv.py     | 22 ++++++----------------
 2 files changed, 7 insertions(+), 17 deletions(-)

diff --git a/M3U8/scrapers/embedhd.py b/M3U8/scrapers/embedhd.py
index 2ae63d2..288c445 100644
--- a/M3U8/scrapers/embedhd.py
+++ b/M3U8/scrapers/embedhd.py
@@ -37,7 +37,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
         API_CACHE.write(api_data)
 
     events = []
-    start_dt = now.delta(minutes=-30)
+    start_dt = now.delta(hours=-1)
     end_dt = now.delta(hours=3)
 
     for info in api_data.get("days", []):
diff --git a/M3U8/scrapers/ppv.py b/M3U8/scrapers/ppv.py
index 4449116..5f7b301 100644
--- a/M3U8/scrapers/ppv.py
+++ b/M3U8/scrapers/ppv.py
@@ -14,20 +14,14 @@
 CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 API_FILE = Cache(f"{TAG.lower()}-api.json", exp=19_800)
 
-API_MIRRORS = [
+MIRRORS = [
     "https://old.ppv.to/api/streams",
     "https://api.ppvs.su/api/streams",
     "https://api.ppv.to/api/streams",
 ]
 
-BASE_MIRRORS = [
-    "https://old.ppv.to",
-    "https://ppvs.su",
-    "https://ppv.to",
-]
-
 
-async def get_events(api_url: str, cached_keys: list[str]) -> list[dict[str, str]]:
+async def get_events(url: str, cached_keys: list[str]) -> list[dict[str, str]]:
     now = Time.clean(Time.now())
 
     if not (api_data := API_FILE.load(per_entry=False)):
@@ -35,7 +29,7 @@ async def get_events(api_url: str, cached_keys: list[str]) -> list[dict[str, str
 
         api_data = {"timestamp": now.timestamp()}
 
-        if r := await network.request(api_url, log=log):
+        if r := await network.request(url, log=log):
             api_data: dict = r.json()
 
             API_FILE.write(api_data)
@@ -93,11 +87,7 @@ async def scrape() -> None:
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
-    base_url = await network.get_base(BASE_MIRRORS)
-
-    api_url = await network.get_base(API_MIRRORS)
-
-    if not (base_url and api_url):
+    if not (base_url := await network.get_base(MIRRORS)):
        log.warning("No working PPV mirrors")
 
         CACHE_FILE.write(cached_urls)
@@ -106,7 +96,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{base_url}"')
 
-    events = await get_events(api_url, cached_urls.keys())
+    events = await get_events(base_url, cached_urls.keys())
 
     log.info(f"Processing {len(events)} new URL(s)")
 
@@ -148,7 +138,7 @@ async def scrape() -> None:
         entry = {
             "url": url,
             "logo": logo or pic,
-            "base": base_url,
+            "base": link,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
             "link": link,
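
The ppv.py changes above fold the two mirror lists into a single MIRRORS list and guard scrape() with one walrus check on network.get_base(MIRRORS). The repo's network helper is not included in this patch, so the following is only a rough, self-contained sketch of that fallback pattern; get_base() here is a stand-in written with httpx and is an assumption, not the project's actual implementation.

from __future__ import annotations

import asyncio

import httpx

MIRRORS = [
    "https://old.ppv.to/api/streams",
    "https://api.ppvs.su/api/streams",
    "https://api.ppv.to/api/streams",
]


async def get_base(mirrors: list[str]) -> str | None:
    # Stand-in for network.get_base: probe mirrors in order and
    # return the first one that answers with HTTP 200, else None.
    async with httpx.AsyncClient(timeout=10) as client:
        for url in mirrors:
            try:
                r = await client.get(url)
            except httpx.HTTPError:
                continue

            if r.status_code == 200:
                return url

    return None


async def main() -> None:
    # Same walrus-style guard used in scrape() above.
    if not (base_url := await get_base(MIRRORS)):
        print("No working PPV mirrors")
        return

    print(f'Scraping from "{base_url}"')


if __name__ == "__main__":
    asyncio.run(main())

Probing in list order keeps old.ppv.to preferred while still falling back to the other hosts when it is unreachable, and the single falsy return lets the caller bail out with one check instead of two.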