diff --git a/M3U8/scrapers/fstv.py b/M3U8/scrapers/fstv.py
index e27564f..d79dbee 100644
--- a/M3U8/scrapers/fstv.py
+++ b/M3U8/scrapers/fstv.py
@@ -131,6 +131,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
     log.info(f"Processing {len(events)} new URL(s)")
 
+    now = Time.now().timestamp()
+
     for i, ev in enumerate(events, start=1):
         match_name, url = await network.safe_process(
             lambda: process_event(
@@ -155,7 +157,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 "url": url,
                 "logo": logo,
                 "base": base_url,
-                "timestamp": Time.now().timestamp(),
+                "timestamp": now,
                 "id": tvg_id or "Live.Event.us",
                 "href": ev["href"],
             }
diff --git a/M3U8/scrapers/livetvsx.py b/M3U8/scrapers/livetvsx.py
index ac2d041..74112f0 100644
--- a/M3U8/scrapers/livetvsx.py
+++ b/M3U8/scrapers/livetvsx.py
@@ -210,7 +210,6 @@ async def get_events(
 
     events: list[dict[str, str]] = []
 
     now = Time.now()
-    start_dt = now.delta(minutes=-30)
     end_dt = now.delta(minutes=30)
 
diff --git a/M3U8/scrapers/ppv.py b/M3U8/scrapers/ppv.py
index a668a3b..466a269 100644
--- a/M3U8/scrapers/ppv.py
+++ b/M3U8/scrapers/ppv.py
@@ -126,14 +126,17 @@ async def get_events(
     base_url: str,
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
-
-    events: list[dict[str, str]] = []
-
     if not (api_data := API_FILE.load(nearest_hr=True, per_entry=False)):
         api_data = await refresh_api_cache(client, urljoin(base_url, "api/streams"))
         API_FILE.write(api_data)
 
+    events: list[dict[str, str]] = []
+
+    now = Time.now()
+    start_dt = now.delta(minutes=-30)
+    end_dt = now.delta(minutes=30)
+
     for stream_group in api_data["streams"]:
         sport = stream_group["category"]
 
@@ -141,10 +144,9 @@ async def get_events(
             continue
 
         for event in stream_group["streams"]:
-            name, start_ts, end_ts, logo, uri_name = (
+            name, start_ts, logo, uri_name = (
                 event["name"],
                 event["starts_at"],
-                event["ends_at"],
                 event["poster"],
                 event["uri_name"],
             )
@@ -154,11 +156,9 @@ async def get_events(
             if cached_keys & {key}:
                 continue
 
-            start_dt = Time.from_ts(start_ts).delta(minutes=-30)
+            event_dt = Time.from_ts(start_ts)
 
-            end_dt = Time.from_ts(end_ts).delta(minutes=30)
-
-            if not start_dt <= Time.now() < end_dt:
+            if not start_dt <= event_dt < end_dt:
                 continue
 
             events.append(
@@ -167,7 +167,7 @@ async def get_events(
                     "event": name,
                     "link": urljoin(base_url, f"live/{uri_name}"),
                     "logo": logo,
-                    "timestamp": start_dt.timestamp(),
+                    "timestamp": event_dt.timestamp(),
                 }
             )
diff --git a/M3U8/scrapers/streambtw.py b/M3U8/scrapers/streambtw.py
index 4b898c2..8474309 100644
--- a/M3U8/scrapers/streambtw.py
+++ b/M3U8/scrapers/streambtw.py
@@ -86,6 +86,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
     log.info(f"Processing {len(events)} new URL(s)")
 
+    now = Time.now().timestamp()
+
     for i, ev in enumerate(events, start=1):
         url = await network.safe_process(
             lambda: process_event(client, url=ev["link"], url_num=i),
@@ -105,7 +107,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 "url": url,
                 "logo": logo,
                 "base": BASE_URL,
-                "timestamp": Time.now().timestamp(),
+                "timestamp": now,
                 "id": tvg_id or "Live.Event.us",
             }
diff --git a/M3U8/scrapers/tvpass.py b/M3U8/scrapers/tvpass.py
index 5207a30..2517e39 100644
--- a/M3U8/scrapers/tvpass.py
+++ b/M3U8/scrapers/tvpass.py
@@ -33,6 +33,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
+    now = Time.now().timestamp()
+
     if not (data := await fetch_m3u8(client)):
         log.warning("No M3U8 data received")
         return
@@ -62,7 +64,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
             "logo": logo,
             "id": tvg_id or "Live.Event.us",
             "base": "https://tvpass.org",
-            "timestamp": Time.now().timestamp(),
+            "timestamp": now,
         }
 
         urls[key] = entry
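Note on the ppv.py change: the old code derived a window from each event's own
starts_at/ends_at (padded by 30 minutes) and tested Time.now() against it; the
new code builds a single now +/- 30 min window once and tests each event's
start time against that, which is also why ends_at drops out of the tuple
unpack. The other four files only hoist Time.now().timestamp() above their
loops so every entry written in one scrape pass shares the same timestamp.
A minimal sketch of the new filter, assuming (not confirmed by this diff) that
the project's Time helper mirrors the stdlib calls below (Time.now ~
datetime.now, Time.from_ts ~ fromtimestamp, delta ~ adding a timedelta):

    from datetime import datetime, timedelta, timezone

    def in_window(starts_at: float, now: datetime | None = None) -> bool:
        """True if an event's start time falls inside now +/- 30 minutes."""
        # Stand-ins for Time.now() / .delta(); assumed equivalents only.
        now = now or datetime.now(timezone.utc)
        start_dt = now - timedelta(minutes=30)  # window opened 30 min ago
        end_dt = now + timedelta(minutes=30)    # window closes 30 min ahead
        event_dt = datetime.fromtimestamp(starts_at, timezone.utc)
        return start_dt <= event_dt < end_dt

One behavioral consequence worth flagging in review: because ends_at is no
longer consulted, an event that started more than 30 minutes ago is now
skipped even if it is still in progress.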