diff --git a/M3U8/scrapers/roxie.py b/M3U8/scrapers/roxie.py
index 3d502f8..4cacd6b 100644
--- a/M3U8/scrapers/roxie.py
+++ b/M3U8/scrapers/roxie.py
@@ -85,6 +85,7 @@ async def refresh_html_cache(
     client: httpx.AsyncClient,
     url: str,
     sport: str,
+    now_ts: float,
 ) -> dict[str, str | float]:
 
     try:
@@ -93,7 +94,7 @@ async def refresh_html_cache(
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')
 
-        return []
+        return {}
 
     soup = HTMLParser(r.text)
 
@@ -107,7 +108,7 @@ async def refresh_html_cache(
 
         event = a_tag.text(strip=True)
 
-        if not (event_link := a_tag.attributes.get("href")):
+        if not (href := a_tag.attributes.get("href")):
             continue
 
         if not (span := row.css_first("span.countdown-timer")):
@@ -122,9 +123,9 @@ async def refresh_html_cache(
         events[key] = {
             "sport": sport,
             "event": event,
-            "link": event_link,
+            "link": href,
             "event_ts": event_dt.timestamp(),
-            "timestamp": Time.now().timestamp(),
+            "timestamp": now_ts,
         }
 
     return events
@@ -136,9 +137,17 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
 
+    now = Time.clean(Time.now())
+
     if not (events := HTML_CACHE.load()):
         tasks = [
-            refresh_html_cache(client, url, sport) for sport, url in sport_urls.items()
+            refresh_html_cache(
+                client,
+                url,
+                sport,
+                now.timestamp(),
+            )
+            for sport, url in sport_urls.items()
         ]
 
         results = await asyncio.gather(*tasks)
@@ -149,7 +158,6 @@ async def get_events(
 
     live = []
 
-    now = Time.clean(Time.now())
     start_ts = now.delta(minutes=-30).timestamp()
    end_ts = now.delta(minutes=30).timestamp()
 
diff --git a/M3U8/scrapers/streamed.py b/M3U8/scrapers/streamed.py
index 83fb861..15b4b83 100644
--- a/M3U8/scrapers/streamed.py
+++ b/M3U8/scrapers/streamed.py
@@ -81,7 +81,7 @@ async def refresh_html_cache(
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')
 
-        return []
+        return {}
 
     soup = HTMLParser(r.text)
     events = {}
diff --git a/M3U8/scrapers/strmd.py b/M3U8/scrapers/strmd.py
index f2e50ad..b48a046 100644
--- a/M3U8/scrapers/strmd.py
+++ b/M3U8/scrapers/strmd.py
@@ -163,7 +163,6 @@ async def get_events(
         if not sources:
             continue
 
-        # source = sources[0]
         source = sources[1] if len(sources) > 1 else sources[0]
         source_type = source.get("source")
         stream_id = source.get("id")