diff --git a/M3U8/scrapers/cdnlivetv.py b/M3U8/scrapers/cdnlivetv.py
index 20934df1..39437e5c 100644
--- a/M3U8/scrapers/cdnlivetv.py
+++ b/M3U8/scrapers/cdnlivetv.py
@@ -92,9 +92,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 async def scrape(browser: Browser) -> None:
     cached_urls = CACHE_FILE.load()
 
-    cached_count = len(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
 
-    urls.update(cached_urls)
+    valid_count = cached_count = len(valid_urls)
+
+    urls.update(valid_urls)
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
@@ -121,29 +123,33 @@ async def scrape(browser: Browser) -> None:
                 log=log,
             )
 
+            sport, event, ts = (
+                ev["sport"],
+                ev["event"],
+                ev["timestamp"],
+            )
+
+            key = f"[{sport}] {event} ({TAG})"
+
+            tvg_id, logo = leagues.get_tvg_info(sport, event)
+
+            entry = {
+                "url": url,
+                "logo": logo,
+                "base": link,
+                "timestamp": ts,
+                "id": tvg_id or "Live.Event.us",
+                "link": link,
+            }
+
+            cached_urls[key] = entry
+
             if url:
-                sport, event, ts = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["timestamp"],
-                )
+                valid_count += 1
 
-                key = f"[{sport}] {event} ({TAG})"
+                urls[key] = entry
 
-                tvg_id, logo = leagues.get_tvg_info(sport, event)
-
-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": link,
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
-
-                urls[key] = cached_urls[key] = entry
-
-        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/embedhd.py b/M3U8/scrapers/embedhd.py
index f38f46a2..64347665 100644
--- a/M3U8/scrapers/embedhd.py
+++ b/M3U8/scrapers/embedhd.py
@@ -79,9 +79,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 async def scrape(browser: Browser) -> None:
     cached_urls = CACHE_FILE.load()
 
-    cached_count = len(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
 
-    urls.update(cached_urls)
+    valid_count = cached_count = len(valid_urls)
+
+    urls.update(valid_urls)
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
@@ -108,29 +110,33 @@ async def scrape(browser: Browser) -> None:
                 log=log,
             )
 
+            sport, event, ts = (
+                ev["sport"],
+                ev["event"],
+                ev["timestamp"],
+            )
+
+            tvg_id, logo = leagues.get_tvg_info(sport, event)
+
+            key = f"[{sport}] {event} ({TAG})"
+
+            entry = {
+                "url": url,
+                "logo": logo,
+                "base": "https://vividmosaica.com/",
+                "timestamp": ts,
+                "id": tvg_id or "Live.Event.us",
+                "link": link,
+            }
+
+            cached_urls[key] = entry
+
             if url:
-                sport, event, ts = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["timestamp"],
-                )
+                valid_count += 1
 
-                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                urls[key] = entry
 
-                key = f"[{sport}] {event} ({TAG})"
-
-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": "https://vividmosaica.com/",
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
-
-                urls[key] = cached_urls[key] = entry
-
-        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/fsports.py b/M3U8/scrapers/fsports.py
index 1f5dfb88..2dbb35d3 100644
--- a/M3U8/scrapers/fsports.py
+++ b/M3U8/scrapers/fsports.py
@@ -80,9 +80,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 async def scrape(browser: Browser) -> None:
     cached_urls = CACHE_FILE.load()
 
-    cached_count = len(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
 
-    urls.update(cached_urls)
+    valid_count = cached_count = len(valid_urls)
+
+    urls.update(valid_urls)
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
@@ -111,25 +113,29 @@ async def scrape(browser: Browser) -> None:
                 log=log,
            )
 
+            sport, event = ev["sport"], ev["event"]
+
+            key = f"[{sport}] {event} ({TAG})"
+
+            tvg_id, logo = leagues.get_tvg_info(sport, event)
+
+            entry = {
+                "url": url,
+                "logo": logo,
+                "base": "https://vividmosaica.com/",
+                "timestamp": now.timestamp(),
+                "id": tvg_id or "Live.Event.us",
+                "link": link,
+            }
+
+            cached_urls[key] = entry
+
             if url:
-                sport, event = ev["sport"], ev["event"]
+                valid_count += 1
 
-                key = f"[{sport}] {event} ({TAG})"
+                urls[key] = entry
 
-                tvg_id, logo = leagues.get_tvg_info(sport, event)
-
-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": "https://vividmosaica.com/",
-                    "timestamp": now.timestamp(),
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
-
-                urls[key] = cached_urls[key] = entry
-
-        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/streamtpnew.py b/M3U8/scrapers/streamtpnew.py
index 4c98d9eb..60220310 100644
--- a/M3U8/scrapers/streamtpnew.py
+++ b/M3U8/scrapers/streamtpnew.py
@@ -39,19 +39,17 @@ async def process_event(url: str, url_num: int) -> str | None:
 
     embed_list: list[tuple[int, str]] = ast.literal_eval(embed_list_str)
 
-    embed_list.sort(key=lambda i: i[0])
-
     m3u8 = "".join(
         chr(
             int("".join(c for c in base64.b64decode(v).decode("utf-8") if c.isdigit()))
             - sum(map(int, digit_list))
         )
-        for _, v in embed_list
+        for _, v in sorted(embed_list, key=lambda i: i[0])
     )
 
     log.info(f"URL {url_num}) Captured M3U8")
 
-    return m3u8.split("&ip")[0]
+    return m3u8.split("ip=")[0]
 
 
 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]: