- edit caching method
- misc edits.
doms9 2026-04-01 20:23:29 -04:00
parent b4dc04ad58
commit 00000d90e4
4 changed files with 82 additions and 66 deletions


@@ -92,9 +92,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 async def scrape(browser: Browser) -> None:
     cached_urls = CACHE_FILE.load()
-    cached_count = len(cached_urls)
-    urls.update(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
+    valid_count = cached_count = len(valid_urls)
+    urls.update(valid_urls)
     log.info(f"Loaded {cached_count} event(s) from cache")
@@ -121,29 +123,33 @@ async def scrape(browser: Browser) -> None:
                 log=log,
             )
-            sport, event, ts = (
-                ev["sport"],
-                ev["event"],
-                ev["timestamp"],
-            )
-            key = f"[{sport}] {event} ({TAG})"
-            tvg_id, logo = leagues.get_tvg_info(sport, event)
-            entry = {
-                "url": url,
-                "logo": logo,
-                "base": link,
-                "timestamp": ts,
-                "id": tvg_id or "Live.Event.us",
-                "link": link,
-            }
-            cached_urls[key] = entry
-            urls[key] = entry
+            if url:
+                sport, event, ts = (
+                    ev["sport"],
+                    ev["event"],
+                    ev["timestamp"],
+                )
+                valid_count += 1
+                key = f"[{sport}] {event} ({TAG})"
+                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                entry = {
+                    "url": url,
+                    "logo": logo,
+                    "base": link,
+                    "timestamp": ts,
+                    "id": tvg_id or "Live.Event.us",
+                    "link": link,
+                }
+                urls[key] = cached_urls[key] = entry
-        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")


@@ -79,9 +79,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 async def scrape(browser: Browser) -> None:
     cached_urls = CACHE_FILE.load()
-    cached_count = len(cached_urls)
-    urls.update(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
+    valid_count = cached_count = len(valid_urls)
+    urls.update(valid_urls)
     log.info(f"Loaded {cached_count} event(s) from cache")
@@ -108,29 +110,33 @@ async def scrape(browser: Browser) -> None:
                 log=log,
             )
-            sport, event, ts = (
-                ev["sport"],
-                ev["event"],
-                ev["timestamp"],
-            )
-            tvg_id, logo = leagues.get_tvg_info(sport, event)
-            key = f"[{sport}] {event} ({TAG})"
-            entry = {
-                "url": url,
-                "logo": logo,
-                "base": "https://vividmosaica.com/",
-                "timestamp": ts,
-                "id": tvg_id or "Live.Event.us",
-                "link": link,
-            }
-            cached_urls[key] = entry
-            urls[key] = entry
+            if url:
+                sport, event, ts = (
+                    ev["sport"],
+                    ev["event"],
+                    ev["timestamp"],
+                )
+                valid_count += 1
+                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                key = f"[{sport}] {event} ({TAG})"
+                entry = {
+                    "url": url,
+                    "logo": logo,
+                    "base": "https://vividmosaica.com/",
+                    "timestamp": ts,
+                    "id": tvg_id or "Live.Event.us",
+                    "link": link,
+                }
+                urls[key] = cached_urls[key] = entry
-        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")


@@ -80,9 +80,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 async def scrape(browser: Browser) -> None:
     cached_urls = CACHE_FILE.load()
-    cached_count = len(cached_urls)
-    urls.update(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
+    valid_count = cached_count = len(valid_urls)
+    urls.update(valid_urls)
     log.info(f"Loaded {cached_count} event(s) from cache")
@@ -111,25 +113,29 @@ async def scrape(browser: Browser) -> None:
                 log=log,
             )
-            sport, event = ev["sport"], ev["event"]
-            key = f"[{sport}] {event} ({TAG})"
-            tvg_id, logo = leagues.get_tvg_info(sport, event)
-            entry = {
-                "url": url,
-                "logo": logo,
-                "base": "https://vividmosaica.com/",
-                "timestamp": now.timestamp(),
-                "id": tvg_id or "Live.Event.us",
-                "link": link,
-            }
-            cached_urls[key] = entry
-            urls[key] = entry
+            if url:
+                sport, event = ev["sport"], ev["event"]
+                valid_count += 1
+                key = f"[{sport}] {event} ({TAG})"
+                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                entry = {
+                    "url": url,
+                    "logo": logo,
+                    "base": "https://vividmosaica.com/",
+                    "timestamp": now.timestamp(),
+                    "id": tvg_id or "Live.Event.us",
+                    "link": link,
+                }
+                urls[key] = cached_urls[key] = entry
-        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")


@@ -39,19 +39,17 @@ async def process_event(url: str, url_num: int) -> str | None:
     embed_list: list[tuple[int, str]] = ast.literal_eval(embed_list_str)
-    embed_list.sort(key=lambda i: i[0])
     m3u8 = "".join(
         chr(
             int("".join(c for c in base64.b64decode(v).decode("utf-8") if c.isdigit()))
            - sum(map(int, digit_list))
         )
-        for _, v in embed_list
+        for _, v in sorted(embed_list, key=lambda i: i[0])
     )
     log.info(f"URL {url_num}) Captured M3U8")
-    return m3u8.split("&ip")[0]
+    return m3u8.split("ip=")[0]

 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
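For reference, a self-contained sketch of the decode step this hunk touches: each embed item is a (position, base64 blob) pair, the digits inside the decoded blob minus the sum of digit_list give one character code, and the pieces are joined in position order, now via an inline sorted() instead of a separate in-place sort. The encode() helper and the sample digit_list below are fabricated so the snippet round-trips on its own; in the scraper, the payload and offset come from the scraped page.

import base64

digit_list = ["4", "2"]  # fabricated; the page supplies the real shared offset
offset = sum(map(int, digit_list))

def encode(s: str) -> list[tuple[int, str]]:
    # Fabricated inverse of the decode step: each char code is shifted by the
    # offset, wrapped in non-digit noise, base64-encoded, and shuffled.
    items = [
        (i, base64.b64encode(f"x{ord(c) + offset}y".encode()).decode())
        for i, c in enumerate(s)
    ]
    return items[::-1]  # deliberately out of order

embed_list = encode("https://example.com/live.m3u8?ip=1.2.3.4")

# Mirrors the new code path: sort inside the generator instead of mutating
# embed_list, then strip everything from "ip=" onward.
m3u8 = "".join(
    chr(
        int("".join(c for c in base64.b64decode(v).decode("utf-8") if c.isdigit()))
        - offset
    )
    for _, v in sorted(embed_list, key=lambda i: i[0])
)
print(m3u8.split("ip=")[0])  # -> https://example.com/live.m3u8?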