doms9 2026-02-12 17:29:49 -05:00
parent 5b7a84a759
commit 00000d92ff
3 changed files with 85 additions and 67 deletions

View file

@@ -89,9 +89,11 @@ async def scrape() -> None:
     cached_hrefs = {entry["href"] for entry in cached_urls.values()}
-    cached_count = len(cached_urls)
-    urls.update(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
+    valid_count = cached_count = len(valid_urls)
+    urls.update(valid_urls)
     log.info(f"Loaded {cached_count} event(s) from cache")
@@ -118,30 +120,34 @@ async def scrape() -> None:
                 log=log,
             )
-            sport, event, link = (
-                ev["sport"],
-                ev["event"],
-                ev["link"],
-            )
-            key = f"[{sport}] {event} ({TAG})"
-            tvg_id, logo = leagues.get_tvg_info(sport, event)
-            entry = {
-                "url": url,
-                "logo": logo,
-                "base": BASE_URL,
-                "timestamp": now.timestamp(),
-                "id": tvg_id or "Live.Event.us",
-                "href": ev["href"],
-                "link": link,
-            }
-            cached_urls[key] = entry
             if url:
-                urls[key] = entry
+                sport, event, link = (
+                    ev["sport"],
+                    ev["event"],
+                    ev["link"],
+                )
+                valid_count += 1
+                key = f"[{sport}] {event} ({TAG})"
+                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                entry = {
+                    "url": url,
+                    "logo": logo,
+                    "base": BASE_URL,
+                    "timestamp": now.timestamp(),
+                    "id": tvg_id or "Live.Event.us",
+                    "href": ev["href"],
+                    "link": link,
+                }
+                urls[key] = cached_urls[key] = entry
-        if new_count := len(cached_urls) - cached_count:
+        if new_count := valid_count - cached_count:
             log.info(f"Collected and cached {new_count} new event(s)")
         else:
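
The restructuring reads more clearly outside diff form. Below is a hedged sketch of the new loop body, with stand-ins for names visible in the diff (ev, url, TAG, BASE_URL, now, and leagues.get_tvg_info are stubbed with hypothetical values): the entry is now built only when scraping actually produced a URL, and a single chained assignment writes it to both the playlist map and the cache.

from datetime import datetime, timezone

# Stand-ins for names visible in the diff; values are hypothetical.
TAG = "TAG"
BASE_URL = "http://example.com"
now = datetime.now(timezone.utc)
ev = {"sport": "NBA", "event": "Lakers vs Celtics",
      "href": "/e/1", "link": "http://example.com/e/1"}
url = "http://example.com/s1.m3u8"  # empty when scraping failed

urls: dict[str, dict] = {}
cached_urls: dict[str, dict] = {}
valid_count = 0

def get_tvg_info(sport: str, event: str) -> tuple[str | None, str]:
    # Stub for leagues.get_tvg_info(); returns (tvg_id, logo).
    return None, "http://example.com/logo.png"

# The old code built `entry` and wrote it to cached_urls unconditionally,
# gating only the urls[] write on `url`; the new code gates everything.
if url:
    sport, event, link = ev["sport"], ev["event"], ev["link"]
    valid_count += 1
    key = f"[{sport}] {event} ({TAG})"
    tvg_id, logo = get_tvg_info(sport, event)
    entry = {
        "url": url,
        "logo": logo,
        "base": BASE_URL,
        "timestamp": now.timestamp(),
        "id": tvg_id or "Live.Event.us",
        "href": ev["href"],
        "link": link,
    }
    # One chained assignment keeps playlist and cache in sync.
    urls[key] = cached_urls[key] = entry

A side effect worth noting: because failed events are no longer written to cached_urls, and url-less leftovers are filtered out on load, dead entries stop being carried forward and can be retried on the next run.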

View file

@@ -106,9 +106,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 async def scrape() -> None:
     cached_urls = CACHE_FILE.load()
-    cached_count = len(cached_urls)
-    urls.update(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
+    valid_count = cached_count = len(valid_urls)
+    urls.update(valid_urls)
     log.info(f"Loaded {cached_count} event(s) from cache")
@@ -135,29 +137,33 @@ async def scrape() -> None:
                 log=log,
             )
-            sport, event, link = (
-                ev["sport"],
-                ev["event"],
-                ev["link"],
-            )
-            key = f"[{sport}] {event} ({TAG})"
-            tvg_id, logo = leagues.get_tvg_info(sport, event)
-            entry = {
-                "url": url,
-                "logo": logo,
-                "base": "https://gooz.aapmains.net",
-                "timestamp": now.timestamp(),
-                "id": tvg_id or "Live.Event.us",
-                "link": link,
-            }
-            cached_urls[key] = entry
             if url:
-                urls[key] = entry
+                sport, event, link = (
+                    ev["sport"],
+                    ev["event"],
+                    ev["link"],
+                )
+                valid_count += 1
+                key = f"[{sport}] {event} ({TAG})"
+                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                entry = {
+                    "url": url,
+                    "logo": logo,
+                    "base": "https://gooz.aapmains.net",
+                    "timestamp": now.timestamp(),
+                    "id": tvg_id or "Live.Event.us",
+                    "link": link,
+                }
+                urls[key] = cached_urls[key] = entry
-        if new_count := len(cached_urls) - cached_count:
+        if new_count := valid_count - cached_count:
             log.info(f"Collected and cached {new_count} new event(s)")
         else:
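
The counter change deserves a note: the old new_count = len(cached_urls) - cached_count grew with every processed event, because cached_urls[key] = entry ran unconditionally, URL or not; now both sides of the subtraction track only valid entries. A small sketch of the walrus-operator check with made-up counts (the else branch is not shown in the diff, so its body here is a placeholder):

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("scrape")

cached_count = 3  # hypothetical: valid entries loaded from cache
valid_count = 5   # hypothetical: valid entries after this run

# := binds new_count and tests its truthiness in one step, so a run
# with zero new events falls through to the else branch.
if new_count := valid_count - cached_count:
    log.info(f"Collected and cached {new_count} new event(s)")
else:
    log.info("No new events")  # placeholder; real else body not shown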

View file

@@ -89,9 +89,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 async def scrape() -> None:
     cached_urls = CACHE_FILE.load()
-    cached_count = len(cached_urls)
-    urls.update(cached_urls)
+    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
+    valid_count = cached_count = len(valid_urls)
+    urls.update(valid_urls)
     log.info(f"Loaded {cached_count} event(s) from cache")
@@ -118,29 +120,33 @@ async def scrape() -> None:
                 log=log,
             )
-            sport, event, link = (
-                ev["sport"],
-                ev["event"],
-                ev["link"],
-            )
-            key = f"[{sport}] {event} ({TAG})"
-            tvg_id, logo = leagues.get_tvg_info(sport, event)
-            entry = {
-                "url": url,
-                "logo": logo,
-                "base": link,
-                "timestamp": now.timestamp(),
-                "id": tvg_id or "Live.Event.us",
-                "link": link,
-            }
-            cached_urls[key] = entry
             if url:
-                urls[key] = entry
+                sport, event, link = (
+                    ev["sport"],
+                    ev["event"],
+                    ev["link"],
+                )
+                valid_count += 1
+                key = f"[{sport}] {event} ({TAG})"
+                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                entry = {
+                    "url": url,
+                    "logo": logo,
+                    "base": link,
+                    "timestamp": now.timestamp(),
+                    "id": tvg_id or "Live.Event.us",
+                    "link": link,
+                }
+                urls[key] = cached_urls[key] = entry
-        if new_count := len(cached_urls) - cached_count:
+        if new_count := valid_count - cached_count:
             log.info(f"Collected and cached {new_count} new event(s)")
         else: