doms9 2025-10-11 13:45:14 -04:00
parent 4291e8f407
commit 00000d9766

@@ -31,32 +31,32 @@ def validate_category(s: str) -> str:
     return s.capitalize()


-def get_tvg(sport: str, event: str) -> str:
+def get_tvg_id(sport: str, event: str) -> tuple[str | None, str]:
     match sport:
         case "American Football":
             if leagues.is_valid(event, "NFL"):
-                return "NFL.Dummy.us"
+                return leagues.info("NFL")
             else:
-                return "NCAA.Sports.Dummy.us"
+                return leagues.info("NCAA")

         case "Basketball":
             if leagues.is_valid(event, "NBA"):
-                return "NBA.Basketball.Dummy.us"
+                return leagues.info("NBA")
             elif leagues.is_valid(event, "WNBA"):
-                return "WNBA.dummy.us"
+                return leagues.info("WNBA")
             # NCAA
             else:
-                return "Basketball.Dummy.us"
+                return leagues.info("Basketball")

         case "Hockey":
-            return "NHL.Hockey.Dummy.us"
+            return leagues.info("NHL")

         case _:
-            return leagues.info(sport)[0]
+            return leagues.info(sport)


 async def refresh_api_cache(
@@ -169,6 +169,7 @@ async def get_events(
         name = " | ".join(p.strip() for p in parts if p.strip())
         logo = urljoin(base_url, poster) if (poster := event.get("poster")) else None

         key = f"[{sport}] {name} (STRMD)"
+        if cached_keys & {key}:
@@ -189,9 +190,10 @@ async def get_events(
         if not sources:
             continue

-        first_source = sources[0]
-        source_type = first_source.get("source")
-        stream_id = first_source.get("id")
+        source = sources[0]
+        # source = sources[1] if len(sources) > 1 else sources[0]
+        source_type = source.get("source")
+        stream_id = source.get("id")

         if not (source_type and stream_id):
             continue
@@ -255,12 +257,14 @@ async def scrape(client: httpx.AsyncClient) -> None:
         key = f"[{sport}] {event} (STRMD)"
+        tvg_id, pic = get_tvg_id(sport, event)

         entry = {
             "url": url,
-            "logo": logo or leagues.info(sport)[1],
+            "logo": logo or pic,
             "base": "https://embedsports.top/",
             "timestamp": ts,
-            "id": get_tvg(sport, event) or "Live.Event.us",
+            "id": tvg_id or "Live.Event.us",
         }

         urls[key] = cached_urls[key] = entry
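
Taken together, the change means leagues.info(name) is now expected to hand back both the tvg id and a league logo as one pair: get_tvg_id returns it unchanged, and scrape() unpacks it into tvg_id and pic. A minimal sketch of that contract, with a hypothetical stand-in for the leagues module (only its call sites appear in this commit; the ids are the literals the old code returned, the logo URLs are placeholders):

# Hypothetical stand-in for the leagues module; illustrative data only.
_LEAGUES: dict[str, tuple[str | None, str]] = {
    "NFL": ("NFL.Dummy.us", "https://example.com/logos/nfl.png"),
    "NBA": ("NBA.Basketball.Dummy.us", "https://example.com/logos/nba.png"),
    "NHL": ("NHL.Hockey.Dummy.us", "https://example.com/logos/nhl.png"),
}


def info(name: str) -> tuple[str | None, str]:
    # Return (tvg_id, logo_url); unknown names fall back to no id and a generic logo.
    return _LEAGUES.get(name, (None, "https://example.com/logos/generic.png"))


# How scrape() consumes the pair after this commit:
tvg_id, pic = info("NFL")
entry = {
    "logo": pic,                      # the real code prefers the event poster: logo or pic
    "id": tvg_id or "Live.Event.us",  # missing tvg ids fall back to a generic channel
}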