This commit is contained in:
parent 4291e8f407
commit 00000d9766

1 changed file with 279 additions and 275 deletions
@@ -31,32 +31,32 @@ def validate_category(s: str) -> str:
     return s.capitalize()


-def get_tvg(sport: str, event: str) -> str:
+def get_tvg_id(sport: str, event: str) -> tuple[str | None, str]:
     match sport:
         case "American Football":
             if leagues.is_valid(event, "NFL"):
-                return "NFL.Dummy.us"
+                return leagues.info("NFL")

             else:
-                return "NCAA.Sports.Dummy.us"
+                return leagues.info("NCAA")

         case "Basketball":
             if leagues.is_valid(event, "NBA"):
-                return "NBA.Basketball.Dummy.us"
+                return leagues.info("NBA")

             elif leagues.is_valid(event, "WNBA"):
-                return "WNBA.dummy.us"
+                return leagues.info("WNBA")

             # NCAA

             else:
-                return "Basketball.Dummy.us"
+                return leagues.info("Basketball")

         case "Hockey":
-            return "NHL.Hockey.Dummy.us"
+            return leagues.info("NHL")

         case _:
-            return leagues.info(sport)[0]
+            return leagues.info(sport)


 async def refresh_api_cache(
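For orientation between hunks: the renamed helper now returns whatever pair leagues.info(...) produces instead of a bare tvg-id string, and the case _ branch stops indexing [0]. From the call sites in this diff, leagues.info(name) appears to yield (tvg_id, logo_url) with tvg_id possibly None. A minimal sketch under that assumption; the lookup table and URLs below are invented placeholders, not the repository's data:

# Sketch only: a stand-in for leagues.info as implied by this diff, where the
# result is unpacked as (tvg_id, logo) and tvg_id may be None.
LEAGUE_INFO: dict[str, tuple[str | None, str]] = {
    # name: (tvg_id, logo_url) -- placeholder values for illustration
    "NFL": ("NFL.Dummy.us", "https://example.invalid/nfl.png"),
    "NHL": ("NHL.Hockey.Dummy.us", "https://example.invalid/nhl.png"),
}


def info(name: str) -> tuple[str | None, str]:
    # Unknown names fall back to (None, generic logo), letting callers apply
    # their own defaults, e.g. `tvg_id or "Live.Event.us"` later in scrape().
    return LEAGUE_INFO.get(name, (None, "https://example.invalid/generic.png"))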
@@ -169,6 +169,7 @@ async def get_events(
         name = " | ".join(p.strip() for p in parts if p.strip())

         logo = urljoin(base_url, poster) if (poster := event.get("poster")) else None

         key = f"[{sport}] {name} (STRMD)"

         if cached_keys & {key}:
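A side note on the unchanged context above: cached_keys & {key} intersects the cached-key set with a one-element set, so it is truthy exactly when the key is already cached; a plain membership test is the equivalent reading. Tiny sketch with made-up keys, assuming cached_keys is a set:

cached_keys = {"[Hockey] Team A vs Team B (STRMD)"}  # made-up cache contents
key = "[Hockey] Team A vs Team B (STRMD)"

# Set intersection and direct membership agree on truthiness here.
assert bool(cached_keys & {key}) == (key in cached_keys)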
@@ -189,9 +190,10 @@ async def get_events(
         if not sources:
             continue

-        first_source = sources[0]
-        source_type = first_source.get("source")
-        stream_id = first_source.get("id")
+        source = sources[0]
+        # source = sources[1] if len(sources) > 1 else sources[0]
+        source_type = source.get("source")
+        stream_id = source.get("id")

         if not (source_type and stream_id):
             continue
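The hunk above renames first_source to source, keeps the alternate pick (the second source when one exists) only as a comment, and leaves the guard on missing fields unchanged. A self-contained sketch of that selection logic; the sample sources payloads are invented for illustration:

# Invented sample payloads: one event with a usable source, one with none.
events_sources = [
    [{"source": "alpha", "id": "abc123"}, {"source": "bravo", "id": "def456"}],
    [],
]

for sources in events_sources:
    if not sources:
        continue

    source = sources[0]
    # source = sources[1] if len(sources) > 1 else sources[0]  # alternate kept commented out in the diff
    source_type = source.get("source")
    stream_id = source.get("id")

    if not (source_type and stream_id):
        continue

    print(source_type, stream_id)  # -> alpha abc123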
@@ -255,12 +257,14 @@ async def scrape(client: httpx.AsyncClient) -> None:

         key = f"[{sport}] {event} (STRMD)"

+        tvg_id, pic = get_tvg_id(sport, event)
+
         entry = {
             "url": url,
-            "logo": logo or leagues.info(sport)[1],
+            "logo": logo or pic,
             "base": "https://embedsports.top/",
             "timestamp": ts,
-            "id": get_tvg(sport, event) or "Live.Event.us",
+            "id": tvg_id or "Live.Event.us",
         }

         urls[key] = cached_urls[key] = entry
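Taken together, the scrape() hunk unpacks the helper once per event and reuses both halves: the first element becomes the tvg id (falling back to "Live.Event.us") and the second backfills the logo when no poster was found. A short end-to-end sketch under those assumptions; the stub helper and sample values are illustrative, not the repository's code:

def get_tvg_id(sport: str, event: str) -> tuple[str | None, str]:
    # Illustrative stub standing in for the helper rewritten in this commit.
    if sport == "Hockey":
        return "NHL.Hockey.Dummy.us", "https://example.invalid/nhl.png"
    return None, "https://example.invalid/generic.png"


# Invented sample event values.
sport, event = "Hockey", "Team A vs Team B"
logo, url, ts = None, "https://example.invalid/stream.m3u8", 1700000000

tvg_id, pic = get_tvg_id(sport, event)
entry = {
    "url": url,
    "logo": logo or pic,                 # poster wins; league artwork is the fallback
    "base": "https://embedsports.top/",
    "timestamp": ts,
    "id": tvg_id or "Live.Event.us",
}
print(entry["id"], entry["logo"])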