This commit is contained in:
doms9 2025-09-21 10:28:15 -04:00
parent 00000d9c5b
commit 00000d946c
5 changed files with 20 additions and 10 deletions

View file

@@ -111,10 +111,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
     key = f"[{sport}] {event} (S{i})"
+    tvg_id, logo = league_info(sport)
     entry = {
         "url": link,
-        "logo": league_info(sport)["logo"],
-        "id": league_info(sport)["id"],
+        "logo": logo,
+        "id": tvg_id or "Live.Event.us",
     }
     urls[key] = entry

View file

@@ -157,12 +157,14 @@ async def scrape(client: httpx.AsyncClient) -> None:
         f"[{sport}] {match_name} (FSTV)" if match_name else f"[{sport}] (FSTV)"
     )
+    tvg_id, logo = league_info(sport)
     entry = {
         "url": url,
-        "logo": league_info(sport)["logo"],
+        "logo": logo,
         "base": base_url,
         "timestamp": now.timestamp(),
-        "id": league_info(sport)["id"],
+        "id": tvg_id or "Live.Event.us",
         "href": ev["href"],
     }

View file

@@ -108,12 +108,14 @@ async def scrape(client: httpx.AsyncClient) -> None:
     key = f"[{sport}] {event} (SBTW)"
+    tvg_id, logo = league_info(sport)
     entry = {
         "url": url,
-        "logo": ev["logo"] or league_info(sport)["logo"],
+        "logo": ev["logo"] or logo,
         "base": BASE_URL,
         "timestamp": now.timestamp(),
-        "id": league_info(sport)["id"],
+        "id": tvg_id or "Live.Event.us",
     }
     urls[key] = entry

View file

@@ -186,12 +186,14 @@ async def scrape(client: httpx.AsyncClient) -> None:
     key = f"[{sport}] {event} (SEAST)"
+    tvg_id, logo = league_info(sport)
     entry = {
         "url": url,
-        "logo": league_info(sport)["logo"],
+        "logo": logo,
         "base": base_url,
         "timestamp": now.timestamp(),
-        "id": league_info(sport)["id"],
+        "id": tvg_id or "Live.Event.us",
     }
     urls[key] = cached_urls[key] = entry

View file

@@ -54,10 +54,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
     channel = url.split("/")[-2]
+    tvg_id, logo = league_info(sport)
     entry = {
         "url": f"http://origin.thetvapp.to/hls/{channel}/mono.m3u8",
-        "logo": league_info(sport)["logo"],
-        "id": league_info(sport)["id"],
+        "logo": logo,
+        "id": tvg_id or "Live.Event.us",
         "base": "https://tvpass.org",
         "timestamp": now.timestamp(),
     }