doms9 2025-12-02 15:26:13 -05:00
parent 00000d91bb
commit 00000d9a4d
6 changed files with 13 additions and 24 deletions

View file

@@ -49,10 +49,7 @@ async def get_events() -> dict[str, dict[str, str | float]]:
     events = {}
-    pattern = re.compile(
-        r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?",
-        re.IGNORECASE,
-    )
+    pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)
     for event in api_data.get("events", []):
         event_dt = Time.from_str(event["date"], timezone="UTC")

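The collapsed re.compile call is behavior-neutral; for reference, a standalone sketch of what the pattern matches, run on a made-up input string:

import re

# Same pattern as above: an http(s) URL ending in .m3u8, with an optional query string.
pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)

# Made-up input, just to show the match behavior (case-insensitive, query string kept).
sample = "var src = 'https://cdn.example.com/live/abc/index.M3U8?token=123';"
print(pattern.findall(sample))
# ['https://cdn.example.com/live/abc/index.M3U8?token=123']
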
View file

@@ -41,13 +41,7 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False)):
-        api_data = await refresh_api_cache(
-            client,
-            urljoin(
-                BASE_URL,
-                "api/streams",
-            ),
-        )
+        api_data = await refresh_api_cache(client, urljoin(BASE_URL, "api/streams"))
         API_FILE.write(api_data)

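For reference, a standalone sketch of the urljoin behavior relied on here; the example.com bases are stand-ins for BASE_URL, made up only to illustrate the usual trailing-slash gotcha:

from urllib.parse import urljoin

# With a bare host as the base (as BASE_URL appears to be), the relative
# path is simply appended:
print(urljoin("https://example.com", "api/streams"))
# https://example.com/api/streams

# Hypothetical bases, only to show the gotcha: without a trailing slash the
# last path segment is replaced, with one it is kept.
print(urljoin("https://example.com/v2", "api/streams"))   # https://example.com/api/streams
print(urljoin("https://example.com/v2/", "api/streams"))  # https://example.com/v2/api/streams
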
View file

@@ -18,7 +18,7 @@ HTML_CACHE = Cache("roxie-html.json", exp=19_800)
 BASE_URL = "https://roxiestreams.live"
-valid_sports = {
+SPORT_ENDPOINTS = {
     "fighting": "Fighting",
     "mlb": "MLB",
     "motorsports": "Racing",
@@ -90,7 +90,7 @@ async def refresh_html_cache(
         event_dt = Time.from_str(data_start, timezone="PST")
-        event_sport = valid_sports[sport]
+        event_sport = SPORT_ENDPOINTS[sport]
         key = f"[{event_sport}] {event} ({TAG})"
@@ -156,7 +156,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in valid_sports}
+    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}
     events = await get_events(
         client,

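The rename makes the mapping's dual role clearer: keys are the URL endpoints that get scraped, values are the display names. A trimmed sketch, using only the three entries visible in the diff and a made-up event name and TAG:

from urllib.parse import urljoin

BASE_URL = "https://roxiestreams.live"

# Trimmed to the entries visible in the diff; the real dict presumably has more.
SPORT_ENDPOINTS = {
    "fighting": "Fighting",
    "mlb": "MLB",
    "motorsports": "Racing",
}

# Keys drive which pages get scraped...
sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}
print(sport_urls["motorsports"])  # https://roxiestreams.live/motorsports

# ...and values drive how events are labeled (event name and TAG invented here).
event, TAG = "Grand Prix", "RX"
print(f"[{SPORT_ENDPOINTS['motorsports']}] {event} ({TAG})")  # [Racing] Grand Prix (RX)
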
View file

@@ -35,19 +35,21 @@ async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | flo
     events = {}
-    for _, streams in api_data.get("streams", {}).items():
+    for streams in api_data.get("streams", {}).values():
         if not streams:
             continue
         for stream in streams:
             sport, name = stream["league"], stream["name"]
+            stream_key = stream["stream_key"]
             key = f"[{sport}] {name} ({TAG})"
             tvg_id, logo = leagues.get_tvg_info(sport, name)
             events[key] = {
-                "url": urljoin(BASE_URL, f"live/{stream['stream_key']}720p/index.m3u8"),
+                "url": urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8"),
                 "logo": logo,
                 "base": BASE_URL,
                 "timestamp": Time.now().timestamp(),

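Since the outer dict's key is never used, iterating over .values() drops the throwaway variable, and pulling stream_key into a local keeps the f-string free of nested quotes. A self-contained sketch with invented data shaped the way the diff implies:

# Invented API payload: "streams" maps group names to lists of stream dicts.
api_data = {
    "streams": {
        "soccer": [{"league": "EPL", "name": "Derby", "stream_key": "abc123"}],
        "nba": [],  # empty groups are skipped by the `continue`
    }
}

for streams in api_data.get("streams", {}).values():  # keys unused, so .values()
    if not streams:
        continue
    for stream in streams:
        stream_key = stream["stream_key"]  # local avoids nested quotes in the f-string
        print(f"live/{stream_key}720p/index.m3u8")
# live/abc123720p/index.m3u8
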
View file

@@ -128,8 +128,7 @@ async def get_events(
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         api_data = await refresh_api_cache(
-            client,
-            urljoin(url, "api/matches/all-today"),
+            client, urljoin(url, "api/matches/all-today")
         )
         API_FILE.write(api_data)
@@ -172,8 +171,8 @@ async def get_events(
         if not sources:
             continue
-        skip_sources = {"admin", "alpha", "bravo"}
-        valid_sources = [d for d in sources if d.get("source") not in skip_sources]
+        skip_types = {"admin", "alpha", "bravo"}
+        valid_sources = [d for d in sources if d.get("source") not in skip_types]
         if not valid_sources:
             continue

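The rename to skip_types only touches the variable name; the filter itself is unchanged. A small sketch with invented source entries, including one without a "source" key (which .get() keeps, since None is not in the set):

skip_types = {"admin", "alpha", "bravo"}
sources = [
    {"source": "admin", "id": 1},
    {"source": "charlie", "id": 2},
    {"id": 3},  # .get("source") -> None, not in skip_types, so it is kept
]

valid_sources = [d for d in sources if d.get("source") not in skip_types]
print(valid_sources)
# [{'source': 'charlie', 'id': 2}, {'id': 3}]
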
View file

@@ -59,10 +59,7 @@ async def refresh_api_cache(
     log.info("Refreshing API cache")
     tasks = [
-        get_api_data(
-            client,
-            urljoin(url, f"api/v1/matches/{sport}"),
-        )
+        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
         for sport in SPORT_ENDPOINTS
     ]
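
The comprehension now builds each coroutine on one line. A self-contained sketch, assuming the tasks are awaited with asyncio.gather (get_api_data below is a stand-in fetcher, not the repo's implementation):

import asyncio
from urllib.parse import urljoin

SPORT_ENDPOINTS = {"mlb": "MLB", "nba": "NBA"}  # trimmed stand-in mapping

async def get_api_data(client, endpoint: str) -> dict:
    # Stand-in fetcher: just echoes the endpoint it was asked for.
    return {"endpoint": endpoint}

async def refresh_api_cache(client, url: str) -> list[dict]:
    # One coroutine per sport endpoint, run concurrently.
    tasks = [
        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
        for sport in SPORT_ENDPOINTS
    ]
    return await asyncio.gather(*tasks)

print(asyncio.run(refresh_api_cache(None, "https://example.com")))
# [{'endpoint': 'https://example.com/api/v1/matches/mlb'},
#  {'endpoint': 'https://example.com/api/v1/matches/nba'}]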