update M3U8

This commit is contained in:
GitHub Actions Bot 2026-04-20 15:30:48 -04:00
parent f7fa191ce4
commit fa72e64ecf
6 changed files with 440 additions and 305 deletions

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@@ -57,7 +57,10 @@ async def get_api_data() -> dict[str, dict[str, list[dict]]]:
tasks = [
(
sport,
network.request(urljoin(url, "api/v2/stateshot"), log=log),
network.request(
urljoin(url, "api/v2/stateshot"),
log=log,
),
)
for sport, url in API_URLS.items()
]

View file

@@ -37,13 +37,13 @@ async def process_event(url: str, url_num: int) -> str | None:
async def get_events() -> dict[str, dict[str, str | float]]:
now = Time.clean(Time.now())
events = []
if not (html_data := await network.request(BASE_URL, log=log)):
return events
now = Time.clean(Time.now())
pattern = re.compile(r"openEmbed\('([^']+)'\)", re.I)
soup = HTMLParser(html_data.content)

View file

@@ -47,19 +47,19 @@ async def process_event(url: str, url_num: int) -> str | None:
async def get_events() -> list[dict[str, str]]:
now = Time.clean(Time.now())
events = []
if not (
r := await network.request(
API_URL,
log=log,
params={"pageNumber": 1, "pageSize": 500},
log=log,
)
):
return events
now = Time.clean(Time.now())
api_data: list[dict] = r.json()
for stream_group in api_data:

View file

@@ -127,8 +127,8 @@ async def refresh_html_cache(
if not (
html_data := await network.request(
urljoin(BASE_URL, f"events/{date}"),
log=log,
params={"sport_id": sport_id},
log=log,
)
):
return events