This commit is contained in:
parent 00000d91bb
commit 00000d9a4d

6 changed files with 13 additions and 24 deletions
@@ -49,10 +49,7 @@ async def get_events() -> dict[str, dict[str, str | float]]:
     events = {}

-    pattern = re.compile(
-        r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?",
-        re.IGNORECASE,
-    )
+    pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)

     for event in api_data.get("events", []):
         event_dt = Time.from_str(event["date"], timezone="UTC")
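As a quick sanity check on the collapsed regex, here is a minimal sketch of what the pattern matches; the sample URLs below are invented for illustration and do not come from the scraped site:

import re

# Same pattern as in the diff: an http(s) URL ending in .m3u8, with an optional query string.
pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)

# Hypothetical page snippet, illustration only.
sample = 'src="https://cdn.example.com/live/abc720p/index.m3u8?token=1" and http://example.com/x.M3U8'
print(pattern.findall(sample))
# ['https://cdn.example.com/live/abc720p/index.m3u8?token=1', 'http://example.com/x.M3U8']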
@@ -41,13 +41,7 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False)):
-        api_data = await refresh_api_cache(
-            client,
-            urljoin(
-                BASE_URL,
-                "api/streams",
-            ),
-        )
+        api_data = await refresh_api_cache(client, urljoin(BASE_URL, "api/streams"))

         API_FILE.write(api_data)
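Several of the collapsed call sites in this commit lean on urljoin to glue a relative path onto a BASE_URL constant. A small sketch of that behavior, mixing values that appear across the hunks (which may belong to different modules; the stream key is a made-up placeholder):

from urllib.parse import urljoin

BASE_URL = "https://roxiestreams.live"

# With a bare-domain base, urljoin simply appends the relative path.
print(urljoin(BASE_URL, "api/streams"))              # https://roxiestreams.live/api/streams
print(urljoin(BASE_URL, "fighting"))                 # https://roxiestreams.live/fighting
print(urljoin(BASE_URL, "live/abc720p/index.m3u8"))  # https://roxiestreams.live/live/abc720p/index.m3u8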
@@ -18,7 +18,7 @@ HTML_CACHE = Cache("roxie-html.json", exp=19_800)

 BASE_URL = "https://roxiestreams.live"

-valid_sports = {
+SPORT_ENDPOINTS = {
     "fighting": "Fighting",
     "mlb": "MLB",
     "motorsports": "Racing",
@@ -90,7 +90,7 @@ async def refresh_html_cache(

         event_dt = Time.from_str(data_start, timezone="PST")

-        event_sport = valid_sports[sport]
+        event_sport = SPORT_ENDPOINTS[sport]

         key = f"[{event_sport}] {event} ({TAG})"

@@ -156,7 +156,7 @@ async def scrape(client: httpx.AsyncClient) -> None:

     log.info(f'Scraping from "{BASE_URL}"')

-    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in valid_sports}
+    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}

     events = await get_events(
         client,
@@ -35,19 +35,21 @@ async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:

     events = {}

-    for _, streams in api_data.get("streams", {}).items():
+    for streams in api_data.get("streams", {}).values():
         if not streams:
             continue

         for stream in streams:
             sport, name = stream["league"], stream["name"]

+            stream_key = stream["stream_key"]
+
             key = f"[{sport}] {name} ({TAG})"

             tvg_id, logo = leagues.get_tvg_info(sport, name)

             events[key] = {
-                "url": urljoin(BASE_URL, f"live/{stream['stream_key']}720p/index.m3u8"),
+                "url": urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8"),
                 "logo": logo,
                 "base": BASE_URL,
                 "timestamp": Time.now().timestamp(),
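To make the new stream_key handling concrete, here is a minimal sketch of what this loop produces for a single, entirely hypothetical stream entry. The field names follow the diff, but the sample values and the stand-in BASE_URL/TAG constants are illustrative only, and the output dict is trimmed (logo, base, and timestamp omitted):

from urllib.parse import urljoin

BASE_URL = "https://example.invalid"  # stand-in; the module defines its own BASE_URL
TAG = "Example"                       # stand-in; the module defines its own TAG

# Hypothetical api_data, shaped the way the loop expects it.
api_data = {
    "streams": {
        "mlb": [{"league": "MLB", "name": "Team A vs Team B", "stream_key": "abc"}],
        "quiet-category": [],  # skipped by the `if not streams` guard
    }
}

events = {}
for streams in api_data.get("streams", {}).values():
    if not streams:
        continue
    for stream in streams:
        sport, name = stream["league"], stream["name"]
        stream_key = stream["stream_key"]
        key = f"[{sport}] {name} ({TAG})"
        events[key] = {"url": urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8")}

print(events)
# {'[MLB] Team A vs Team B (Example)': {'url': 'https://example.invalid/live/abc720p/index.m3u8'}}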
@@ -128,8 +128,7 @@ async def get_events(

     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         api_data = await refresh_api_cache(
-            client,
-            urljoin(url, "api/matches/all-today"),
+            client, urljoin(url, "api/matches/all-today")
         )

         API_FILE.write(api_data)
@@ -172,8 +171,8 @@ async def get_events(
         if not sources:
             continue

-        skip_sources = {"admin", "alpha", "bravo"}
-        valid_sources = [d for d in sources if d.get("source") not in skip_sources]
+        skip_types = {"admin", "alpha", "bravo"}
+        valid_sources = [d for d in sources if d.get("source") not in skip_types]

         if not valid_sources:
             continue
@@ -59,10 +59,7 @@ async def refresh_api_cache(
     log.info("Refreshing API cache")

     tasks = [
-        get_api_data(
-            client,
-            urljoin(url, f"api/v1/matches/{sport}"),
-        )
+        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
         for sport in SPORT_ENDPOINTS
     ]
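The hunk does not show how tasks is consumed after the list comprehension. One common idiom for a list of coroutines like this, sketched under the assumption that the per-sport requests are awaited concurrently with asyncio.gather (get_api_data is stubbed out and the mapping is trimmed, so none of these values are the project's real ones):

import asyncio
from urllib.parse import urljoin

SPORT_ENDPOINTS = {"fighting": "Fighting", "mlb": "MLB"}  # trimmed example mapping

async def get_api_data(client, url):
    # Stub for the real helper: just report which URL would be fetched.
    return {"url": url}

async def refresh_api_cache(client, url):
    tasks = [
        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
        for sport in SPORT_ENDPOINTS
    ]
    # Assumption: one request per sport endpoint, gathered concurrently.
    return await asyncio.gather(*tasks)

print(asyncio.run(refresh_api_cache(client=None, url="https://example.invalid")))
# [{'url': 'https://example.invalid/api/v1/matches/fighting'},
#  {'url': 'https://example.invalid/api/v1/matches/mlb'}]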