Compare commits

3 commits

Author              SHA1        Message      Date
GitHub Actions Bot  52a81ec34e  update M3U8  2025-12-02 15:30:35 -05:00
doms9               00000d9a4d  e            2025-12-02 15:26:13 -05:00
doms9               00000d91bb  e            2025-12-02 15:06:01 -05:00
10 changed files with 521 additions and 526 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -49,10 +49,7 @@ async def get_events() -> dict[str, dict[str, str | float]]:
     events = {}
-    pattern = re.compile(
-        r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?",
-        re.IGNORECASE,
-    )
+    pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)
     for event in api_data.get("events", []):
         event_dt = Time.from_str(event["date"], timezone="UTC")
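
Note: collapsing the re.compile call onto one line is purely cosmetic; the pattern is unchanged. For reference, it matches http(s) URLs ending in .m3u8 with an optional query string (the sample snippet below is invented, not from the real API):

import re

# Same pattern as in the diff: an http(s) URL ending in .m3u8, with an
# optional query string, stopping at whitespace or quotes.
pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)

html = "var src = 'https://cdn.example.com/live/abc/index.m3u8?token=123';"
print(pattern.findall(html))
# ['https://cdn.example.com/live/abc/index.m3u8?token=123']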


@@ -41,13 +41,7 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False)):
-        api_data = await refresh_api_cache(
-            client,
-            urljoin(
-                BASE_URL,
-                "api/streams",
-            ),
-        )
+        api_data = await refresh_api_cache(client, urljoin(BASE_URL, "api/streams"))
    API_FILE.write(api_data)
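
Inlining the nested urljoin call is behavior-preserving here because the base URL has no path. One urljoin subtlety worth remembering, sketched with invented bases:

from urllib.parse import urljoin

# With a bare host, the relative path is simply appended.
print(urljoin("https://example.com", "api/streams"))     # https://example.com/api/streams

# A base with a path but no trailing slash drops its last segment.
print(urljoin("https://example.com/v1", "api/streams"))  # https://example.com/api/streams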


@@ -18,7 +18,7 @@ HTML_CACHE = Cache("roxie-html.json", exp=19_800)
 BASE_URL = "https://roxiestreams.live"
-valid_sports = {
+SPORT_ENDPOINTS = {
     "fighting": "Fighting",
     "mlb": "MLB",
     "motorsports": "Racing",
@@ -90,7 +90,7 @@ async def refresh_html_cache(
     event_dt = Time.from_str(data_start, timezone="PST")
-    event_sport = valid_sports[sport]
+    event_sport = SPORT_ENDPOINTS[sport]
     key = f"[{event_sport}] {event} ({TAG})"
@@ -156,7 +156,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in valid_sports}
+    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}
     events = await get_events(
         client,
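
The rename from valid_sports to SPORT_ENDPOINTS is mechanical: iterating a dict yields its keys, so the comprehension still builds one URL per endpoint slug. A trimmed sketch (only three entries shown):

from urllib.parse import urljoin

BASE_URL = "https://roxiestreams.live"
# Endpoint slug -> display name, as in the renamed mapping above.
SPORT_ENDPOINTS = {"fighting": "Fighting", "mlb": "MLB", "motorsports": "Racing"}

# Iterating a dict yields its keys, so this maps each slug to its page URL.
sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}
print(sport_urls["mlb"])  # https://roxiestreams.live/mlb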


@@ -71,17 +71,17 @@ async def get_events(
         team_1_node = card.css_first(".team1 .team-name")
         team_2_node = card.css_first(".team2 .team-name")
-        if team_1_node and not team_2_node:
-            event = team_1_node.text(strip=True)
-        elif team_2_node and not team_1_node:
-            event = team_2_node.text(strip=True)
-        elif team_1_node and team_2_node:
-            event = (
+        if team_1_node and team_2_node:
+            event = (
                 f"{team_1_node.text(strip=True)} vs {team_2_node.text(strip=True)}"
             )
+        elif team_1_node:
+            event = team_1_node.text(strip=True)
+        elif team_2_node:
+            event = team_2_node.text(strip=True)
         else:
             continue
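
The reordering is behavior-preserving: checking the both-present case first lets each single-node branch drop its "and not ..." guard. A quick enumeration of the four cases with stand-in strings (the real code works on parser node objects; plain values here just to show the branch logic):

def pick_event(team_1, team_2):
    # Mirrors the new branch order: both names, else whichever exists.
    if team_1 and team_2:
        return f"{team_1} vs {team_2}"
    elif team_1:
        return team_1
    elif team_2:
        return team_2
    return None  # caller skips the card ("continue")

for t1, t2 in [("Lakers", "Celtics"), ("Lakers", None), (None, "Celtics"), (None, None)]:
    print(pick_event(t1, t2))
# Lakers vs Celtics / Lakers / Celtics / None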


@@ -19,6 +19,12 @@ BASE_URL = "https://streambtw.com"
 TAG = "STRMBTW"
+def fix_league(s: str) -> str:
+    pattern = re.compile(r"^\w*-\w*", re.IGNORECASE)
+    return " ".join(s.split("-")) if pattern.search(s) else s
 async def process_event(
     client: httpx.AsyncClient,
     url: str,
@@ -62,13 +68,13 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
         if not (href := link.attrs.get("href")):
             continue
-        sport = card.css_first("h5.card-title").text(strip=True)
+        league = card.css_first("h5.card-title").text(strip=True)
         name = card.css_first("p.card-text").text(strip=True)
         events.append(
             {
-                "sport": sport,
+                "sport": fix_league(league),
                 "event": name,
                 "link": urljoin(BASE_URL, href),
             }
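
For reference, the new fix_league helper rewrites a title only when it opens with a hyphenated word pair, and then replaces every hyphen with a space. Behavior sketch (sample titles invented):

import re

def fix_league(s: str) -> str:
    # Only rewrite titles that start with a hyphenated word pair;
    # split/join then replaces all hyphens, not just the first.
    pattern = re.compile(r"^\w*-\w*", re.IGNORECASE)
    return " ".join(s.split("-")) if pattern.search(s) else s

print(fix_league("NCAA-Football"))  # NCAA Football
print(fix_league("NBA"))            # NBA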


@@ -35,19 +35,21 @@ async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
     events = {}
-    for _, streams in api_data.get("streams", {}).items():
+    for streams in api_data.get("streams", {}).values():
         if not streams:
             continue
         for stream in streams:
             sport, name = stream["league"], stream["name"]
+            stream_key = stream["stream_key"]
             key = f"[{sport}] {name} ({TAG})"
             tvg_id, logo = leagues.get_tvg_info(sport, name)
             events[key] = {
-                "url": urljoin(BASE_URL, f"live/{stream['stream_key']}720p/index.m3u8"),
+                "url": urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8"),
                 "logo": logo,
                 "base": BASE_URL,
                 "timestamp": Time.now().timestamp(),


@@ -128,8 +128,7 @@ async def get_events(
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         api_data = await refresh_api_cache(
-            client,
-            urljoin(url, "api/matches/all-today"),
+            client, urljoin(url, "api/matches/all-today")
         )
     API_FILE.write(api_data)
@@ -172,8 +171,8 @@ async def get_events(
         if not sources:
             continue
-        skip_sources = {"admin", "alpha", "bravo"}
-        valid_sources = [d for d in sources if d.get("source") not in skip_sources]
+        skip_types = {"admin", "alpha", "bravo"}
+        valid_sources = [d for d in sources if d.get("source") not in skip_types]
         if not valid_sources:
             continue
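
The second hunk is a pure rename (skip_sources -> skip_types); the filter itself works like this (sample source dicts invented, field name from the diff):

skip_types = {"admin", "alpha", "bravo"}
sources = [{"source": "alpha"}, {"source": "charlie"}, {"id": 7}]

# .get() returns None for dicts without a "source" field; None is not in
# the skip set, so such entries are kept.
valid_sources = [d for d in sources if d.get("source") not in skip_types]
print(valid_sources)  # [{'source': 'charlie'}, {'id': 7}]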


@@ -59,10 +59,7 @@ async def refresh_api_cache(
     log.info("Refreshing API cache")
     tasks = [
-        get_api_data(
-            client,
-            urljoin(url, f"api/v1/matches/{sport}"),
-        )
+        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
         for sport in SPORT_ENDPOINTS
     ]
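
Presumably this list of coroutines is awaited together afterwards (e.g. via asyncio.gather; that call is outside the hunk, so treat it as an assumption). A minimal self-contained sketch of the fan-out shape:

import asyncio
from urllib.parse import urljoin

SPORT_ENDPOINTS = ["basketball", "football", "hockey"]  # illustrative slugs

async def get_api_data(client, url):
    # Stand-in for the real fetch; just echoes the URL it would hit.
    return url

async def main():
    client = None  # placeholder for the real httpx.AsyncClient
    url = "https://example.com"
    tasks = [
        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
        for sport in SPORT_ENDPOINTS
    ]
    # Assumption: the caller gathers the results concurrently.
    results = await asyncio.gather(*tasks)
    print(results)

asyncio.run(main())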