Compare commits

No commits in common. "52a81ec34ead0e39a77e5683c490dc3a853b2c6a" and "a7042a45609c5d307e6c1cd6a8a64e451a28cb2c" have entirely different histories.

10 changed files with 525 additions and 520 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -49,7 +49,10 @@ async def get_events() -> dict[str, dict[str, str | float]]:
     events = {}
-    pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)
+    pattern = re.compile(
+        r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?",
+        re.IGNORECASE,
+    )
     for event in api_data.get("events", []):
         event_dt = Time.from_str(event["date"], timezone="UTC")

@@ -41,7 +41,13 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False)):
-        api_data = await refresh_api_cache(client, urljoin(BASE_URL, "api/streams"))
+        api_data = await refresh_api_cache(
+            client,
+            urljoin(
+                BASE_URL,
+                "api/streams",
+            ),
+        )
         API_FILE.write(api_data)

@@ -18,7 +18,7 @@ HTML_CACHE = Cache("roxie-html.json", exp=19_800)
 BASE_URL = "https://roxiestreams.live"
-SPORT_ENDPOINTS = {
+valid_sports = {
     "fighting": "Fighting",
     "mlb": "MLB",
     "motorsports": "Racing",
@@ -90,7 +90,7 @@ async def refresh_html_cache(
     event_dt = Time.from_str(data_start, timezone="PST")
-    event_sport = SPORT_ENDPOINTS[sport]
+    event_sport = valid_sports[sport]
     key = f"[{event_sport}] {event} ({TAG})"
@@ -156,7 +156,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}
+    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in valid_sports}
     events = await get_events(
         client,

@@ -71,17 +71,17 @@ async def get_events(
         team_1_node = card.css_first(".team1 .team-name")
         team_2_node = card.css_first(".team2 .team-name")
-        if team_1_node and team_2_node:
-            event = (
-                f"{team_1_node.text(strip=True)} vs {team_2_node.text(strip=True)}"
-            )
-        elif team_1_node:
+        if team_1_node and not team_2_node:
             event = team_1_node.text(strip=True)
-        elif team_2_node:
+        elif team_2_node and not team_1_node:
             event = team_2_node.text(strip=True)
+        elif team_1_node and team_2_node:
+            event = (
+                f"{team_1_node.text(strip=True)} vs {team_2_node.text(strip=True)}"
+            )
         else:
             continue

@@ -19,12 +19,6 @@ BASE_URL = "https://streambtw.com"
 TAG = "STRMBTW"
-
-
-def fix_league(s: str) -> str:
-    pattern = re.compile(r"^\w*-\w*", re.IGNORECASE)
-    return " ".join(s.split("-")) if pattern.search(s) else s
-
 async def process_event(
     client: httpx.AsyncClient,
     url: str,
@@ -68,13 +62,13 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
             if not (href := link.attrs.get("href")):
                 continue
-            league = card.css_first("h5.card-title").text(strip=True)
+            sport = card.css_first("h5.card-title").text(strip=True)
             name = card.css_first("p.card-text").text(strip=True)
             events.append(
                 {
-                    "sport": fix_league(league),
+                    "sport": sport,
                     "event": name,
                     "link": urljoin(BASE_URL, href),
                 }

@@ -35,21 +35,19 @@ async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
     events = {}
-    for streams in api_data.get("streams", {}).values():
+    for _, streams in api_data.get("streams", {}).items():
         if not streams:
             continue
         for stream in streams:
             sport, name = stream["league"], stream["name"]
-            stream_key = stream["stream_key"]
             key = f"[{sport}] {name} ({TAG})"
             tvg_id, logo = leagues.get_tvg_info(sport, name)
             events[key] = {
-                "url": urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8"),
+                "url": urljoin(BASE_URL, f"live/{stream['stream_key']}720p/index.m3u8"),
                 "logo": logo,
                 "base": BASE_URL,
                 "timestamp": Time.now().timestamp(),

@@ -128,7 +128,8 @@ async def get_events(
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         api_data = await refresh_api_cache(
-            client, urljoin(url, "api/matches/all-today")
+            client,
+            urljoin(url, "api/matches/all-today"),
         )
         API_FILE.write(api_data)
@@ -171,8 +172,8 @@ async def get_events(
             if not sources:
                 continue
-            skip_types = {"admin", "alpha", "bravo"}
-            valid_sources = [d for d in sources if d.get("source") not in skip_types]
+            skip_sources = {"admin", "alpha", "bravo"}
+            valid_sources = [d for d in sources if d.get("source") not in skip_sources]
             if not valid_sources:
                 continue

@@ -59,7 +59,10 @@ async def refresh_api_cache(
     log.info("Refreshing API cache")
     tasks = [
-        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
+        get_api_data(
+            client,
+            urljoin(url, f"api/v1/matches/{sport}"),
+        )
         for sport in SPORT_ENDPOINTS
     ]