Compare commits
No commits in common. "52a81ec34ead0e39a77e5683c490dc3a853b2c6a" and "a7042a45609c5d307e6c1cd6a8a64e451a28cb2c" have entirely different histories.
52a81ec34e...a7042a4560
10 changed files with 525 additions and 520 deletions
M3U8/TV.m3u8 (492 changed lines)
File diff suppressed because it is too large.

M3U8/events.m3u8 (492 changed lines)
File diff suppressed because it is too large.
@@ -49,7 +49,10 @@ async def get_events() -> dict[str, dict[str, str | float]]:
 
     events = {}
 
-    pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)
+    pattern = re.compile(
+        r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?",
+        re.IGNORECASE,
+    )
 
     for event in api_data.get("events", []):
         event_dt = Time.from_str(event["date"], timezone="UTC")
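For context, a minimal sketch of what the reformatted pattern matches; the sample string and URL below are hypothetical, not taken from the scraper:

import re

# Same pattern as in the diff: an http(s) URL ending in .m3u8, optionally
# followed by a query string, stopping at whitespace or quote characters.
pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)

sample = "var src = 'https://cdn.example.com/live/stream.m3u8?token=abc';"
match = pattern.search(sample)
print(match.group(0) if match else "no match")
# -> https://cdn.example.com/live/stream.m3u8?token=abc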
@@ -41,7 +41,13 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False)):
-        api_data = await refresh_api_cache(client, urljoin(BASE_URL, "api/streams"))
+        api_data = await refresh_api_cache(
+            client,
+            urljoin(
+                BASE_URL,
+                "api/streams",
+            ),
+        )
 
         API_FILE.write(api_data)
 
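The change is formatting only; the load-or-refresh shape is unchanged. A standalone sketch of that walrus-operator pattern, with stubs standing in for the project's API_FILE and refresh_api_cache:

import asyncio


def load_cached() -> dict | None:
    # Stub for API_FILE.load(per_entry=False); pretend the cache is empty.
    return None


async def refresh() -> dict:
    # Stub for refresh_api_cache(client, url).
    return {"events": []}


async def main() -> None:
    # Bind the cache result and test it in one expression; fall back to a
    # refresh when the cache is empty.
    if not (api_data := load_cached()):
        api_data = await refresh()
    print(api_data)


asyncio.run(main())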
@@ -18,7 +18,7 @@ HTML_CACHE = Cache("roxie-html.json", exp=19_800)
 
 BASE_URL = "https://roxiestreams.live"
 
-SPORT_ENDPOINTS = {
+valid_sports = {
     "fighting": "Fighting",
     "mlb": "MLB",
     "motorsports": "Racing",
@@ -90,7 +90,7 @@ async def refresh_html_cache(
 
         event_dt = Time.from_str(data_start, timezone="PST")
 
-        event_sport = SPORT_ENDPOINTS[sport]
+        event_sport = valid_sports[sport]
 
         key = f"[{event_sport}] {event} ({TAG})"
 
@@ -156,7 +156,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}
+    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in valid_sports}
 
     events = await get_events(
         client,
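These three hunks are a straight rename of SPORT_ENDPOINTS to valid_sports; the URL construction itself is unchanged. For reference, iterating a dict yields its keys, so the comprehension joins each endpoint slug onto the base:

from urllib.parse import urljoin

BASE_URL = "https://roxiestreams.live"

valid_sports = {"fighting": "Fighting", "mlb": "MLB", "motorsports": "Racing"}

sport_urls = {sport: urljoin(BASE_URL, sport) for sport in valid_sports}
print(sport_urls["mlb"])  # -> https://roxiestreams.live/mlb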
@@ -71,17 +71,17 @@ async def get_events(
         team_1_node = card.css_first(".team1 .team-name")
         team_2_node = card.css_first(".team2 .team-name")
 
-        if team_1_node and team_2_node:
-            event = event = (
-                f"{team_1_node.text(strip=True)} vs {team_2_node.text(strip=True)}"
-            )
-
-        elif team_1_node:
+        if team_1_node and not team_2_node:
             event = team_1_node.text(strip=True)
 
-        elif team_2_node:
+        elif team_2_node and not team_1_node:
             event = team_2_node.text(strip=True)
 
+        elif team_1_node and team_2_node:
+            event = (
+                f"{team_1_node.text(strip=True)} vs {team_2_node.text(strip=True)}"
+            )
+
         else:
             continue
 
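The new ordering checks the single-team cases with explicit guards before the two-team case, and drops the doubled assignment ("event = event =") from the old branch. A minimal sketch of the new logic; the css_first/text(strip=True) calls match selectolax's API (an assumption about the project's parser), and the markup is hypothetical:

from selectolax.parser import HTMLParser

html = """
<div class="card">
  <div class="team1"><span class="team-name">Lions</span></div>
  <div class="team2"><span class="team-name">Bears</span></div>
</div>
"""

card = HTMLParser(html).css_first(".card")
team_1_node = card.css_first(".team1 .team-name")
team_2_node = card.css_first(".team2 .team-name")

if team_1_node and not team_2_node:
    event = team_1_node.text(strip=True)
elif team_2_node and not team_1_node:
    event = team_2_node.text(strip=True)
elif team_1_node and team_2_node:
    event = f"{team_1_node.text(strip=True)} vs {team_2_node.text(strip=True)}"
else:
    event = None

print(event)  # -> Lions vs Bears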
@@ -19,12 +19,6 @@ BASE_URL = "https://streambtw.com"
 TAG = "STRMBTW"
 
 
-def fix_league(s: str) -> str:
-    pattern = re.compile(r"^\w*-\w*", re.IGNORECASE)
-
-    return " ".join(s.split("-")) if pattern.search(s) else s
-
-
 async def process_event(
     client: httpx.AsyncClient,
     url: str,
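For reference, the removed helper replaced hyphens with spaces whenever the string started with a word-hyphen-word pair; a worked example of its old behavior (the inputs are hypothetical):

import re


def fix_league(s: str) -> str:
    # Matches strings beginning with word characters, a hyphen, then more
    # word characters (either side may be empty).
    pattern = re.compile(r"^\w*-\w*", re.IGNORECASE)

    return " ".join(s.split("-")) if pattern.search(s) else s


print(fix_league("premier-league"))  # -> premier league
print(fix_league("NFL"))             # -> NFL (no hyphen, returned unchanged)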
@@ -68,13 +62,13 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
             if not (href := link.attrs.get("href")):
                 continue
 
-            league = card.css_first("h5.card-title").text(strip=True)
+            sport = card.css_first("h5.card-title").text(strip=True)
 
             name = card.css_first("p.card-text").text(strip=True)
 
             events.append(
                 {
-                    "sport": fix_league(league),
+                    "sport": sport,
                     "event": name,
                     "link": urljoin(BASE_URL, href),
                 }
@@ -35,21 +35,19 @@ async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
 
     events = {}
 
-    for streams in api_data.get("streams", {}).values():
+    for _, streams in api_data.get("streams", {}).items():
         if not streams:
             continue
 
         for stream in streams:
             sport, name = stream["league"], stream["name"]
 
-            stream_key = stream["stream_key"]
-
             key = f"[{sport}] {name} ({TAG})"
 
             tvg_id, logo = leagues.get_tvg_info(sport, name)
 
             events[key] = {
-                "url": urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8"),
+                "url": urljoin(BASE_URL, f"live/{stream['stream_key']}720p/index.m3u8"),
                 "logo": logo,
                 "base": BASE_URL,
                 "timestamp": Time.now().timestamp(),
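The second change inlines stream["stream_key"] into the f-string instead of binding it first; both forms build the same URL. A quick check, with a hypothetical base and key:

from urllib.parse import urljoin

BASE_URL = "https://example.com"
stream = {"stream_key": "abc"}

# Old shape: bind the key, then interpolate.
stream_key = stream["stream_key"]
old = urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8")

# New shape: index the dict inside the f-string (single quotes inside a
# double-quoted f-string are valid).
new = urljoin(BASE_URL, f"live/{stream['stream_key']}720p/index.m3u8")

assert old == new == "https://example.com/live/abc720p/index.m3u8"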
@@ -128,7 +128,8 @@ async def get_events(
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         api_data = await refresh_api_cache(
-            client, urljoin(url, "api/matches/all-today")
+            client,
+            urljoin(url, "api/matches/all-today"),
         )
 
         API_FILE.write(api_data)
 
@@ -171,8 +172,8 @@ async def get_events(
         if not sources:
             continue
 
-        skip_types = {"admin", "alpha", "bravo"}
-        valid_sources = [d for d in sources if d.get("source") not in skip_types]
+        skip_sources = {"admin", "alpha", "bravo"}
+        valid_sources = [d for d in sources if d.get("source") not in skip_sources]
 
         if not valid_sources:
             continue
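The rename (skip_types -> skip_sources) better describes what the set holds. One subtlety worth noting: dict.get returns None for entries with no "source" key, and None is not in the skip set, so such entries are kept. A small demonstration with made-up data:

sources = [
    {"source": "admin", "id": 1},
    {"source": "charlie", "id": 2},
    {"id": 3},  # no "source" key at all
]

skip_sources = {"admin", "alpha", "bravo"}

valid_sources = [d for d in sources if d.get("source") not in skip_sources]
print(valid_sources)
# -> [{'source': 'charlie', 'id': 2}, {'id': 3}]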
@@ -59,7 +59,10 @@ async def refresh_api_cache(
     log.info("Refreshing API cache")
 
     tasks = [
-        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
+        get_api_data(
+            client,
+            urljoin(url, f"api/v1/matches/{sport}"),
+        )
         for sport in SPORT_ENDPOINTS
     ]
 
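The tasks list builds one coroutine per sport endpoint; presumably it is awaited with something like asyncio.gather further down (not shown in this hunk). A self-contained sketch of that fan-out pattern, with a stub in place of the real HTTP call:

import asyncio
from urllib.parse import urljoin

SPORT_ENDPOINTS = ["football", "basketball", "hockey"]  # illustrative values


async def get_api_data(url: str) -> str:
    # Stub standing in for the real request; just echoes the URL.
    await asyncio.sleep(0)
    return url


async def refresh_api_cache(url: str) -> list[str]:
    tasks = [
        get_api_data(urljoin(url, f"api/v1/matches/{sport}"))
        for sport in SPORT_ENDPOINTS
    ]

    # Run all endpoint fetches concurrently, preserving input order.
    return await asyncio.gather(*tasks)


print(asyncio.run(refresh_api_cache("https://example.com/")))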