doms9 2025-11-03 10:52:41 -05:00
parent e49e2586ab
commit 00000d9919
3 changed files with 15 additions and 10 deletions


@@ -82,7 +82,7 @@ async def get_events(
         for z, stream_url in stream_urls:
             if stream_link := channel_info.get(stream_url):
                 if pattern.search(stream_link):
-                    key = f"[{sport}] {event_name} (PIXEL - {z})"
+                    key = f"[{sport}] {event_name} (PIXL - {z})"
                     if cached_keys & {key}:
                         continue
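A side note on the untouched dedupe check above: cached_keys & {key} allocates a one-element set just to test membership. Assuming cached_keys is a set (the scrape() hunk below passes set(cached_urls.keys())), a plain "in" test is the idiomatic equivalent; the keys here are hypothetical:

# Hypothetical cache keys, for illustration only -- not from this commit.
cached_keys = {"[MLB] Yankees vs Red Sox (PIXL - 1)"}
key = "[MLB] Yankees vs Red Sox (PIXL - 1)"
assert bool(cached_keys & {key}) == (key in cached_keys)  # same truth value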


@@ -13,12 +13,7 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str]] = {}
 
-BASE_URL = "https://roxiestreams.cc"
-
-SPORT_URLS = {
-    sport: urljoin(BASE_URL, sport.lower())
-    for sport in ["Soccer", "MLB", "NBA", "NFL", "Fighting", "Motorsports"]
-}
+MIRRORS = ["https://roxiestreams.cc", "https://roxiestreams.live"]
 
 CACHE_FILE = Cache(Path(__file__).parent / "caches" / "roxie.json", exp=10_800)
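The new MIRRORS list feeds network.get_base() in the next hunk. That helper's implementation is not part of this diff; a minimal sketch, assuming it simply returns the first mirror that answers an HTTP probe, could look like:

import httpx

async def get_base(mirrors: list[str]) -> str | None:
    """Return the first reachable mirror's base URL, or None if all are down."""
    async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client:
        for mirror in mirrors:
            try:
                resp = await client.get(mirror)
                resp.raise_for_status()  # treat 4xx/5xx as a dead mirror
            except httpx.HTTPError:
                continue  # unreachable or erroring, try the next one
            return mirror
    return None

Under that assumption, the ordering of MIRRORS matters: the first mirror that responds wins, so the list should go from most to least preferred.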
@@ -149,11 +144,21 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")
 
-    log.info(f'Scraping from "{BASE_URL}"')
+    if not (base_url := await network.get_base(MIRRORS)):
+        log.warning("No working Roxie mirrors")
+        CACHE_FILE.write(cached_urls)
+        return
+
+    log.info(f'Scraping from "{base_url}"')
+
+    sport_urls = {
+        sport: urljoin(base_url, sport.lower())
+        for sport in ["Soccer", "MLB", "NBA", "NFL", "Fighting", "Motorsports"]
+    }
 
     events = await get_events(
         client,
-        SPORT_URLS,
+        sport_urls,
         set(cached_urls.keys()),
     )
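Since the sport URLs are now built from whichever mirror responded, here is a quick standalone check of the comprehension using only the standard library (the chosen mirror is illustrative):

from urllib.parse import urljoin

base_url = "https://roxiestreams.live"  # e.g. the mirror get_base() picked
sport_urls = {
    sport: urljoin(base_url, sport.lower())
    for sport in ["Soccer", "MLB", "NBA", "NFL", "Fighting", "Motorsports"]
}
print(sport_urls["MLB"])  # https://roxiestreams.live/mlb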


@@ -42,7 +42,7 @@
     {
         "NCAA": {
             "logo": "https://1000logos.net/wp-content/uploads/2021/12/NCAA-Logo-500x281.png",
-            "names": ["CBB", "CFB", "NCAAB", "NCAAF"]
+            "names": ["CBB", "CFB", "NCAAB", "NCAAF", "NCAAB D-I", "NCAAF D-I"]
         }
     }
 ],