doms9 2025-11-03 10:52:41 -05:00
parent e49e2586ab
commit 00000d9919
3 changed files with 15 additions and 10 deletions


@@ -13,12 +13,7 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str]] = {}
-BASE_URL = "https://roxiestreams.cc"
-SPORT_URLS = {
-    sport: urljoin(BASE_URL, sport.lower())
-    for sport in ["Soccer", "MLB", "NBA", "NFL", "Fighting", "Motorsports"]
-}
+MIRRORS = ["https://roxiestreams.cc", "https://roxiestreams.live"]
 CACHE_FILE = Cache(Path(__file__).parent / "caches" / "roxie.json", exp=10_800)
@@ -149,11 +144,21 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")
-    log.info(f'Scraping from "{BASE_URL}"')
+    if not (base_url := await network.get_base(MIRRORS)):
+        log.warning("No working Roxie mirrors")
+        CACHE_FILE.write(cached_urls)
+        return
+
+    log.info(f'Scraping from "{base_url}"')
+
+    sport_urls = {
+        sport: urljoin(base_url, sport.lower())
+        for sport in ["Soccer", "MLB", "NBA", "NFL", "Fighting", "Motorsports"]
+    }
     events = await get_events(
         client,
-        SPORT_URLS,
+        sport_urls,
         set(cached_urls.keys()),
     )
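
The new guard relies on `network.get_base(MIRRORS)` returning a reachable base URL, or a falsy value when every mirror is down, but the helper's definition is not part of this diff. A minimal sketch of what such a mirror probe might look like, with the name, signature, and status check all assumed rather than taken from the repository:

```python
import httpx


async def get_base(mirrors: list[str], timeout: float = 5.0) -> str | None:
    """Hypothetical mirror probe: return the first mirror that answers, else None.

    Assumed behavior, not the repository's actual network.get_base().
    """
    async with httpx.AsyncClient(timeout=timeout, follow_redirects=True) as client:
        for url in mirrors:
            try:
                resp = await client.head(url)
            except httpx.HTTPError:
                continue  # mirror unreachable or timed out: try the next one
            if resp.status_code < 400:  # treat any non-error response as "up"
                return url
    return None  # every mirror failed: caller falls back to the cache
```

Under that reading, writing `cached_urls` back via `CACHE_FILE.write()` before the early return makes sense: when no mirror answers, the scraper preserves the previously cached events instead of silently dropping them.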