remove nfl webcast (cloudflare protection added)
This commit is contained in:
doms9 2025-12-29 01:54:07 -05:00
parent 168fb9171f
commit 00000d9e80

View file

@@ -16,17 +16,17 @@ CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=86_400)

-BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}
+BASE_URL = "https://slapstreams.com"


 def fix_event(s: str) -> str:
     return " vs ".join(s.split("@"))


-async def refresh_html_cache(url: str) -> dict[str, dict[str, str | float]]:
+async def refresh_html_cache() -> dict[str, dict[str, str | float]]:
     events = {}

-    if not (html_data := await network.request(url, log=log)):
+    if not (html_data := await network.request(BASE_URL, log=log)):
         return events

     now = Time.clean(Time.now())
@@ -85,11 +85,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (events := HTML_CACHE.load()):
         log.info("Refreshing HTML cache")

-        tasks = [refresh_html_cache(url) for url in BASE_URLS.values()]
-        results = await asyncio.gather(*tasks)
-
-        events = {k: v for data in results for k, v in data.items()}
-
+        events = await refresh_html_cache()

         HTML_CACHE.write(events)
@@ -119,7 +115,7 @@ async def scrape() -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")

-    log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
+    log.info(f'Scraping from "{BASE_URL}"')

     events = await get_events(cached_urls.keys())
@@ -161,7 +157,7 @@ async def scrape() -> None:
         entry = {
             "url": url,
             "logo": logo,
-            "base": BASE_URLS[sport],
+            "base": BASE_URL,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
             "link": link,