Mirror of https://github.com/doms9/iptv.git
remove nfl webcast (cloudflare protection added)
This commit is contained in:
parent 168fb9171f
commit 00000d9e80
1 changed file with 6 additions and 10 deletions
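The NFL source is dropped because nflwebcast.com now sits behind a Cloudflare challenge, so plain HTTP requests no longer reach the schedule HTML. As a rough, standalone illustration (not part of this commit, and not using the repository's own network helper), a Cloudflare-challenged host can usually be recognized by its 403/503 interstitial; this sketch assumes the httpx library:

# Sketch only: detect a Cloudflare challenge page before trying to scrape a host.
# Assumes httpx is installed; the repo's network.request helper is not used here.
import asyncio

import httpx


async def is_cloudflare_challenged(url: str) -> bool:
    async with httpx.AsyncClient(follow_redirects=True, timeout=10) as client:
        resp = await client.get(url)

    # Cloudflare challenge pages typically answer 403/503 from the
    # "cloudflare" server with an interstitial titled "Just a moment...".
    return (
        resp.status_code in (403, 503)
        and resp.headers.get("server", "").lower() == "cloudflare"
        and "just a moment" in resp.text.lower()
    )


if __name__ == "__main__":
    print(asyncio.run(is_cloudflare_challenged("https://nflwebcast.com")))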
@@ -16,17 +16,17 @@ CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=86_400)
 
-BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}
+BASE_URL = "https://slapstreams.com"
 
 
 def fix_event(s: str) -> str:
     return " vs ".join(s.split("@"))
 
 
-async def refresh_html_cache(url: str) -> dict[str, dict[str, str | float]]:
+async def refresh_html_cache() -> dict[str, dict[str, str | float]]:
     events = {}
 
-    if not (html_data := await network.request(url, log=log)):
+    if not (html_data := await network.request(BASE_URL, log=log)):
         return events
 
     now = Time.clean(Time.now())
 
@@ -85,11 +85,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (events := HTML_CACHE.load()):
         log.info("Refreshing HTML cache")
 
-        tasks = [refresh_html_cache(url) for url in BASE_URLS.values()]
-
-        results = await asyncio.gather(*tasks)
-
-        events = {k: v for data in results for k, v in data.items()}
+        events = await refresh_html_cache()
 
         HTML_CACHE.write(events)
 
@@ -119,7 +115,7 @@ async def scrape() -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")
 
-    log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
+    log.info(f'Scraping from "{BASE_URL}"')
 
     events = await get_events(cached_urls.keys())
 
@@ -161,7 +157,7 @@ async def scrape() -> None:
         entry = {
             "url": url,
             "logo": logo,
-            "base": BASE_URLS[sport],
+            "base": BASE_URL,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
             "link": link,
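With slapstreams.com as the only remaining base URL, the per-site fan-out in get_events is unnecessary, so the gather-and-merge block collapses to a single await. For reference, the removed lines followed a common asyncio pattern; the sketch below reproduces it standalone with a hypothetical fetch_site helper (the real code called refresh_html_cache(url)):

# Sketch of the removed fan-out pattern: one refresh task per base URL,
# results merged into a single events dict. All names here are illustrative.
import asyncio


async def fetch_site(url: str) -> dict[str, dict[str, str]]:
    # Placeholder for a per-site scrape; the real code used refresh_html_cache(url).
    return {f"{url}/event/1": {"name": "Example @ Example"}}


async def main() -> None:
    base_urls = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}

    tasks = [fetch_site(url) for url in base_urls.values()]
    results = await asyncio.gather(*tasks)

    # Flatten the list of per-site dicts into one mapping; on key
    # collisions, later sites overwrite earlier ones.
    events = {k: v for data in results for k, v in data.items()}
    print(len(events), "event(s)")


asyncio.run(main())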