This commit is contained in:
doms9 2026-04-07 11:26:01 -04:00
parent 7a5724f165
commit 00000d9aa1
5 changed files with 13 additions and 11 deletions

View file

@@ -26,12 +26,14 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
if not (api_data := API_FILE.load(per_entry=False)):
log.info("Refreshing API cache")
api_data = {"timestamp": now.timestamp()}
if r := await network.request(
urljoin(API_URL, "api/v1/events/sports"),
log=log,
params={"user": "cdnlivetv", "plan": "free"},
):
api_data = r.json().get("cdn-live-tv", {"timestamp": now.timestamp()})
api_data = r.json().get("cdn-live-tv")
API_FILE.write(api_data)

View file

@@ -13,7 +13,7 @@ TAG = "EMBEDHD"
CACHE_FILE = Cache(TAG, exp=5_400)
API_CACHE = Cache(f"{TAG}-api", exp=28_800)
API_FILE = Cache(f"{TAG}-api", exp=28_800)
BASE_URL = "https://embedhd.org"
@@ -25,7 +25,7 @@ def fix_league(s: str) -> str:
async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
now = Time.clean(Time.now())
if not (api_data := API_CACHE.load(per_entry=False)):
if not (api_data := API_FILE.load(per_entry=False)):
log.info("Refreshing API cache")
api_data = {"timestamp": now.timestamp()}
@@ -35,7 +35,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
api_data["timestamp"] = now.timestamp()
API_CACHE.write(api_data)
API_FILE.write(api_data)
events = []

View file

@@ -67,7 +67,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
if not (api_req := await network.request(API_URL, log=log)):
return events
elif not (api_data := api_req.json()):
elif not (api_data := api_req.json()) or api_data.get("error"):
return events
for event in api_data:

View file

@@ -13,7 +13,7 @@ TAG = "SHARK"
CACHE_FILE = Cache(TAG, exp=10_800)
HTML_CACHE = Cache(f"{TAG}-html", exp=19_800)
HTML_FILE = Cache(f"{TAG}-html", exp=19_800)
BASE_URL = "https://sharkstreams.net"
@@ -89,12 +89,12 @@ async def refresh_html_cache(now_ts: float) -> dict[str, dict[str, str | float]]
async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
now = Time.clean(Time.now())
if not (events := HTML_CACHE.load()):
if not (events := HTML_FILE.load()):
log.info("Refreshing HTML cache")
events = await refresh_html_cache(now.timestamp())
HTML_CACHE.write(events)
HTML_FILE.write(events)
live = []

View file

@@ -15,7 +15,7 @@ TAG = "STRMHUB"
CACHE_FILE = Cache(TAG, exp=10_800)
HTML_CACHE = Cache(f"{TAG}-html", exp=19_800)
HTML_FILE = Cache(f"{TAG}-html", exp=19_800)
BASE_URL = "https://livesports4u.net"
@@ -149,7 +149,7 @@ async def refresh_html_cache(
async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
now = Time.clean(Time.now())
if not (events := HTML_CACHE.load()):
if not (events := HTML_FILE.load()):
log.info("Refreshing HTML cache")
tasks = [
@@ -166,7 +166,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
events = {k: v for data in results for k, v in data.items()}
HTML_CACHE.write(events)
HTML_FILE.write(events)
live = []