mirror of https://github.com/doms9/iptv.git

commit 00000d9aa1 ("e"), parent 7a5724f165
5 changed files with 13 additions and 11 deletions
@@ -26,12 +26,14 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False)):
         log.info("Refreshing API cache")

+        api_data = {"timestamp": now.timestamp()}
+
         if r := await network.request(
             urljoin(API_URL, "api/v1/events/sports"),
             log=log,
             params={"user": "cdnlivetv", "plan": "free"},
         ):
-            api_data = r.json().get("cdn-live-tv", {"timestamp": now.timestamp()})
+            api_data = r.json().get("cdn-live-tv")

         API_FILE.write(api_data)

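Note on this hunk: the fallback timestamp is now assigned before the request instead of being passed as a .get() default, so a failed request still writes a usable payload. A minimal self-contained sketch of that behavior, with network.request stubbed and all names assumed:

import asyncio
import time

async def fake_request(ok: bool) -> dict | None:
    # Stub standing in for network.request(); returns None on failure.
    return {"cdn-live-tv": {"timestamp": time.time()}} if ok else None

async def load_api_data(ok: bool) -> dict | None:
    # Fallback assigned up front: a failed request still yields a dict
    # with a fresh timestamp, where the old code would write whatever
    # falsy value the earlier cache load produced.
    api_data = {"timestamp": time.time()}
    if r := await fake_request(ok):
        api_data = r.get("cdn-live-tv")
    return api_data

print(asyncio.run(load_api_data(ok=False)))  # {'timestamp': ...} fallback
print(asyncio.run(load_api_data(ok=True)))   # payload from the response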
@@ -13,7 +13,7 @@ TAG = "EMBEDHD"

 CACHE_FILE = Cache(TAG, exp=5_400)

-API_CACHE = Cache(f"{TAG}-api", exp=28_800)
+API_FILE = Cache(f"{TAG}-api", exp=28_800)

 BASE_URL = "https://embedhd.org"

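The Cache class behind the handles renamed here is not part of this diff; a hedged sketch of the minimal interface the hunks rely on (a tag plus an exp TTL in seconds, load() and write()), with every internal detail assumed:

import json
import time
from pathlib import Path

class Cache:
    # Hypothetical minimal TTL cache matching only the calls seen in the
    # diff: Cache(tag, exp=seconds), .load(per_entry=...), .write(data).
    def __init__(self, tag: str, exp: int) -> None:
        self.path = Path(f"{tag.lower()}.json")  # assumed storage location
        self.exp = exp

    def load(self, per_entry: bool = True) -> dict | None:
        # per_entry accepted for signature parity; ignored in this sketch.
        if not self.path.exists():
            return None
        data = json.loads(self.path.read_text())
        # Expire the whole file once its stored timestamp is older than exp.
        if time.time() - data.get("timestamp", 0) > self.exp:
            return None
        return data

    def write(self, data) -> None:
        self.path.write_text(json.dumps(data))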
@@ -25,7 +25,7 @@ def fix_league(s: str) -> str:
 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     now = Time.clean(Time.now())

-    if not (api_data := API_CACHE.load(per_entry=False)):
+    if not (api_data := API_FILE.load(per_entry=False)):
         log.info("Refreshing API cache")

         api_data = {"timestamp": now.timestamp()}
@@ -35,7 +35,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:

         api_data["timestamp"] = now.timestamp()

-        API_CACHE.write(api_data)
+        API_FILE.write(api_data)

     events = []

@@ -67,7 +67,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (api_req := await network.request(API_URL, log=log)):
         return events

-    elif not (api_data := api_req.json()):
+    elif not (api_data := api_req.json()) or api_data.get("error"):
         return events

     for event in api_data:
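The added api_data.get("error") guard implies the upstream API can answer with an error object rather than event data; a small sketch of that check, where the exact error payload shape is an assumption:

def should_skip(payload) -> bool:
    # Bail on an empty body, or on an explicit error payload such as
    # {"error": "rate limited"}; the error key's shape is assumed.
    return not payload or (isinstance(payload, dict) and bool(payload.get("error")))

print(should_skip({}))                         # True: empty response
print(should_skip({"error": "rate limited"}))  # True: API-reported error
print(should_skip({"NBA": [{"id": 1}]}))       # False: usable event data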
@@ -13,7 +13,7 @@ TAG = "SHARK"

 CACHE_FILE = Cache(TAG, exp=10_800)

-HTML_CACHE = Cache(f"{TAG}-html", exp=19_800)
+HTML_FILE = Cache(f"{TAG}-html", exp=19_800)

 BASE_URL = "https://sharkstreams.net"

@@ -89,12 +89,12 @@ async def refresh_html_cache(now_ts: float) -> dict[str, dict[str, str | float]]
 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     now = Time.clean(Time.now())

-    if not (events := HTML_CACHE.load()):
+    if not (events := HTML_FILE.load()):
         log.info("Refreshing HTML cache")

         events = await refresh_html_cache(now.timestamp())

-        HTML_CACHE.write(events)
+        HTML_FILE.write(events)

     live = []

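Both scraper modules share the same load-or-refresh-then-write shape that the renamed HTML_FILE handle serves; condensed into a runnable sketch with the scraper stubbed and names assumed:

import asyncio
import time

async def refresh_html_cache(now_ts: float) -> dict:
    # Stub for the real scraper; returns freshly scraped events.
    return {"event-1": {"league": "NHL", "timestamp": now_ts}}

async def get_events(store: dict) -> dict:
    # Serve from cache when present; otherwise scrape once and persist.
    # store stands in for HTML_FILE.load() / HTML_FILE.write().
    if not (events := store.get("events")):
        events = await refresh_html_cache(time.time())
        store["events"] = events
    return events

store: dict = {}
print(asyncio.run(get_events(store)))  # first call scrapes
print(asyncio.run(get_events(store)))  # second call hits the cache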
@@ -15,7 +15,7 @@ TAG = "STRMHUB"

 CACHE_FILE = Cache(TAG, exp=10_800)

-HTML_CACHE = Cache(f"{TAG}-html", exp=19_800)
+HTML_FILE = Cache(f"{TAG}-html", exp=19_800)

 BASE_URL = "https://livesports4u.net"

@@ -149,7 +149,7 @@ async def refresh_html_cache(
 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     now = Time.clean(Time.now())

-    if not (events := HTML_CACHE.load()):
+    if not (events := HTML_FILE.load()):
         log.info("Refreshing HTML cache")

         tasks = [
@@ -166,7 +166,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:

         events = {k: v for data in results for k, v in data.items()}

-        HTML_CACHE.write(events)
+        HTML_FILE.write(events)

     live = []

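The dict comprehension visible in the last hunk merges the per-task dicts returned by the gathered scrape jobs; in isolation, with result shapes assumed:

results = [{"a": 1}, {"b": 2}, {"c": 3}]  # e.g. returned by asyncio.gather(*tasks)
events = {k: v for data in results for k, v in data.items()}
assert events == {"a": 1, "b": 2, "c": 3}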