Mirror of https://github.com/doms9/iptv.git, synced 2026-01-20 03:49:03 +01:00
edit window for embedhd.py; fix links from ppv.py
parent 438c182753
commit 00000d9bd8
2 changed files with 7 additions and 17 deletions
embedhd.py

@@ -37,7 +37,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     API_CACHE.write(api_data)
 
     events = []
-    start_dt = now.delta(minutes=-30)
+    start_dt = now.delta(hours=-1)
     end_dt = now.delta(hours=3)
 
     for info in api_data.get("days", []):
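The embedhd.py change widens the event window's lower bound from 30 minutes back to a full hour back. The repo's Time wrapper is not shown in this diff, so as a rough stdlib illustration only (the filtering behavior is inferred from the start_dt/end_dt names, not confirmed by the source):

import datetime as dt

def in_event_window(event_start: dt.datetime, now: dt.datetime | None = None) -> bool:
    """Keep events starting between one hour ago and three hours from now.

    Mirrors the widened window in this commit (hours=-1 .. hours=3).
    `event_start` is assumed to be timezone-aware.
    """
    now = now or dt.datetime.now(dt.timezone.utc)
    start_dt = now - dt.timedelta(hours=1)   # was minutes=30 before this commit
    end_dt = now + dt.timedelta(hours=3)
    return start_dt <= event_start <= end_dt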
ppv.py

@@ -14,20 +14,14 @@ CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 
 API_FILE = Cache(f"{TAG.lower()}-api.json", exp=19_800)
 
-API_MIRRORS = [
+MIRRORS = [
     "https://old.ppv.to/api/streams",
     "https://api.ppvs.su/api/streams",
     "https://api.ppv.to/api/streams",
 ]
 
-BASE_MIRRORS = [
-    "https://old.ppv.to",
-    "https://ppvs.su",
-    "https://ppv.to",
-]
-
 
-async def get_events(api_url: str, cached_keys: list[str]) -> list[dict[str, str]]:
+async def get_events(url: str, cached_keys: list[str]) -> list[dict[str, str]]:
     now = Time.clean(Time.now())
 
     if not (api_data := API_FILE.load(per_entry=False)):
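With BASE_MIRRORS removed, the scraper now probes a single MIRRORS list of API endpoints, and get_events takes the chosen endpoint as a plain url parameter. network.get_base is the repo's own helper and is not shown in this diff; a plausible sketch of what it might do (return the first mirror that answers), written against httpx, is:

import httpx

async def get_base(mirrors: list[str], timeout: float = 5.0) -> str | None:
    """Hypothetical stand-in for network.get_base: return the first mirror
    that responds with a non-error status, else None."""
    async with httpx.AsyncClient(timeout=timeout, follow_redirects=True) as client:
        for url in mirrors:
            try:
                r = await client.get(url)
            except httpx.HTTPError:
                continue  # unreachable mirror, try the next one
            if r.status_code < 400:
                return url
    return None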
@@ -35,7 +29,7 @@ async def get_events(api_url: str, cached_keys: list[str]) -> list[dict[str, str
 
         api_data = {"timestamp": now.timestamp()}
 
-        if r := await network.request(api_url, log=log):
+        if r := await network.request(url, log=log):
            api_data: dict = r.json()
 
        API_FILE.write(api_data)
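The hunk above is the cache-or-fetch path: API_FILE is loaded first, and only on a miss does the code hit the network, seed the payload with a fresh timestamp, and write the cache back. The Cache class belongs to the repo; a minimal stdlib sketch of the same idea, with hypothetical helpers:

import json
import time
from pathlib import Path

def load_cache(path: Path, exp: int) -> dict | None:
    """Return cached JSON if it exists and is younger than `exp` seconds."""
    try:
        data = json.loads(path.read_text())
    except (FileNotFoundError, json.JSONDecodeError):
        return None
    if time.time() - data.get("timestamp", 0) > exp:
        return None  # stale entry, caller should refetch
    return data

def write_cache(path: Path, data: dict) -> None:
    data["timestamp"] = time.time()  # stamp before persisting
    path.write_text(json.dumps(data))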
@@ -93,11 +87,7 @@ async def scrape() -> None:
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
-    base_url = await network.get_base(BASE_MIRRORS)
-
-    api_url = await network.get_base(API_MIRRORS)
-
-    if not (base_url and api_url):
+    if not (base_url := await network.get_base(MIRRORS)):
        log.warning("No working PPV mirrors")
 
        CACHE_FILE.write(cached_urls)
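The two separate mirror probes and the combined null check collapse into one assignment expression, so the guard both binds base_url and bails out when every mirror is down. A standalone illustration of that guard shape (first_alive is a stand-in, not the repo's network.get_base):

import asyncio

async def first_alive(mirrors: list[str]) -> str | None:
    # Stand-in probe: pretend the first mirror is always reachable.
    return mirrors[0] if mirrors else None

async def main() -> None:
    # Same shape as the new guard in scrape(): bind and test in one step.
    if not (base_url := await first_alive(["https://old.ppv.to/api/streams"])):
        print("No working PPV mirrors")
        return
    print(f'Scraping from "{base_url}"')

asyncio.run(main())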
@@ -106,7 +96,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{base_url}"')
 
-    events = await get_events(api_url, cached_urls.keys())
+    events = await get_events(base_url, cached_urls.keys())
 
     log.info(f"Processing {len(events)} new URL(s)")
 
@@ -148,7 +138,7 @@ async def scrape() -> None:
        entry = {
            "url": url,
            "logo": logo or pic,
-           "base": base_url,
+           "base": link,
            "timestamp": ts,
            "id": tvg_id or "Live.Event.us",
            "link": link,
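The final hunk switches the entry's "base" field from the shared mirror root to the event's own link, consistent with base_url no longer being a site root after the MIRRORS consolidation. The full schema is not in this diff, but the keys visible here pin down the entry's shape; a TypedDict sketch, with value types as assumptions:

from typing import TypedDict

class Entry(TypedDict):
    """Playlist entry built in scrape(), inferred from the keys in this
    hunk; value types are assumptions, not confirmed by the source."""
    url: str
    logo: str
    base: str        # now the event link rather than the mirror base URL
    timestamp: int   # assumed epoch seconds
    id: str          # tvg_id, falling back to "Live.Event.us"
    link: str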
||||||
Loading…
Add table
Add a link
Reference in a new issue