parent 760aeb9174
commit 00000d9799

5 changed files with 19 additions and 14 deletions
@@ -131,6 +131,8 @@ async def scrape(client: httpx.AsyncClient) -> None:

     log.info(f"Processing {len(events)} new URL(s)")

+    now = Time.now().timestamp()
+
     for i, ev in enumerate(events, start=1):
         match_name, url = await network.safe_process(
             lambda: process_event(
@@ -155,7 +157,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
             "url": url,
             "logo": logo,
             "base": base_url,
-            "timestamp": Time.now().timestamp(),
+            "timestamp": now,
             "id": tvg_id or "Live.Event.us",
             "href": ev["href"],
         }
@@ -210,7 +210,6 @@ async def get_events(
     events: list[dict[str, str]] = []

     now = Time.now()
-
     start_dt = now.delta(minutes=-30)
     end_dt = now.delta(minutes=30)

@@ -126,14 +126,17 @@ async def get_events(
     base_url: str,
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
-
-    events: list[dict[str, str]] = []
-
     if not (api_data := API_FILE.load(nearest_hr=True, per_entry=False)):
         api_data = await refresh_api_cache(client, urljoin(base_url, "api/streams"))

         API_FILE.write(api_data)

+    events: list[dict[str, str]] = []
+
+    now = Time.now()
+    start_dt = now.delta(minutes=-30)
+    end_dt = now.delta(minutes=30)
+
     for stream_group in api_data["streams"]:
         sport = stream_group["category"]

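The hunk above defers building events and the time window until after the cached API payload has been loaded or refreshed. A minimal sketch of that load-or-refresh step, using a plain JSON file and a placeholder endpoint in place of the project's API_FILE helper and base_url (both the cache path and the URL here are assumptions for illustration):

import json
from pathlib import Path

import httpx

CACHE = Path("streams_cache.json")  # stand-in for API_FILE

def load_cache() -> dict | None:
    # Return the cached payload if it exists and parses, else None.
    try:
        return json.loads(CACHE.read_text())
    except (FileNotFoundError, json.JSONDecodeError):
        return None

async def load_or_refresh(client: httpx.AsyncClient) -> dict:
    # Reuse the cache when present; otherwise fetch the stream list
    # and persist it for the next run.
    if not (api_data := load_cache()):
        resp = await client.get("https://example.invalid/api/streams")  # placeholder URL
        resp.raise_for_status()
        api_data = resp.json()
        CACHE.write_text(json.dumps(api_data))
    return api_data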
@@ -141,10 +144,9 @@ async def get_events(
             continue

         for event in stream_group["streams"]:
-            name, start_ts, end_ts, logo, uri_name = (
+            name, start_ts, logo, uri_name = (
                 event["name"],
                 event["starts_at"],
-                event["ends_at"],
                 event["poster"],
                 event["uri_name"],
             )
@@ -154,11 +156,9 @@ async def get_events(
             if cached_keys & {key}:
                 continue

-            start_dt = Time.from_ts(start_ts).delta(minutes=-30)
+            event_dt = Time.from_ts(start_ts)

-            end_dt = Time.from_ts(end_ts).delta(minutes=30)
-
-            if not start_dt <= Time.now() < end_dt:
+            if not start_dt <= event_dt < end_dt:
                 continue

             events.append(
@@ -167,7 +167,7 @@ async def get_events(
                     "event": name,
                     "link": urljoin(base_url, f"live/{uri_name}"),
                     "logo": logo,
-                    "timestamp": start_dt.timestamp(),
+                    "timestamp": event_dt.timestamp(),
                 }
             )

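In the new get_events logic, the ±30 minute window is computed once per call and each event is kept only if its start timestamp falls inside it, rather than deriving a fresh window from every event's start and end times. A rough stdlib equivalent of that filter, with datetime standing in for the project's Time helper:

from datetime import datetime, timedelta, timezone

# Compute the window once per call.
now = datetime.now(timezone.utc)
start_dt = now - timedelta(minutes=30)
end_dt = now + timedelta(minutes=30)

def in_window(starts_at: float) -> bool:
    # Keep only events whose start time lies inside [start_dt, end_dt).
    event_dt = datetime.fromtimestamp(starts_at, tz=timezone.utc)
    return start_dt <= event_dt < end_dt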
@@ -86,6 +86,8 @@ async def scrape(client: httpx.AsyncClient) -> None:

     log.info(f"Processing {len(events)} new URL(s)")

+    now = Time.now().timestamp()
+
     for i, ev in enumerate(events, start=1):
         url = await network.safe_process(
             lambda: process_event(client, url=ev["link"], url_num=i),
@@ -105,7 +107,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
             "url": url,
             "logo": logo,
             "base": BASE_URL,
-            "timestamp": Time.now().timestamp(),
+            "timestamp": now,
             "id": tvg_id or "Live.Event.us",
         }

@@ -33,6 +33,8 @@ async def scrape(client: httpx.AsyncClient) -> None:

     log.info(f'Scraping from "{BASE_URL}"')

+    now = Time.now().timestamp()
+
     if not (data := await fetch_m3u8(client)):
         log.warning("No M3U8 data received")
         return
@@ -62,7 +64,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
             "logo": logo,
             "id": tvg_id or "Live.Event.us",
             "base": "https://tvpass.org",
-            "timestamp": Time.now().timestamp(),
+            "timestamp": now,
         }

         urls[key] = entry
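Across the scrapers, each run now takes a single timestamp and reuses it for every entry instead of calling Time.now().timestamp() per entry, so all entries from one pass share the same value. A small illustration of that pattern with the standard library (the helper name here is hypothetical):

import time

def stamp_entries(events: list[dict]) -> list[dict]:
    # One timestamp for the whole batch; every entry gets the same value.
    now = time.time()
    return [{**ev, "timestamp": now} for ev in events]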