This commit is contained in:
parent 5a61e2a8d5
commit 00000d9db0
11 changed files with 809 additions and 162 deletions
@@ -1,8 +1,4 @@
 #!/usr/bin/env python3

-import asyncio
-import json
-import re
 from datetime import datetime, timedelta
-from functools import partial
 from pathlib import Path
@@ -40,6 +36,50 @@ MIRRORS = [
     "https://freeppv.fun",
 ]

+NFL_TEAMS = {
+    "Arizona Cardinals",
+    "Atlanta Falcons",
+    "Baltimore Ravens",
+    "Buffalo Bills",
+    "Carolina Panthers",
+    "Chicago Bears",
+    "Cincinnati Bengals",
+    "Cleveland Browns",
+    "Dallas Cowboys",
+    "Denver Broncos",
+    "Detroit Lions",
+    "Green Bay Packers",
+    "Houston Texans",
+    "Indianapolis Colts",
+    "Jacksonville Jaguars",
+    "Kansas City Chiefs",
+    "Las Vegas Raiders",
+    "Los Angeles Chargers",
+    "Los Angeles Rams",
+    "Miami Dolphins",
+    "Minnesota Vikings",
+    "New England Patriots",
+    "New Orleans Saints",
+    "New York Giants",
+    "New York Jets",
+    "Philadelphia Eagles",
+    "Pittsburgh Steelers",
+    "San Francisco 49ers",
+    "Seattle Seahawks",
+    "Tampa Bay Buccaneers",
+    "Tennessee Titans",
+    "Washington Redskins",
+}
+
+
+def is_nfl(event: str) -> bool:
+    try:
+        t1, t2 = event.split(" vs. ")
+
+        return t1 in NFL_TEAMS or t2 in NFL_TEAMS
+    except ValueError:
+        return False
+

 async def refresh_api_cache(
     client: httpx.AsyncClient, url: str
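For reference on the new helper: is_nfl only recognizes events written exactly as "Team A vs. Team B", with the literal " vs. " separator. Any other shape makes the tuple unpacking raise ValueError, which the function treats as "not NFL". A few hypothetical calls (illustration only, not part of the commit):

    is_nfl("Dallas Cowboys vs. Chicago Bears")  # True: both sides are in NFL_TEAMS
    is_nfl("Alabama vs. Georgia")               # False: splits cleanly, but neither side is an NFL team
    is_nfl("Dallas Cowboys at Chicago Bears")   # False: no " vs. " separator, so the unpack raises ValueError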
@@ -110,14 +150,12 @@ async def process_event(url: str, url_num: int) -> str | None:

 async def get_events(
     client: httpx.AsyncClient,
-    api_url: str,
+    base_url: str,
     cached_keys: set[str],
 ) -> list[dict[str, str]]:

     events: list[dict[str, str]] = []

-    base_url = re.match(r"(https?://.+?)/", api_url)[1]
-
     if not (
         api_data := load_cache(
             API_FILE,
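The removed re.match line derived the mirror's origin from the API URL; with the lazy group, re.match(r"(https?://.+?)/", "https://freeppv.fun/api/streams")[1] evaluates to "https://freeppv.fun" (example URL taken from the MIRRORS list above). After this change the caller passes base_url in directly, so get_events no longer re-derives a value its caller already has.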
@@ -126,8 +164,9 @@ async def get_events(
             per_entry=False,
         )
     ):
-        api_data = await refresh_api_cache(client, api_url)
-        API_FILE.write_text(json.dumps(api_data, indent=2), encoding="utf-8")
+        api_data = await refresh_api_cache(client, urljoin(base_url, "api/streams"))
+
+        write_cache(API_FILE, api_data)

     for stream_group in api_data["streams"]:
         sport = stream_group["category"]
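The direct API_FILE.write_text(json.dumps(...)) is replaced by a write_cache helper defined in one of the other changed files, so its body isn't visible here. A minimal sketch of the shape the call sites imply, write_cache(API_FILE, api_data) paired with load_cache(API_FILE, exp=..., per_entry=False), might look like the following; the envelope format, the time-based expiry, and the per_entry behavior are all assumptions, not the commit's actual code:

    import json
    import time
    from pathlib import Path
    from typing import Any

    def write_cache(path: Path, data: Any) -> None:
        # Assumed envelope: store the payload with a write timestamp so
        # load_cache can expire the whole file later.
        path.write_text(
            json.dumps({"time": time.time(), "data": data}, indent=2),
            encoding="utf-8",
        )

    def load_cache(path: Path, exp: int = 10_800, per_entry: bool = False) -> Any:
        # Return the cached payload if the file exists, parses, and is younger
        # than `exp` seconds; otherwise an empty dict so callers fall through
        # to a refresh. per_entry presumably expires entries individually by
        # their own "timestamp" fields (see the entry dict below); this sketch
        # ignores it for brevity.
        try:
            envelope = json.loads(path.read_text(encoding="utf-8"))
        except (FileNotFoundError, json.JSONDecodeError):
            return {}
        if time.time() - envelope.get("time", 0) > exp:
            return {}
        return envelope.get("data", {})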
@@ -168,8 +207,8 @@ async def get_events(
     return events


-async def main(client: httpx.AsyncClient) -> None:
-    cached_urls = load_cache(CACHE_FILE, exp=10800)
+async def scrape(client: httpx.AsyncClient) -> None:
+    cached_urls = load_cache(CACHE_FILE, exp=10_800)
     cached_count = len(cached_urls)
     urls.update(cached_urls)

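(exp=10_800 is the same value as before: 10800 seconds, a three-hour cache window. Only the digit separator is new.)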
@@ -184,7 +223,7 @@ async def main(client: httpx.AsyncClient) -> None:

     events = await get_events(
         client,
-        urljoin(base_url, "api/streams"),
+        base_url,
         set(cached_urls.keys()),
     )

@@ -200,6 +239,11 @@ async def main(client: httpx.AsyncClient) -> None:
             if url:
                 sport, event = ev["sport"], ev["event"]
+
+                if sport == "American Football":
+                    tvg_id = "NFL.Dummy.us" if is_nfl(event) else "NCAA.Sports.Dummy.us"
+                else:
+                    tvg_id = league_info(sport)[0]

                 key = f"[{sport}] {event} (PPV)"

                 entry = {
@@ -207,7 +251,7 @@ async def main(client: httpx.AsyncClient) -> None:
                     "logo": ev["logo"],
                     "base": base_url,
                     "timestamp": now.timestamp(),
-                    "id": league_info(sport)["id"],
+                    "id": tvg_id or "Live.Event.us",
                 }

                 urls[key] = cached_urls[key] = entry
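A note on the last two hunks taken together: the entry's "id" now uses the tvg_id computed earlier instead of calling league_info(sport) while building the dict, and the or expression provides a fallback. If tvg_id is falsy (say, an empty string or None for an unmapped sport), the entry gets "Live.Event.us". A hypothetical illustration:

    tvg_id = ""                           # e.g. no mapping for this sport
    entry_id = tvg_id or "Live.Event.us"  # -> "Live.Event.us"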