parent acb19b7886
commit 00000d9990

10 changed files with 718 additions and 747 deletions
@@ -5,7 +5,7 @@ from urllib.parse import urljoin
 import httpx
 from selectolax.parser import HTMLParser, Node
 
-from .utils import get_base, get_logger, league_info
+from .utils import get_base, get_logger, leagues
 
 log = get_logger(__name__)
 
@@ -111,7 +111,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
         key = f"[{sport}] {event} (S{i})"
 
-        tvg_id, logo = league_info(sport)
+        tvg_id, logo = leagues.info(sport)
 
         entry = {
             "url": link,
@@ -7,7 +7,7 @@ from selectolax.parser import HTMLParser
 from .utils import (
     get_base,
     get_logger,
-    league_info,
+    leagues,
     load_cache,
     now,
     safe_process_event,
@@ -157,7 +157,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
             f"[{sport}] {match_name} (FSTV)" if match_name else f"[{sport}] (FSTV)"
         )
 
-        tvg_id, logo = league_info(sport)
+        tvg_id, logo = leagues.info(sport)
 
         entry = {
             "url": url,
@@ -13,7 +13,7 @@ from .utils import (
     TZ,
     capture_req,
     get_logger,
-    league_info,
+    leagues,
     load_cache,
     new_browser,
     now,
@@ -308,10 +308,10 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
         key = f"[{sport}: {event}] {title} (LTVSX)"
 
-        tvg_id, logo = league_info(event)
+        tvg_id, logo = leagues.info(event)
 
         if not tvg_id:
-            tvg_id, logo = league_info(sport)
+            tvg_id, logo = leagues.info(sport)
 
         entry = {
             "url": url,
@@ -12,7 +12,7 @@ from .utils import (
     capture_req,
     get_base,
     get_logger,
-    league_info,
+    leagues,
     load_cache,
     new_browser,
     now,
@@ -36,49 +36,30 @@ MIRRORS = [
     "https://freeppv.fun",
 ]
 
-NFL_TEAMS = {
-    "Arizona Cardinals",
-    "Atlanta Falcons",
-    "Baltimore Ravens",
-    "Buffalo Bills",
-    "Carolina Panthers",
-    "Chicago Bears",
-    "Cincinnati Bengals",
-    "Cleveland Browns",
-    "Dallas Cowboys",
-    "Denver Broncos",
-    "Detroit Lions",
-    "Green Bay Packers",
-    "Houston Texans",
-    "Indianapolis Colts",
-    "Jacksonville Jaguars",
-    "Kansas City Chiefs",
-    "Las Vegas Raiders",
-    "Los Angeles Chargers",
-    "Los Angeles Rams",
-    "Miami Dolphins",
-    "Minnesota Vikings",
-    "New England Patriots",
-    "New Orleans Saints",
-    "New York Giants",
-    "New York Jets",
-    "Philadelphia Eagles",
-    "Pittsburgh Steelers",
-    "San Francisco 49ers",
-    "Seattle Seahawks",
-    "Tampa Bay Buccaneers",
-    "Tennessee Titans",
-    "Washington Commanders",
-}
-
-
-def is_nfl(event: str) -> bool:
-    try:
-        t1, t2 = event.split(" vs. ")
-
-        return t1 in NFL_TEAMS or t2 in NFL_TEAMS
-    except ValueError:
-        return event.lower() == "nfl redzone"
+
+def get_tvg(sport: str, event: str) -> str | None:
+    match sport:
+        case "American Football":
+            if leagues.is_valid(event, "NFL"):
+                return "NFL.Dummy.us"
+
+            else:
+                return "NCAA.Sports.Dummy.us"
+
+        case "Basketball":
+            if leagues.is_valid(event, "NBA"):
+                return "NBA.Basketball.Dummy.us"
+
+            elif leagues.is_valid(event, "WNBA"):
+                return "WNBA.dummy.us"
+
+            # NCAA
+            else:
+                return "Basketball.Dummy.us"
+
+        case _:
+            return
 
 
 async def refresh_api_cache(
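For review context, a rough sketch of how the new dispatch resolves IDs. The NFL names come from the removed NFL_TEAMS set; everything else assumes leagues.json now carries the team lists under a "teams" key (see the Leagues class later in this diff):

    # Sketch only; team data is assumed to live in leagues.json.
    get_tvg("American Football", "Buffalo Bills vs. Dallas Cowboys")  # -> "NFL.Dummy.us"
    get_tvg("American Football", "Ohio State vs. Michigan")           # -> "NCAA.Sports.Dummy.us" (not NFL teams)
    get_tvg("American Football", "NFL RedZone")                       # -> "NFL.Dummy.us" (RedZone special case)
    get_tvg("Baseball", "Yankees vs. Red Sox")                        # -> None; callers fall back to "Live.Event.us"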
@@ -239,11 +220,6 @@ async def scrape(client: httpx.AsyncClient) -> None:
         if url:
             sport, event = ev["sport"], ev["event"]
 
-            if sport == "American Football":
-                tvg_id = "NFL.Dummy.us" if is_nfl(event) else "NCAA.Sports.Dummy.us"
-            else:
-                tvg_id = league_info(sport)[0]
-
             key = f"[{sport}] {event} (PPV)"
 
             entry = {
@@ -251,7 +227,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 "logo": ev["logo"],
                 "base": base_url,
                 "timestamp": now.timestamp(),
-                "id": tvg_id or "Live.Event.us",
+                "id": get_tvg(sport, event) or "Live.Event.us",
             }
 
             urls[key] = cached_urls[key] = entry
@@ -5,14 +5,7 @@ from urllib.parse import urljoin
 import httpx
 from selectolax.parser import HTMLParser
 
-from .utils import (
-    get_logger,
-    league_info,
-    load_cache,
-    now,
-    safe_process_event,
-    write_cache,
-)
+from .utils import get_logger, leagues, load_cache, now, safe_process_event, write_cache
 
 log = get_logger(__name__)
 
@@ -108,7 +101,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
         key = f"[{sport}] {event} (SBTW)"
 
-        tvg_id, logo = league_info(sport)
+        tvg_id, logo = leagues.info(sport)
 
         entry = {
             "url": url,
@@ -13,7 +13,7 @@ from .utils import (
     capture_req,
     get_base,
     get_logger,
-    league_info,
+    leagues,
     load_cache,
     new_browser,
     now,
@@ -184,9 +184,13 @@ async def scrape(client: httpx.AsyncClient) -> None:
         if url:
             sport, event = ev["sport"], ev["event"]
 
-            key = f"[{sport}] {event} (SEAST)"
+            tvg_id, logo = leagues.info(sport)
 
-            tvg_id, logo = league_info(sport)
+            if sport == "NBA" and leagues.is_valid(event, "WNBA"):
+                sport, tvg_id = "WNBA", "WNBA.dummy.us"
+                logo = leagues.info("WNBA")[1]
+
+            key = f"[{sport}] {event} (SEAST)"
 
             entry = {
                 "url": url,
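One behavioral detail worth flagging here: the SEAST key is now built after the potential relabel, so a WNBA game listed under the site's NBA section gets keyed "[WNBA] ..." rather than "[NBA] ...". A minimal sketch, assuming both team names appear under "WNBA" in leagues.json (the event below is hypothetical):

    sport, event = "NBA", "Las Vegas Aces vs. New York Liberty"

    tvg_id, logo = leagues.info(sport)             # NBA id/logo by default

    if sport == "NBA" and leagues.is_valid(event, "WNBA"):
        sport, tvg_id = "WNBA", "WNBA.dummy.us"    # both teams matched WNBA
        logo = leagues.info("WNBA")[1]

    key = f"[{sport}] {event} (SEAST)"             # "[WNBA] Las Vegas Aces vs. New York Liberty (SEAST)"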
@@ -3,7 +3,7 @@ from pathlib import Path
 
 import httpx
 
-from .utils import get_logger, league_info, load_cache, now, write_cache
+from .utils import get_logger, leagues, load_cache, now, write_cache
 
 log = get_logger(__name__)
 
@@ -54,7 +54,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
         channel = url.split("/")[-2]
 
-        tvg_id, logo = league_info(sport)
+        tvg_id, logo = leagues.info(sport)
 
         entry = {
             "url": f"http://origin.thetvapp.to/hls/{channel}/mono.m3u8",
@@ -1,5 +1,5 @@
 from .cache import load_cache, write_cache
-from .config import TZ, league_info, now
+from .config import TZ, leagues, now
 from .logger import get_logger
 from .network import CLIENT, UA, capture_req, get_base, new_browser, safe_process_event
 
@@ -10,7 +10,7 @@ __all__ = [
     "capture_req",
     "get_base",
     "get_logger",
-    "league_info",
+    "leagues",
     "load_cache",
     "new_browser",
     "now",
@@ -1,4 +1,5 @@
 import json
+import re
 from datetime import datetime
 from pathlib import Path
 
@@ -12,26 +13,40 @@ live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"
 
 leagues_file = Path(__file__).parent / "leagues.json"
 
-LEAGUES: dict[str, dict[str, str]] = json.loads(
-    leagues_file.read_text(encoding="utf-8")
-)
-
-
-def league_info(name: str) -> tuple[str | None, str]:
-    name = name.upper()
-
-    if match := next(
-        (
-            (tvg_id, league_data.get("logo"))
-            for tvg_id, leagues in LEAGUES.items()
-            for league_entry in leagues
-            for league_name, league_data in league_entry.items()
-            if name == league_name or name in league_data.get("names", [])
-        ),
-        None,
-    ):
-        tvg_id, logo = match
-
-        return (tvg_id, logo or live_img)
-
-    return (None, live_img)
+
+class Leagues:
+    def __init__(self) -> None:
+        self.data = json.loads(leagues_file.read_text(encoding="utf-8"))
+
+    def teams(self, league: str) -> list[str]:
+        return self.data["teams"].get(league, [])
+
+    def info(self, name: str) -> tuple[str | None, str]:
+        name = name.upper()
+
+        if match := next(
+            (
+                (tvg_id, league_data.get("logo"))
+                for tvg_id, leagues in self.data["leagues"].items()
+                for league_entry in leagues
+                for league_name, league_data in league_entry.items()
+                if name == league_name or name in league_data.get("names", [])
+            ),
+            None,
+        ):
+            tvg_id, logo = match
+
+            return (tvg_id, logo or live_img)
+
+        return (None, live_img)
+
+    def is_valid(self, event: str, league: str) -> bool:
+        if match := re.search(r"(\-|vs.?)", event):
+            t1, t2 = event.split(match[1])
+
+            return any(t in self.teams(league) for t in (t1.strip(), t2.strip()))
+
+        return event.lower() == "nfl redzone" if league == "NFL" else False
+
+
+leagues = Leagues()
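The accessors above imply a particular leagues.json layout. A hypothetical minimal example (the real data presumably lives in the suppressed leagues.json diff below), plus how the module-level singleton reads:

    # Hypothetical shape inferred from Leagues.info() and Leagues.teams();
    # actual keys and values belong to the suppressed leagues.json diff.
    # {
    #   "leagues": {
    #     "NBA.Basketball.Dummy.us": [
    #       {"NBA": {"logo": "https://...", "names": ["NATIONAL BASKETBALL ASSOCIATION"]}}
    #     ]
    #   },
    #   "teams": {
    #     "NFL": ["Arizona Cardinals", "Atlanta Falcons", "..."],
    #     "WNBA": ["Las Vegas Aces", "New York Liberty"]
    #   }
    # }
    from .utils import leagues

    tvg_id, logo = leagues.info("nba")                             # case-insensitive via name.upper()
    ok = leagues.is_valid("Buffalo Bills - New York Jets", "NFL")  # splits on "-" or "vs", checks either side

One carry-over hazard: the old is_nfl caught the ValueError from unpacking event.split(...), but is_valid does not, so an event whose separator appears more than once (e.g. "A - B - C") would raise.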
File diff suppressed because it is too large (Load diff)