doms9 2025-11-03 11:51:52 -05:00
parent fbee979f85
commit 00000d9fe7
2 changed files with 11 additions and 13 deletions

@@ -10,7 +10,7 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}

-CACHE_FILE = Cache(Path(__file__).parent / "caches" / "lotus.json", exp=10_800)
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "lotus.json", exp=3_600)
 API_CACHE = Cache(Path(__file__).parent / "caches" / "lotus_api.json", exp=28_800)
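The lotus.json cache TTL drops from 10,800 s (3 h) to 3,600 s (1 h); the lotus_api.json cache keeps its 28,800 s (8 h) expiry. The Cache class itself is not part of this diff, so the sketch below is an assumption about how such an exp-based JSON file cache typically invalidates:

import json
import time
from pathlib import Path

class Cache:
    """Hypothetical sketch of an exp-based JSON file cache (not the real class)."""

    def __init__(self, path: Path, exp: int) -> None:
        self.path, self.exp = path, exp

    def load(self) -> dict | None:
        # Treat the cache as stale once the file is older than `exp` seconds.
        if not self.path.exists():
            return None
        if time.time() - self.path.stat().st_mtime > self.exp:
            return None
        return json.loads(self.path.read_text())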
@@ -60,9 +60,6 @@ async def get_events(
     events: list[dict[str, str]] = []

-    start_dt = now.delta(minutes=-30).timestamp()
-    end_dt = now.delta(minutes=30).timestamp()

     for info in api_data["days"]:
         day = Time.from_str(info["day_et"], "%Y-%m-%d")
@@ -75,16 +72,13 @@ async def get_events(
             if event_league == "channel tv":
                 continue

-            sport = fix_league(event_league)
-            event_name = event["title"]
-            event_ts: int = event["ts_et"]
             event_streams: list[dict] = event["streams"]

             if not (event_link := event_streams[0].get("link")):
                 continue

-            if not start_dt <= event_ts <= end_dt:
-                continue
+            sport = fix_league(event_league)
+            event_name = event["title"]

             key = f"[{sport}] {event_name} (LOTUS)"
@@ -96,7 +90,6 @@ async def get_events(
                     "sport": sport,
                     "event": event_name,
                     "link": event_link,
-                    "timestamp": event_ts,
                 }
             )
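A side effect worth noting: with the int-valued "timestamp" gone, every value in these dicts is a str, so the events: list[dict[str, str]] annotation from the first get_events hunk is now accurate. An illustrative entry (the values are made up):

{
    "sport": "NBA",                       # fix_league(event_league)
    "event": "Lakers vs. Celtics",        # event["title"]
    "link": "https://example.com/watch",  # first stream's "link"
}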
@@ -121,6 +114,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

     if events:
+        now = Time.now().timestamp()
+
         async with async_playwright() as p:
             browser, context = await network.browser(p, browser="brave")
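Instead of each event carrying its own start time, one timestamp is now taken just before the Playwright session opens and shared by every entry written in this pass. A stdlib sketch of the same pattern (stamp_batch and the use of the time module are illustrative, not from the codebase):

import time

def stamp_batch(events: list[dict]) -> list[dict]:
    # Take the timestamp once, before any slow per-event browser work,
    # so all entries from one scrape pass share the same scrape time.
    batch_ts = time.time()
    return [{**ev, "timestamp": batch_ts} for ev in events]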
@@ -140,7 +135,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 )

                 if url:
-                    sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
+                    sport, event = ev["sport"], ev["event"]

                     tvg_id, logo = leagues.get_tvg_info(sport, event)
@@ -149,8 +144,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
                     entry = {
                         "url": url,
                         "logo": logo,
-                        "base": "",
-                        "timestamp": ts,
+                        "base": "https://vividmosaica.com/",
+                        "timestamp": now,
                         "id": tvg_id or "Live.Event.us",
                     }
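The stored entry now points at a real base ("https://vividmosaica.com/", previously empty; presumably used as a base/referer when playing the stream, though this diff does not show the consumer) and is stamped with the scrape-time `now` rather than the event's start time, so lotus.json expiry is measured from when the URL was resolved. Illustrative final shape (all values except "base" are made up):

entry = {
    "url": "https://cdn.example/stream.m3u8",  # resolved stream URL
    "logo": "https://img.example/nba.png",     # from leagues.get_tvg_info()
    "base": "https://vividmosaica.com/",       # fixed, was ""
    "timestamp": 1762188712.0,                 # scrape time, not event start
    "id": "Live.Event.us",                     # tvg_id fallback
}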