doms9 2025-10-29 03:21:18 -04:00
parent 8609c0a39e
commit 00000d90b7
11 changed files with 501 additions and 187 deletions

@@ -174,36 +174,46 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

-    async with async_playwright() as p:
-        browser, context = await network.browser(p, browser="brave")
+    if events:
+        async with async_playwright() as p:
+            browser, context = await network.browser(p, browser="brave")

-        for i, ev in enumerate(events, start=1):
-            handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                )

-            url = await network.safe_process(handler, url_num=i, log=log)
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    log=log,
+                )

-            if url:
-                sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
+                if url:
+                    sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]

-                tvg_id, logo = leagues.info(sport)
+                    tvg_id, logo = leagues.info(sport)

-                if sport == "NBA" and leagues.is_valid(event, "WNBA"):
-                    sport = "WNBA"
-                    tvg_id, logo = leagues.info("WNBA")
+                    if sport == "NBA" and leagues.is_valid(event, "WNBA"):
+                        sport = "WNBA"
+                        tvg_id, logo = leagues.info("WNBA")

-                key = f"[{sport}] {event} (SEAST)"
+                    key = f"[{sport}] {event} (SEAST)"

-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": "https://embedsports.top/",
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                }
+                    entry = {
+                        "url": url,
+                        "logo": logo,
+                        "base": "https://embedsports.top/",
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                    }

-                urls[key] = cached_urls[key] = entry
+                    urls[key] = cached_urls[key] = entry

-        await browser.close()
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")