This commit is contained in:
doms9 2026-01-26 22:55:04 -05:00
parent 00000d9d52
commit 00000d927f

View file

@@ -46,7 +46,7 @@ async def process_event(
         timeout=15_000,
     )
-    await page.wait_for_timeout(2_000)
+    await page.wait_for_timeout(1_500)
     buttons = await page.query_selector_all(".lnktbj a[href*='webplayer']")
@@ -63,7 +63,7 @@ async def process_event(
            href = await btn.get_attribute("href")
            break
-    if not href:
+    else:
        log.warning(f"URL {url_num}) No available stream links.")
        return
@@ -109,7 +109,7 @@ async def process_event(
 async def refresh_xml_cache(now_ts: float) -> dict[str, dict[str, str | float]]:
-    log.info("Refreshing HTML cache")
+    log.info("Refreshing XML cache")
     events = {}
@@ -186,14 +186,14 @@ async def scrape(browser: Browser) -> None:
    log.info(f"Loaded {cached_count} event(s) from cache")
-    log.info(f'Scraping from "{BASE_URL}"')
+    log.info('Scraping from "https://livetv.sx/enx/"')
    events = await get_events(cached_urls.keys())
    log.info(f"Processing {len(events)} new URL(s)")
    if events:
-        async with network.event_context(browser, stealth=False) as context:
+        async with network.event_context(browser) as context:
            for i, ev in enumerate(events, start=1):
                async with network.event_page(context) as page:
                    handler = partial(