mirror of
https://github.com/doms9/iptv.git
synced 2026-03-07 11:18:25 +01:00
edit scraping for roxie.py
This commit is contained in:
parent
925618bf5e
commit
00000d961e
1 changed file with 65 additions and 3 deletions
@@ -2,7 +2,7 @@ import asyncio
 from functools import partial
 from urllib.parse import urljoin
 
-from playwright.async_api import Browser
+from playwright.async_api import Browser, Page
 from selectolax.parser import HTMLParser
 
 from .utils import Cache, Time, get_logger, leagues, network
@@ -74,6 +74,69 @@ async def refresh_html_cache(
     return events
 
 
+async def process_event(
+    url: str,
+    url_num: int,
+    page: Page,
+) -> str | None:
+
+    captured: list[str] = []
+
+    got_one = asyncio.Event()
+
+    handler = partial(
+        network.capture_req,
+        captured=captured,
+        got_one=got_one,
+    )
+
+    page.on("request", handler)
+
+    try:
+        await page.goto(
+            url,
+            wait_until="domcontentloaded",
+            timeout=15_000,
+        )
+
+        if btn := await page.wait_for_selector(
+            "button:has-text('Stream 1')",
+            timeout=8_000,
+        ):
+            await btn.click()
+
+        wait_task = asyncio.create_task(got_one.wait())
+
+        try:
+            await asyncio.wait_for(wait_task, timeout=6)
+        except asyncio.TimeoutError:
+            log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
+            return
+
+        finally:
+            if not wait_task.done():
+                wait_task.cancel()
+
+                try:
+                    await wait_task
+                except asyncio.CancelledError:
+                    pass
+
+        if captured:
+            log.info(f"URL {url_num}) Captured M3U8")
+            return captured[0]
+
+        log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
+        return
+
+    except Exception as e:
+        log.warning(f"URL {url_num}) Exception while processing: {e}")
+        return
+
+    finally:
+        page.remove_listener("request", handler)
+
+
 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     now = Time.clean(Time.now())
 
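Note: network.capture_req is referenced above but not defined in this diff. A minimal sketch of what such a request handler could look like, assuming it matches .m3u8 request URLs and signals the waiting coroutine through the shared asyncio.Event (the keyword names captured and got_one come from the partial(...) call in the hunk; the URL filter itself is an assumption):

import asyncio

from playwright.async_api import Request


def capture_req(
    request: Request,
    captured: list[str],
    got_one: asyncio.Event,
) -> None:
    # Assumed filter: record any request that looks like an HLS playlist
    # and wake the coroutine blocked on got_one.wait().
    if ".m3u8" in request.url:
        captured.append(request.url)
        got_one.set()

Registered via page.on("request", handler), Playwright calls this for every outgoing request, so the handler only appends and sets the event; all waiting and cancellation stays in process_event.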
@@ -136,11 +199,10 @@ async def scrape(browser: Browser) -> None:
     for i, ev in enumerate(events, start=1):
         async with network.event_page(context) as page:
             handler = partial(
-                network.process_event,
+                process_event,
                 url=ev["link"],
                 url_num=i,
                 page=page,
-                log=log,
             )
 
             url = await network.safe_process(
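Dropping log=log from the partial is consistent with process_event now living in this module and using the module-level logger directly. network.safe_process is likewise defined outside this diff; judging only by the call site, it takes the prepared zero-argument coroutine factory and shields the scrape loop from per-event failures. A hedged sketch under those assumptions (the signature and error handling here are guesses, not the repo's actual API):

import logging
from collections.abc import Awaitable, Callable

log = logging.getLogger(__name__)


async def safe_process(handler: Callable[[], Awaitable[str | None]]) -> str | None:
    # Assumed wrapper: await the prepared process_event call and convert
    # any unexpected crash into a logged None so the loop keeps going.
    try:
        return await handler()
    except Exception:
        log.exception("event processing failed")
        return None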