doms9 2025-10-09 20:18:37 -04:00
parent cc38eca60b
commit 00000d9383
4 changed files with 333 additions and 298 deletions

Changed file 1 of 4

@@ -7,7 +7,7 @@ from functools import partial
 from pathlib import Path

 import httpx
-from playwright.async_api import async_playwright
+from playwright.async_api import BrowserContext, async_playwright

 from .utils import Cache, Time, get_logger, leagues, network
@@ -89,85 +89,105 @@ async def fetch_xml_stream(url: str, ssl_ctx: ssl.SSLContext) -> io.BytesIO | None:
     return


-async def process_event(url: str, url_num: int) -> str | None:
-    async with async_playwright() as p:
-        browser, context = await network.browser(p, ignore_https_errors=True)
+async def process_event(
+    url: str,
+    url_num: int,
+    context: BrowserContext,
+) -> str | None:
     page = await context.new_page()

     captured: list[str] = []
     got_one = asyncio.Event()
     handler = partial(network.capture_req, captured=captured, got_one=got_one)

     popup = None

     try:
         await page.goto(
             url,
             wait_until="domcontentloaded",
             timeout=10_000,
         )

         btn = await page.query_selector(".lnkhdr > tbody > tr > td:nth-child(2)")

         if btn:
             try:
                 await btn.click()
                 await page.wait_for_timeout(500)
             except Exception as e:
                 log.debug(f"URL {url_num}) Failed to click Browser Links tab: {e}")
                 return
         else:
             log.warning(f"URL {url_num}) Browser Links tab not found")
             return

         link_img = await page.query_selector(
             "tr:nth-child(2) > td:nth-child(1) td:nth-child(6) img"
         )

         if not link_img:
             log.warning(f"URL {url_num}) No browser link to click.")
             return

         page.on("request", handler)

         try:
             async with page.expect_popup(timeout=5_000) as popup_info:
                 try:
                     await link_img.click()
                 except Exception as e:
                     log.debug(f"URL {url_num}) Click failed: {e}")

             popup = await popup_info.value
             popup.on("request", handler)
         except Exception:
             try:
                 await link_img.click()
             except Exception as e:
                 log.debug(f"URL {url_num}) Fallback click failed: {e}")

         wait_task = asyncio.create_task(got_one.wait())

         try:
             await asyncio.wait_for(wait_task, timeout=15)
         except asyncio.TimeoutError:
             log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
             return
         finally:
             if not wait_task.done():
                 wait_task.cancel()
                 try:
                     await wait_task
                 except asyncio.CancelledError:
                     pass

         page.remove_listener("request", handler)
         if popup:
             popup.remove_listener("request", handler)
             await popup.close()
         await page.close()

         if captured:
             log.info(f"URL {url_num}) Captured M3U8")
             return captured[-1]

         log.warning(f"URL {url_num}) No M3U8 captured")
         return
     except Exception:
         try:
             page.remove_listener("request", handler)
             if popup:
@@ -176,29 +196,8 @@ async def process_event(url: str, url_num: int) -> str | None:
                 popup.remove_listener("request", handler)
                 await popup.close()
             await page.close()
         except Exception:
             pass
-
-        await browser.close()


 async def get_events(
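
Only this LTVSX variant has to chase a popup: the click happens inside page.expect_popup, the same request handler is mirrored onto the popup, and a plain in-page click is the fallback when no popup opens. A minimal self-contained sketch of that control flow; click_and_watch_popup is an illustrative name, not project code:

from collections.abc import Callable

from playwright.async_api import ElementHandle, Page, Request


async def click_and_watch_popup(
    page: Page,
    link_img: ElementHandle,
    handler: Callable[[Request], None],
) -> None:
    # Click inside expect_popup so a tab or window opened by the click is
    # caught; mirror the request handler onto it so M3U8 requests made from
    # the popup are captured too.
    try:
        async with page.expect_popup(timeout=5_000) as popup_info:
            await link_img.click()
        popup = await popup_info.value
        popup.on("request", handler)
    except Exception:
        # No popup appeared in time: retry as a plain in-page click.
        await link_img.click()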
@@ -289,39 +288,48 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

-    for i, ev in enumerate(events, start=1):
-        link = ev["link"]
-
-        url = await network.safe_process(
-            lambda: process_event(link, url_num=i),
-            url_num=i,
-            log=log,
-        )
+    async with async_playwright() as p:
+        browser, context = await network.browser(p, ignore_https_errors=True)
+
+        for i, ev in enumerate(events, start=1):
+            link = ev["link"]
+
+            url = await network.safe_process(
+                lambda: process_event(
+                    link,
+                    url_num=i,
+                    context=context,
+                ),
+                url_num=i,
+                log=log,
+            )

             if url:
                 sport, event, title, ts = (
                     ev["sport"],
                     ev["event"],
                     ev["title"],
                     ev["timestamp"],
                 )

                 key = f"[{sport}: {event}] {title} (LTVSX)"

                 tvg_id, logo = leagues.info(event)

                 if not tvg_id:
                     tvg_id, logo = leagues.info(sport)

                 entry = {
                     "url": url,
                     "logo": logo,
                     "id": tvg_id or "Live.Event.us",
                     "base": "https://livetv.sx/enx/",
                     "timestamp": ts,
                 }

                 urls[key] = cached_urls[key] = entry

+        await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")

Changed file 2 of 4

@@ -4,7 +4,7 @@ from pathlib import Path
 from urllib.parse import urljoin

 import httpx
-from playwright.async_api import async_playwright
+from playwright.async_api import BrowserContext, async_playwright

 from .utils import Cache, Time, get_logger, leagues, network
@@ -68,60 +68,60 @@ async def refresh_api_cache(
     return r.json()


-async def process_event(url: str, url_num: int) -> str | None:
-    async with async_playwright() as p:
-        browser, context = await network.browser(p)
+async def process_event(
+    url: str,
+    url_num: int,
+    context: BrowserContext,
+) -> str | None:
     page = await context.new_page()

     captured: list[str] = []
     got_one = asyncio.Event()
     handler = partial(network.capture_req, captured=captured, got_one=got_one)

     page.on("request", handler)

     try:
         await page.goto(
             url,
             wait_until="domcontentloaded",
             timeout=15_000,
         )

         wait_task = asyncio.create_task(got_one.wait())

         try:
             await asyncio.wait_for(wait_task, timeout=10)
         except asyncio.TimeoutError:
             log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
             return
         finally:
             if not wait_task.done():
                 wait_task.cancel()
                 try:
                     await wait_task
                 except asyncio.CancelledError:
                     pass

         if captured:
             log.info(f"URL {url_num}) Captured M3U8")
             return captured[-1]

         log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
         return
     except Exception as e:
         log.warning(f"URL {url_num}) Exception while processing: {e}")
         return
     finally:
         page.remove_listener("request", handler)
         await page.close()
-        await browser.close()


 async def get_events(
@@ -199,32 +199,41 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

-    for i, ev in enumerate(events, start=1):
-        url = await network.safe_process(
-            lambda: process_event(ev["link"], url_num=i),
-            url_num=i,
-            log=log,
-        )
+    async with async_playwright() as p:
+        browser, context = await network.browser(p)
+
+        for i, ev in enumerate(events, start=1):
+            url = await network.safe_process(
+                lambda: process_event(
+                    ev["link"],
+                    url_num=i,
+                    context=context,
+                ),
+                url_num=i,
+                log=log,
+            )

             if url:
                 sport, event, logo, ts = (
                     ev["sport"],
                     ev["event"],
                     ev["logo"],
                     ev["timestamp"],
                 )

                 key = f"[{sport}] {event} (PPV)"

                 entry = {
                     "url": url,
                     "logo": logo,
                     "base": base_url,
                     "timestamp": ts,
                     "id": get_tvg(sport, event) or "Live.Event.us",
                 }

                 urls[key] = cached_urls[key] = entry

+        await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")

Changed file 3 of 4

@@ -4,7 +4,7 @@ from pathlib import Path
 from urllib.parse import urljoin

 import httpx
-from playwright.async_api import async_playwright
+from playwright.async_api import BrowserContext, async_playwright
 from selectolax.parser import HTMLParser

 from .utils import Cache, Time, get_logger, leagues, network
@@ -34,56 +34,56 @@ MIRRORS = [
 ]


-async def process_event(url: str, url_num: int) -> str | None:
-    async with async_playwright() as p:
-        browser, context = await network.browser(p, browser="brave")
+async def process_event(
+    url: str,
+    url_num: int,
+    context: BrowserContext,
+) -> str | None:
     page = await context.new_page()

     captured: list[str] = []
     got_one = asyncio.Event()
     handler = partial(network.capture_req, captured=captured, got_one=got_one)

     page.on("request", handler)

     try:
         await page.goto(url, wait_until="domcontentloaded", timeout=15_000)

         wait_task = asyncio.create_task(got_one.wait())

         try:
             await asyncio.wait_for(wait_task, timeout=10)
         except asyncio.TimeoutError:
             log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
             return
         finally:
             if not wait_task.done():
                 wait_task.cancel()
                 try:
                     await wait_task
                 except asyncio.CancelledError:
                     pass

         if captured:
             log.info(f"URL {url_num}) Captured M3U8")
             return captured[-1]

         log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
         return
     except Exception as e:
         log.warning(f"URL {url_num}) Exception while processing: {e}")
         return
     finally:
         page.remove_listener("request", handler)
         await page.close()
-        await browser.close()


 async def get_events(
@@ -173,33 +173,42 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

-    for i, ev in enumerate(events, start=1):
-        url = await network.safe_process(
-            lambda: process_event(ev["link"], url_num=i),
-            url_num=i,
-            log=log,
-        )
+    async with async_playwright() as p:
+        browser, context = await network.browser(p, browser="brave")
+
+        for i, ev in enumerate(events, start=1):
+            url = await network.safe_process(
+                lambda: process_event(
+                    ev["link"],
+                    url_num=i,
+                    context=context,
+                ),
+                url_num=i,
+                log=log,
+            )

             if url:
                 sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]

                 tvg_id, logo = leagues.info(sport)

                 if sport == "NBA" and leagues.is_valid(event, "WNBA"):
                     sport = "WNBA"
                     tvg_id, logo = leagues.info("WNBA")

                 key = f"[{sport}] {event} (SEAST)"

                 entry = {
                     "url": url,
                     "logo": logo,
                     "base": "https://embedsports.top/",
                     "timestamp": ts,
                     "id": tvg_id or "Live.Event.us",
                 }

                 urls[key] = cached_urls[key] = entry

+        await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")

Changed file 4 of 4

@@ -4,7 +4,7 @@ from functools import partial
 from pathlib import Path

 import httpx
-from playwright.async_api import async_playwright
+from playwright.async_api import BrowserContext, async_playwright
 from selectolax.parser import HTMLParser

 from .utils import Cache, Time, get_logger, leagues, network
@@ -20,56 +20,56 @@ HTML_CACHE = Cache(Path(__file__).parent / "caches" / "streamed_php.json", exp=8
 BASE_URL = "https://streamed.site/webmaster.php"


-async def process_event(url: str, url_num: int) -> str | None:
-    async with async_playwright() as p:
-        browser, context = await network.browser(p, browser="brave")
+async def process_event(
+    url: str,
+    url_num: int,
+    context: BrowserContext,
+) -> str | None:
     page = await context.new_page()

     captured: list[str] = []
     got_one = asyncio.Event()
     handler = partial(network.capture_req, captured=captured, got_one=got_one)

     page.on("request", handler)

     try:
         await page.goto(url, wait_until="domcontentloaded", timeout=15_000)

         wait_task = asyncio.create_task(got_one.wait())

         try:
             await asyncio.wait_for(wait_task, timeout=10)
         except asyncio.TimeoutError:
             log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
             return
         finally:
             if not wait_task.done():
                 wait_task.cancel()
                 try:
                     await wait_task
                 except asyncio.CancelledError:
                     pass

         if captured:
             log.info(f"URL {url_num}) Captured M3U8")
             return captured[-1]

         log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
         return
     except Exception as e:
         log.warning(f"URL {url_num}) Exception while processing: {e}")
         return
     finally:
         page.remove_listener("request", handler)
         await page.close()
-        await browser.close()


 async def refresh_html_cache(client: httpx.AsyncClient, url: str) -> dict[str, str]:
@@ -168,29 +168,38 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

-    for i, ev in enumerate(events, start=1):
-        url = await network.safe_process(
-            lambda: process_event(ev["link"], url_num=i),
-            url_num=i,
-            log=log,
-        )
+    async with async_playwright() as p:
+        browser, context = await network.browser(p, browser="brave")
+
+        for i, ev in enumerate(events, start=1):
+            url = await network.safe_process(
+                lambda: process_event(
+                    ev["link"],
+                    url_num=i,
+                    context=context,
+                ),
+                url_num=i,
+                log=log,
+            )

             if url:
                 sport, event, ts = ev["sport"], ev["event"], ev["event_ts"]

                 tvg_id, logo = leagues.info(sport)

                 key = f"[{sport}] {event} (STRMD)"

                 entry = {
                     "url": url,
                     "logo": logo,
                     "base": "",
                     "timestamp": ts,
                     "id": tvg_id or "Live.Event.us",
                 }

                 urls[key] = cached_urls[key] = entry

+        await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")