commit 00000d980a
parent 566f5c2c80
Author: doms9
Date:   2025-10-30 15:38:34 -04:00

12 changed files with 337 additions and 547 deletions

View file

@@ -22,6 +22,7 @@ epg_urls = [
 client = httpx.AsyncClient(
     timeout=5,
     follow_redirects=True,
+    http2=True,
     headers={
         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
     },
@@ -185,3 +186,8 @@ async def main() -> None:

 if __name__ == "__main__":
     asyncio.run(main())
+
+    try:
+        asyncio.run(client.aclose())
+    except Exception:
+        pass
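Note on the shutdown lines above: the httpx.AsyncClient is module-level, so it outlives the event loop that asyncio.run(main()) creates and closes; the second asyncio.run() opens a short-lived loop purely to close the client, and the broad except keeps cleanup from ever failing the run. A minimal sketch of the same pattern (the URL and the body of main() are placeholders, not from this repo):

import asyncio

import httpx

client = httpx.AsyncClient(timeout=5)


async def main() -> None:
    # placeholder work; the real script fetches EPG feeds with this client
    r = await client.get("https://example.com")
    print(r.status_code)


if __name__ == "__main__":
    asyncio.run(main())

    # main()'s loop is already closed here, so close the client in a
    # fresh throwaway loop and never let cleanup crash the script
    try:
        asyncio.run(client.aclose())
    except Exception:
        pass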

View file

@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 import asyncio
+import re

 from pathlib import Path
 from scrapers import (
@@ -30,7 +31,9 @@ def load_base() -> tuple[list[str], int]:
     data = BASE_FILE.read_text(encoding="utf-8")

-    last_chnl_num = int(data.split("tvg-chno=")[-1].split('"')[1])
+    pattern = re.compile(r'tvg-chno="(\d+)"')
+
+    last_chnl_num = max(map(int, pattern.findall(data)), default=0)

     return data.splitlines(), last_chnl_num
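For reference, the replacement regex scans every tvg-chno attribute instead of string-splitting against the last one, and default=0 keeps an empty playlist from raising. A standalone sketch (the sample M3U text is made up):

import re

sample = '#EXTINF:-1 tvg-chno="12" ...\n#EXTINF:-1 tvg-chno="7" ...'

pattern = re.compile(r'tvg-chno="(\d+)"')

# findall() yields the captured digit groups; max(..., default=0) tolerates no matches
last_chnl_num = max(map(int, pattern.findall(sample)), default=0)

print(last_chnl_num)  # 12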
@@ -113,3 +116,8 @@ async def main() -> None:

 if __name__ == "__main__":
     asyncio.run(main())
+
+    try:
+        asyncio.run(network.client.aclose())
+    except Exception:
+        pass

View file

@@ -1,10 +1,9 @@
-import asyncio
 from functools import partial
 from pathlib import Path
 from urllib.parse import urljoin

 import httpx
-from playwright.async_api import BrowserContext, async_playwright
+from playwright.async_api import async_playwright

 from .utils import Cache, Time, get_logger, leagues, network
@@ -41,62 +40,6 @@ async def refresh_api_cache(
     return r.json()


-async def process_event(
-    url: str,
-    url_num: int,
-    context: BrowserContext,
-) -> str | None:
-    page = await context.new_page()
-
-    captured: list[str] = []
-    got_one = asyncio.Event()
-
-    handler = partial(network.capture_req, captured=captured, got_one=got_one)
-
-    page.on("request", handler)
-
-    try:
-        await page.goto(
-            url,
-            wait_until="domcontentloaded",
-            timeout=15_000,
-        )
-
-        wait_task = asyncio.create_task(got_one.wait())
-
-        try:
-            await asyncio.wait_for(wait_task, timeout=6)
-        except asyncio.TimeoutError:
-            log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
-            return
-        finally:
-            if not wait_task.done():
-                wait_task.cancel()
-                try:
-                    await wait_task
-                except asyncio.CancelledError:
-                    pass
-
-        if captured:
-            log.info(f"URL {url_num}) Captured M3U8")
-            return captured[-1]
-
-        log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
-        return
-
-    except Exception as e:
-        log.warning(f"URL {url_num}) Exception while processing: {e}")
-        return
-
-    finally:
-        page.remove_listener("request", handler)
-        await page.close()
-
-
 async def get_events(
     client: httpx.AsyncClient,
     base_url: str,
@@ -186,10 +129,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
         for i, ev in enumerate(events, start=1):
             handler = partial(
-                process_event,
+                network.process_event,
                 url=ev["link"],
                 url_num=i,
                 context=context,
+                timeout=6,
+                log=log,
             )

             url = await network.safe_process(

View file

@@ -4,7 +4,7 @@ from pathlib import Path
 from urllib.parse import urljoin

 import httpx
-from playwright.async_api import BrowserContext, async_playwright
+from playwright.async_api import async_playwright
 from selectolax.parser import HTMLParser

 from .utils import Cache, Time, get_logger, leagues, network
@@ -25,62 +25,6 @@ CACHE_FILE = Cache(Path(__file__).parent / "caches" / "roxie.json", exp=10_800)
 HTML_CACHE = Cache(Path(__file__).parent / "caches" / "roxie_html.json", exp=28_800)


-async def process_event(
-    url: str,
-    url_num: int,
-    context: BrowserContext,
-) -> str | None:
-    page = await context.new_page()
-
-    captured: list[str] = []
-    got_one = asyncio.Event()
-
-    handler = partial(network.capture_req, captured=captured, got_one=got_one)
-
-    page.on("request", handler)
-
-    try:
-        await page.goto(
-            url,
-            wait_until="domcontentloaded",
-            timeout=15_000,
-        )
-
-        wait_task = asyncio.create_task(got_one.wait())
-
-        try:
-            await asyncio.wait_for(wait_task, timeout=15)
-        except asyncio.TimeoutError:
-            log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
-            return
-        finally:
-            if not wait_task.done():
-                wait_task.cancel()
-                try:
-                    await wait_task
-                except asyncio.CancelledError:
-                    pass
-
-        if captured:
-            log.info(f"URL {url_num}) Captured M3U8")
-            return captured[-1]
-
-        log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
-        return
-
-    except Exception as e:
-        log.warning(f"URL {url_num}) Exception while processing: {e}")
-        return
-
-    finally:
-        page.remove_listener("request", handler)
-        await page.close()
-
-
 async def refresh_html_cache(
     client: httpx.AsyncClient,
     url: str,
@@ -196,10 +140,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
         for i, ev in enumerate(events, start=1):
             handler = partial(
-                process_event,
+                network.process_event,
                 url=ev["link"],
                 url_num=i,
                 context=context,
+                timeout=15,
+                log=log,
             )

             url = await network.safe_process(

View file

@@ -1,10 +1,9 @@
-import asyncio
 from functools import partial
 from pathlib import Path
 from urllib.parse import urljoin

 import httpx
-from playwright.async_api import BrowserContext, async_playwright
+from playwright.async_api import async_playwright
 from selectolax.parser import HTMLParser

 from .utils import Cache, Time, get_logger, leagues, network
@@ -33,57 +32,6 @@ MIRRORS = [
 ]


-async def process_event(
-    url: str,
-    url_num: int,
-    context: BrowserContext,
-) -> str | None:
-    page = await context.new_page()
-
-    captured: list[str] = []
-    got_one = asyncio.Event()
-
-    handler = partial(network.capture_req, captured=captured, got_one=got_one)
-
-    page.on("request", handler)
-
-    try:
-        await page.goto(url, wait_until="domcontentloaded", timeout=15_000)
-
-        wait_task = asyncio.create_task(got_one.wait())
-
-        try:
-            await asyncio.wait_for(wait_task, timeout=10)
-        except asyncio.TimeoutError:
-            log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
-            return
-        finally:
-            if not wait_task.done():
-                wait_task.cancel()
-                try:
-                    await wait_task
-                except asyncio.CancelledError:
-                    pass
-
-        if captured:
-            log.info(f"URL {url_num}) Captured M3U8")
-            return captured[-1]
-
-        log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
-        return
-
-    except Exception as e:
-        log.warning(f"URL {url_num}) Exception while processing: {e}")
-        return
-
-    finally:
-        page.remove_listener("request", handler)
-        await page.close()
-
-
 async def get_events(
     client: httpx.AsyncClient,
     url: str,
@@ -176,14 +124,18 @@ async def scrape(client: httpx.AsyncClient) -> None:
     if events:
         async with async_playwright() as p:
-            browser, context = await network.browser(p, browser="brave")
+            try:
+                browser, context = await network.browser(p, browser="brave")
+            except Exception:
+                browser, context = await network.browser(p)

             for i, ev in enumerate(events, start=1):
                 handler = partial(
-                    process_event,
+                    network.process_event,
                     url=ev["link"],
                     url_num=i,
                     context=context,
+                    log=log,
                 )

                 url = await network.safe_process(
@@ -195,11 +147,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 if url:
                     sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]

-                    tvg_id, logo = leagues.info(sport)
-
-                    if sport == "NBA" and leagues.is_valid(event, "WNBA"):
-                        sport = "WNBA"
-                        tvg_id, logo = leagues.info("WNBA")
+                    tvg_id, logo = leagues.get_tvg_info(sport, event)

                     key = f"[{sport}] {event} (SEAST)"

View file

@@ -1,10 +1,9 @@
-import asyncio
 import re
 from functools import partial
 from pathlib import Path

 import httpx
-from playwright.async_api import BrowserContext, async_playwright
+from playwright.async_api import async_playwright
 from selectolax.parser import HTMLParser

 from .utils import Cache, Time, get_logger, leagues, network
@@ -20,58 +19,6 @@ HTML_CACHE = Cache(Path(__file__).parent / "caches" / "streamed_php.json", exp=2
 BASE_URL = "https://streamed.site/webmaster.php"


-async def process_event(
-    url: str,
-    url_num: int,
-    context: BrowserContext,
-) -> str | None:
-    page = await context.new_page()
-
-    captured: list[str] = []
-    got_one = asyncio.Event()
-
-    handler = partial(network.capture_req, captured=captured, got_one=got_one)
-
-    page.on("request", handler)
-
-    try:
-        await page.goto(url, wait_until="domcontentloaded", timeout=15_000)
-
-        wait_task = asyncio.create_task(got_one.wait())
-
-        try:
-            await asyncio.wait_for(wait_task, timeout=10)
-        except asyncio.TimeoutError:
-            log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
-            return
-        finally:
-            if not wait_task.done():
-                wait_task.cancel()
-                try:
-                    await wait_task
-                except asyncio.CancelledError:
-                    pass
-
-        if captured:
-            log.info(f"URL {url_num}) Captured M3U8")
-            return captured[-1]
-
-        log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
-        return
-
-    except Exception as e:
-        log.warning(f"URL {url_num}) Exception while processing: {e}")
-        return
-
-    finally:
-        page.remove_listener("request", handler)
-        await page.close()
-
-
 async def refresh_html_cache(
     client: httpx.AsyncClient, url: str
 ) -> dict[str, str | float]:
@@ -180,10 +127,11 @@ async def scrape(client: httpx.AsyncClient) -> None:
         for i, ev in enumerate(events, start=1):
             handler = partial(
-                process_event,
+                network.process_event,
                 url=ev["link"],
                 url_num=i,
                 context=context,
+                log=log,
             )

             url = await network.safe_process(

View file

@@ -1,4 +1,3 @@
-import asyncio
 import re
 from functools import partial
 from pathlib import Path
@@ -6,7 +5,7 @@ from typing import Any
 from urllib.parse import urljoin

 import httpx
-from playwright.async_api import BrowserContext, async_playwright
+from playwright.async_api import async_playwright

 from .utils import Cache, Time, get_logger, leagues, network
@@ -50,62 +49,6 @@ async def refresh_api_cache(
     return data


-async def process_event(
-    url: str,
-    url_num: int,
-    context: BrowserContext,
-) -> str | None:
-    page = await context.new_page()
-
-    captured: list[str] = []
-    got_one = asyncio.Event()
-
-    handler = partial(network.capture_req, captured=captured, got_one=got_one)
-
-    page.on("request", handler)
-
-    try:
-        await page.goto(
-            url,
-            wait_until="domcontentloaded",
-            timeout=15_000,
-        )
-
-        wait_task = asyncio.create_task(got_one.wait())
-
-        try:
-            await asyncio.wait_for(wait_task, timeout=10)
-        except asyncio.TimeoutError:
-            log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
-            return
-        finally:
-            if not wait_task.done():
-                wait_task.cancel()
-                try:
-                    await wait_task
-                except asyncio.CancelledError:
-                    pass
-
-        if captured:
-            log.info(f"URL {url_num}) Captured M3U8")
-            return captured[-1]
-
-        log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
-        return
-
-    except Exception as e:
-        log.warning(f"URL {url_num}) Exception while processing: {e}")
-        return
-
-    finally:
-        page.remove_listener("request", handler)
-        await page.close()
-
-
 async def get_events(
     client: httpx.AsyncClient,
     base_url: str,
@@ -211,10 +154,11 @@ async def scrape(client: httpx.AsyncClient) -> None:
         for i, ev in enumerate(events, start=1):
             handler = partial(
-                process_event,
+                network.process_event,
                 url=ev["link"],
                 url_num=i,
                 context=context,
+                log=log,
             )

             url = await network.safe_process(

View file

@@ -1,10 +1,9 @@
-import asyncio
 from functools import partial
 from pathlib import Path
 from urllib.parse import urljoin

 import httpx
-from playwright.async_api import BrowserContext, async_playwright
+from playwright.async_api import async_playwright

 from .utils import Cache, Time, get_logger, leagues, network
@@ -39,62 +38,6 @@ async def refresh_api_cache(
     return data


-async def process_event(
-    url: str,
-    url_num: int,
-    context: BrowserContext,
-) -> str | None:
-    page = await context.new_page()
-
-    captured: list[str] = []
-    got_one = asyncio.Event()
-
-    handler = partial(network.capture_req, captured=captured, got_one=got_one)
-
-    page.on("request", handler)
-
-    try:
-        await page.goto(
-            url,
-            wait_until="domcontentloaded",
-            timeout=15_000,
-        )
-
-        wait_task = asyncio.create_task(got_one.wait())
-
-        try:
-            await asyncio.wait_for(wait_task, timeout=6)
-        except asyncio.TimeoutError:
-            log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
-            return
-        finally:
-            if not wait_task.done():
-                wait_task.cancel()
-                try:
-                    await wait_task
-                except asyncio.CancelledError:
-                    pass
-
-        if captured:
-            log.info(f"URL {url_num}) Captured M3U8")
-            return captured[-1]
-
-        log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
-        return
-
-    except Exception as e:
-        log.warning(f"URL {url_num}) Exception while processing: {e}")
-        return
-
-    finally:
-        page.remove_listener("request", handler)
-        await page.close()
-
-
 async def get_events(
     client: httpx.AsyncClient,
     url: str,
@@ -172,10 +115,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
         for i, ev in enumerate(events, start=1):
             handler = partial(
-                process_event,
+                network.process_event,
                 url=ev["link"],
                 url_num=i,
                 context=context,
+                timeout=6,
+                log=log,
             )

             url = await network.safe_process(

View file

@@ -151,7 +151,7 @@ class Leagues:
                     else self.info("NCAA")
                 )

-            case "Basketball":
+            case "Basketball" | "NBA":
                 if self.is_valid(event, "NBA"):
                     return self.info("NBA")
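The or-pattern lets callers hand in either the raw sport name or the league name and land in the same branch, which is what lets the SEAST scraper above drop its local WNBA special-casing in favour of leagues.get_tvg_info(sport, event). A reduced, hypothetical sketch of that dispatch (only info() and is_valid() appear in this diff; the lookup data below is invented):

class Leagues:
    def info(self, league: str) -> tuple[str, str]:
        # hypothetical lookup of tvg-id and logo URL
        return f"{league}.us", f"https://logos.example/{league}.png"

    def is_valid(self, event: str, league: str) -> bool:
        # hypothetical membership check against known team names
        return league == "NBA" and "Lakers" in event

    def get_tvg_info(self, sport: str, event: str) -> tuple[str, str]:
        match sport:
            case "Basketball" | "NBA":
                if self.is_valid(event, "NBA"):
                    return self.info("NBA")
                return self.info(sport)
            case _:
                return self.info(sport)


print(Leagues().get_tvg_info("NBA", "Lakers vs Celtics"))  # NBA ids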

View file

@@ -2,6 +2,7 @@ import asyncio
 import logging
 import re
 from collections.abc import Callable
+from functools import partial
 from typing import Any

 import httpx
@@ -22,6 +23,7 @@ class Network:
             timeout=5,
             follow_redirects=True,
             headers={"User-Agent": Network.UA},
+            http2=True,
         )

         self._logger = get_logger("network")
@@ -82,14 +84,14 @@
         req: Request,
         captured: list[str],
         got_one: asyncio.Event,
-        patterns: list[str] | None = None,
     ) -> None:
-        if not patterns:
-            patterns = ["amazonaws", "knitcdn"]
+        invalids = ["amazonaws", "knitcdn"]
+
+        escaped = [re.escape(i) for i in invalids]

         pattern = re.compile(
-            rf"^(?!.*({'|'.join(patterns)})).*\.m3u8",
+            rf"^(?!.*({'|'.join(escaped)})).*\.m3u8",
             re.IGNORECASE,
         )
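The rewrite above also wraps the excluded host fragments in re.escape() so they are matched literally inside the negative lookahead. How the resulting pattern behaves (the URLs below are invented):

import re

invalids = ["amazonaws", "knitcdn"]

escaped = [re.escape(i) for i in invalids]

# match any .m3u8 request URL unless it mentions one of the excluded CDNs
pattern = re.compile(rf"^(?!.*({'|'.join(escaped)})).*\.m3u8", re.IGNORECASE)

print(bool(pattern.search("https://cdn.example/live/stream.m3u8")))  # True
print(bool(pattern.search("https://bucket.amazonaws.com/stream.m3u8")))  # False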
@@ -97,6 +99,68 @@
             captured.append(req.url)
             got_one.set()

+    async def process_event(
+        self,
+        url: str,
+        url_num: int,
+        context: BrowserContext,
+        timeout: int | float = 10,
+        log: logging.Logger | None = None,
+    ) -> str | None:
+        page = await context.new_page()
+
+        captured: list[str] = []
+        got_one = asyncio.Event()
+
+        handler = partial(
+            self.capture_req,
+            captured=captured,
+            got_one=got_one,
+        )
+
+        page.on("request", handler)
+
+        try:
+            await page.goto(
+                url,
+                wait_until="domcontentloaded",
+                timeout=15_000,
+            )
+
+            wait_task = asyncio.create_task(got_one.wait())
+
+            try:
+                await asyncio.wait_for(wait_task, timeout=timeout)
+            except asyncio.TimeoutError:
+                log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
+                return
+            finally:
+                if not wait_task.done():
+                    wait_task.cancel()
+                    try:
+                        await wait_task
+                    except asyncio.CancelledError:
+                        pass
+
+            if captured:
+                log.info(f"URL {url_num}) Captured M3U8")
+                return captured[0]
+
+            log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
+            return
+
+        except Exception as e:
+            log.warning(f"URL {url_num}) Exception while processing: {e}")
+            return
+
+        finally:
+            page.remove_listener("request", handler)
+            await page.close()
+
     @staticmethod
     async def browser(
         playwright: Playwright,
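With the method now shared, the scrapers differ only in the timeout they pre-bind. A toy, self-contained sketch of that call pattern (process_event here is a stand-in coroutine, not the Network method above, and network.safe_process, whose signature is truncated in this diff, is replaced by a direct await):

import asyncio
from functools import partial


# stand-in for Network.process_event: resolves an event page to an m3u8 URL
async def process_event(url: str, url_num: int, timeout: float) -> str | None:
    await asyncio.sleep(0)  # placeholder for the Playwright navigation and wait
    return f"{url}#stream.m3u8"


async def main() -> None:
    events = [{"link": "https://example.com/ev1"}, {"link": "https://example.com/ev2"}]

    for i, ev in enumerate(events, start=1):
        handler = partial(process_event, url=ev["link"], url_num=i, timeout=6)

        # the repo hands `handler` to network.safe_process(...); awaiting it
        # directly is a single attempt with the pre-bound keyword arguments
        url = await handler()
        print(i, url)


asyncio.run(main())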

View file

@@ -3,7 +3,7 @@ name = "iptv"
 version = "0.0.3"
 requires-python = ">=3.10"
 dependencies = [
-    "httpx>=0.28.1",
+    "httpx[http2]>=0.28.1",
     "playwright>=1.55.0",
     "pytz>=2025.2",
     "selectolax>=0.4.0",

uv.lock (generated, 52 changed lines)
View file

@@ -19,11 +19,11 @@ wheels = [
 [[package]]
 name = "certifi"
-version = "2025.8.3"
+version = "2025.10.5"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" },
 ]
[[package]] [[package]]
@@ -98,6 +98,28 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
 ]

+[[package]]
+name = "h2"
+version = "4.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "hpack" },
+    { name = "hyperframe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" },
+]
+
+[[package]]
+name = "hpack"
+version = "4.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" },
+]
+
 [[package]]
 name = "httpcore"
 version = "1.0.9"
@@ -126,13 +148,27 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
 ]

+[package.optional-dependencies]
+http2 = [
+    { name = "h2" },
+]
+
+[[package]]
+name = "hyperframe"
+version = "6.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" },
+]
+
 [[package]]
 name = "idna"
-version = "3.10"
+version = "3.11"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
 ]

 [[package]]
@@ -140,7 +176,7 @@ name = "iptv"
 version = "0.0.3"
 source = { virtual = "." }
 dependencies = [
-    { name = "httpx" },
+    { name = "httpx", extra = ["http2"] },
     { name = "playwright" },
     { name = "pytz" },
     { name = "selectolax" },
@@ -148,7 +184,7 @@ dependencies = [
 [package.metadata]
 requires-dist = [
-    { name = "httpx", specifier = ">=0.28.1" },
+    { name = "httpx", extras = ["http2"], specifier = ">=0.28.1" },
     { name = "playwright", specifier = ">=1.55.0" },
     { name = "pytz", specifier = ">=2025.2" },
     { name = "selectolax", specifier = ">=0.4.0" },