commit 00000d9937
parent 7103b0f1c4

17 changed files with 597 additions and 524 deletions
@@ -3,7 +3,7 @@ import asyncio
 from pathlib import Path

 from scrapers import fstv, livetvsx, ppv, streambtw, streameast, streamed, tvpass
-from scrapers.utils import CLIENT, UA, get_logger
+from scrapers.utils import get_logger, network

 log = get_logger(__name__)

@@ -12,7 +12,7 @@ BASE_FILE = Path(__file__).parent / "base.m3u8"
 M3U8_FILE = Path(__file__).parent / "TV.m3u8"


-def vanilla_fetch() -> tuple[list[str], int]:
+def load_base() -> tuple[list[str], int]:
     log.info("Fetching base M3U8")

     data = BASE_FILE.read_text(encoding="utf-8")

@@ -23,19 +23,19 @@ def vanilla_fetch() -> tuple[list[str], int]:


 async def main() -> None:
-    base_m3u8, tvg_chno = vanilla_fetch()
+    base_m3u8, tvg_chno = load_base()

     tasks = [
-        asyncio.create_task(fstv.scrape(CLIENT)),
-        asyncio.create_task(livetvsx.scrape(CLIENT)),
-        asyncio.create_task(ppv.scrape(CLIENT)),
-        asyncio.create_task(streambtw.scrape(CLIENT)),
-        asyncio.create_task(streameast.scrape(CLIENT)),
-        asyncio.create_task(streamed.scrape(CLIENT)),
-        asyncio.create_task(tvpass.scrape(CLIENT)),
+        asyncio.create_task(fstv.scrape(network.client)),
+        asyncio.create_task(livetvsx.scrape(network.client)),
+        asyncio.create_task(ppv.scrape(network.client)),
+        asyncio.create_task(streambtw.scrape(network.client)),
+        asyncio.create_task(streameast.scrape(network.client)),
+        asyncio.create_task(streamed.scrape(network.client)),
+        asyncio.create_task(tvpass.scrape(network.client)),
     ]

-    await asyncio.gather(*tasks)
+    await asyncio.gather(*tasks, return_exceptions=True)

     additions = (
         fstv.urls

@@ -55,10 +55,12 @@ async def main() -> None:
     ):
         live_events.extend(
             (
-                f'\n#EXTINF:-1 tvg-chno="{chnl_num}" tvg-id="{info["id"]}" tvg-name="{event}" tvg-logo="{info["logo"]}" group-title="Live Events",{event}',
+                f'\n#EXTINF:-1 tvg-chno="{chnl_num}" tvg-id="{info["id"]}" ',
+                f'tvg-name="{event}" tvg-logo="{info["logo"]}" ',
+                f'group-title="Live Events",{event}',
                 f'#EXTVLCOPT:http-referrer={info["base"]}',
                 f'#EXTVLCOPT:http-origin={info["base"]}',
-                f"#EXTVLCOPT:http-user-agent={UA}",
+                f"#EXTVLCOPT:http-user-agent={network.UA}",
                 info["url"],
             )
         )
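A note on the gather change above: with return_exceptions=True, one scraper raising no longer cancels its siblings; the exception comes back in the results list instead. A minimal sketch of the difference (hypothetical coroutines, not part of the commit):

import asyncio

async def ok() -> str:
    return "fine"

async def boom() -> str:
    raise RuntimeError("scraper failed")

async def demo() -> None:
    # Default behaviour: the first exception propagates out of gather().
    try:
        await asyncio.gather(ok(), boom())
    except RuntimeError as e:
        print("gather raised:", e)

    # With return_exceptions=True every task runs to completion and
    # exceptions are returned as plain result values.
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    print(results)  # ['fine', RuntimeError('scraper failed')]

asyncio.run(demo())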
M3U8/scrapers/fstv.py

@@ -4,15 +4,7 @@ from urllib.parse import unquote, urljoin
 import httpx
 from selectolax.parser import HTMLParser

-from .utils import (
-    get_base,
-    get_logger,
-    leagues,
-    load_cache,
-    now,
-    safe_process_event,
-    write_cache,
-)
+from .utils import Cache, Time, get_logger, leagues, network

 log = get_logger(__name__)

@@ -25,7 +17,7 @@ MIRRORS = [
     "https://fstv.us",
 ]

-CACHE_FILE = Path(__file__).parent / "caches" / "fstv.json"
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "fstv.json", exp=10_800)


 async def get_events(

@@ -39,7 +31,7 @@ async def get_events(
         r = await client.get(base_url)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{base_url}"\n{e}')
+        log.error(f'Failed to fetch "{base_url}": {e}')

         return []

@@ -115,20 +107,20 @@ async def process_event(

     if src := ifr.attributes.get("src", ""):
         log.info(f"URL {url_num}) Captured M3U8")
-        return match_name, unquote(src).split("link=")[-1]
+        return match_name or "", unquote(src).split("link=")[-1]


 async def scrape(client: httpx.AsyncClient) -> None:
-    cached_urls = load_cache(CACHE_FILE, exp=10_800)
+    cached_urls = CACHE_FILE.load()
     cached_hrefs = {entry["href"] for entry in cached_urls.values()}
     cached_count = len(cached_urls)
     urls.update(cached_urls)

-    log.info(f"Collected {cached_count} event(s) from cache")
+    log.info(f"Loaded {cached_count} event(s) from cache")

-    if not (base_url := await get_base(client, MIRRORS)):
+    if not (base_url := await network.get_base(MIRRORS)):
         log.warning("No working FSTV mirrors")
-        write_cache(CACHE_FILE, cached_urls)
+        CACHE_FILE.write(cached_urls)
         return

     events = await get_events(

@@ -140,7 +132,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

     for i, ev in enumerate(events, start=1):
-        match_name, url = await safe_process_event(
+        match_name, url = await network.safe_process(
             lambda: process_event(
                 client,
                 ev["link"],

@@ -163,8 +155,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 "url": url,
                 "logo": logo,
                 "base": base_url,
-                "timestamp": now.timestamp(),
-                "id": tvg_id,
+                "timestamp": Time.now().timestamp(),
+                "id": tvg_id or "Live.Event.us",
                 "href": ev["href"],
             }

@@ -175,4 +167,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
     else:
         log.info("No new events found")

-    write_cache(CACHE_FILE, cached_urls)
+    CACHE_FILE.write(cached_urls)
M3U8/scrapers/livetvsx.py

@@ -2,24 +2,14 @@ import asyncio
 import io
 import ssl
 import xml.etree.ElementTree as ET
-from datetime import datetime, timedelta
+from datetime import timedelta
 from functools import partial
 from pathlib import Path

 import httpx
 from playwright.async_api import async_playwright

-from .utils import (
-    TZ,
-    capture_req,
-    get_logger,
-    leagues,
-    load_cache,
-    new_browser,
-    now,
-    safe_process_event,
-    write_cache,
-)
+from .utils import Cache, Time, get_logger, leagues, network

 log = get_logger(__name__)

@@ -36,7 +26,7 @@ CERT_BUNDLE_URLS = [

 CERT_FILE = Path(__file__).parent / "caches" / "cached-cert.pem"

-CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "livetvsx.json", exp=10_800)


 async def write_to_cert(

@@ -64,9 +54,9 @@ async def refresh_cert_cache(client: httpx.AsyncClient) -> ssl.SSLContext:

 async def get_cert(client: httpx.AsyncClient) -> ssl.SSLContext:
     if CERT_FILE.is_file():
-        mtime = datetime.fromtimestamp(CERT_FILE.stat().st_mtime, TZ)
+        mtime = Time.from_ts(CERT_FILE.stat().st_mtime)

-        if now - mtime < timedelta(days=30):
+        if Time.now() - mtime < timedelta(days=30):
             return ssl.create_default_context(cafile=CERT_FILE)

     log.info("Refreshing cached certificate")

@@ -101,7 +91,7 @@ async def fetch_xml_stream(url: str, ssl_ctx: ssl.SSLContext) -> io.BytesIO | None:

 async def process_event(url: str, url_num: int) -> str | None:
     async with async_playwright() as p:
-        browser, context = await new_browser(p, ignore_https_errors=True)
+        browser, context = await network.browser(p, ignore_https_errors=True)

         page = await context.new_page()

@@ -109,7 +99,7 @@ async def process_event(url: str, url_num: int) -> str | None:

         got_one = asyncio.Event()

-        handler = partial(capture_req, captured=captured, got_one=got_one)
+        handler = partial(network.capture_req, captured=captured, got_one=got_one)

         popup = None

@@ -149,9 +139,7 @@ async def process_event(url: str, url_num: int) -> str | None:
             try:
                 await link_img.click()
             except Exception as e:
-                log.debug(
-                    f"URL {url_num}) Click failed (popup might have already been opened): {e}"
-                )
+                log.debug(f"URL {url_num}) Click failed: {e}")

             popup = await popup_info.value

@@ -194,8 +182,9 @@ async def process_event(url: str, url_num: int) -> str | None:

             return captured[-1]

-        log.warning(f"URL {url_num}) No M3U8 captured in popup or inline playback.")
+        log.warning(f"URL {url_num}) No M3U8 captured")
         return

     except Exception:
         try:
             page.remove_listener("request", handler)

@@ -220,73 +209,84 @@ async def get_events(

     events: list[dict[str, str]] = []

-    start_dt = now - timedelta(minutes=30)
-    end_dt = now + timedelta(minutes=30)
-
-    if buffer := await fetch_xml_stream(url, ssl_ctx):
-        pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
-
-        for _, elem in ET.iterparse(buffer, events=("end",)):
-            if elem.tag == "item":
-                title = elem.findtext("title")
-                desc = elem.findtext("description")
-                pub_date = elem.findtext("pubDate")
-                link = elem.findtext("link")
-
-                try:
-                    dt = datetime.strptime(pub_date, pub_date_format)
-                    dt = dt.astimezone(TZ)
-                except Exception:
-                    elem.clear()
-                    continue
-
-                if not start_dt <= dt <= end_dt:
-                    elem.clear()
-                    continue
-
-                sport, event = (
-                    (
-                        desc.split(".")[0].strip(),
-                        " ".join(p.strip() for p in desc.split(".")[1:]),
-                    )
-                    if desc
-                    else ("", "")
-                )
-
-                key = f"[{sport}: {event}] {title} (LTVSX)"
-
-                if cached_keys & {key}:
-                    elem.clear()
-                    continue
-
-                events.append(
-                    {
-                        "sport": sport,
-                        "event": event,
-                        "title": title,
-                        "link": link,
-                    }
-                )
-
-            elem.clear()
+    now = Time.now()
+
+    start_dt = now.delta(minutes=-30)
+    end_dt = now.delta(minutes=30)
+
+    if not (buffer := await fetch_xml_stream(url, ssl_ctx)):
+        return events
+
+    pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
+
+    for _, elem in ET.iterparse(buffer, events=("end",)):
+        if elem.tag == "item":
+            title = elem.findtext("title") or ""
+            desc = elem.findtext("description") or ""
+            pub_date = elem.findtext("pubDate") or ""
+            link = elem.findtext("link") or ""
+
+            if not all([title, pub_date, link]):
+                elem.clear()
+                continue
+
+            try:
+                event_dt = Time.from_str(pub_date, pub_date_format)
+            except Exception:
+                elem.clear()
+                continue
+
+            if not start_dt <= event_dt <= end_dt:
+                elem.clear()
+                continue
+
+            if desc:
+                parts = desc.split(".")
+                sport = parts[0].strip() if parts else ""
+                event = parts[1].strip() if parts else ""
+            else:
+                sport, event = "", ""
+
+            key = f"[{sport}: {event}] {title} (LTVSX)"
+
+            if cached_keys & {key}:
+                elem.clear()
+                continue
+
+            events.append(
+                {
+                    "sport": sport,
+                    "event": event,
+                    "title": title,
+                    "link": link,
+                    "timestamp": event_dt.timestamp(),
+                }
+            )
+
+        elem.clear()

     return events


 async def scrape(client: httpx.AsyncClient) -> None:
-    cached_urls = load_cache(CACHE_FILE, exp=10_800)
+    cached_urls = CACHE_FILE.load()
     cached_count = len(cached_urls)
     urls.update(cached_urls)

-    log.info(f"Collected {cached_count} event(s) from cache")
+    log.info(f"Loaded {cached_count} event(s) from cache")

     log.info(f'Scraping from "{BASE_URL}"')

-    cert = await get_cert(client)
+    ssl_ctx = await get_cert(client)
+
+    if not ssl_ctx:
+        log.error("Failed to create SSL context, aborting")
+        CACHE_FILE.write(cached_urls)
+        return

     events = await get_events(
         BASE_URL,
-        cert,
+        ssl_ctx,
         set(cached_urls.keys()),
     )

@@ -295,16 +295,19 @@ async def scrape(client: httpx.AsyncClient) -> None:
     for i, ev in enumerate(events, start=1):
         link = ev["link"]

-        url = await safe_process_event(
+        url = await network.safe_process(
             lambda: process_event(link, url_num=i),
             url_num=i,
             log=log,
         )

         if url:
-            sport = ev["sport"]
-            event = ev["event"]
-            title = ev["title"]
+            sport, event, title, ts = (
+                ev["sport"],
+                ev["event"],
+                ev["title"],
+                ev["timestamp"],
+            )

             key = f"[{sport}: {event}] {title} (LTVSX)"

@@ -316,9 +319,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
             entry = {
                 "url": url,
                 "logo": logo,
-                "id": tvg_id,
+                "id": tvg_id or "Live.Event.us",
                 "base": "https://livetv.sx/enx/",
-                "timestamp": now.timestamp(),
+                "timestamp": ts,
             }

             urls[key] = cached_urls[key] = entry

@@ -328,4 +331,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
     else:
         log.info("No new events found")

-    write_cache(CACHE_FILE, cached_urls)
+    CACHE_FILE.write(cached_urls)
M3U8/scrapers/ppv.py

@@ -1,5 +1,4 @@
 import asyncio
-from datetime import datetime, timedelta
 from functools import partial
 from pathlib import Path
 from urllib.parse import urljoin

@@ -7,26 +6,15 @@ from urllib.parse import urljoin
 import httpx
 from playwright.async_api import async_playwright

-from .utils import (
-    TZ,
-    capture_req,
-    get_base,
-    get_logger,
-    leagues,
-    load_cache,
-    new_browser,
-    now,
-    safe_process_event,
-    write_cache,
-)
+from .utils import Cache, Time, get_logger, leagues, network

 log = get_logger(__name__)

 urls: dict[str, dict[str, str | float]] = {}

-API_FILE = Path(__file__).parent / "caches" / "ppv_api.json"
+API_FILE = Cache(Path(__file__).parent / "caches" / "ppv_api.json", exp=86_400)

-CACHE_FILE = Path(__file__).parent / "caches" / "ppv.json"
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "ppv.json", exp=10_800)

 MIRRORS = [
     "https://ppvs.su",

@@ -37,7 +25,7 @@ MIRRORS = [
 ]


-def get_tvg(sport: str, event: str) -> str | None:
+def get_tvg(sport: str, event: str) -> str:
     match sport:
         case "American Football":
             if leagues.is_valid(event, "NFL"):

@@ -79,7 +67,7 @@ async def refresh_api_cache(

 async def process_event(url: str, url_num: int) -> str | None:
     async with async_playwright() as p:
-        browser, context = await new_browser(p)
+        browser, context = await network.browser(p)

         page = await context.new_page()

@@ -87,12 +75,16 @@ async def process_event(url: str, url_num: int) -> str | None:

         got_one = asyncio.Event()

-        handler = partial(capture_req, captured=captured, got_one=got_one)
+        handler = partial(network.capture_req, captured=captured, got_one=got_one)

         page.on("request", handler)

         try:
-            await page.goto(url, wait_until="domcontentloaded", timeout=15_000)
+            await page.goto(
+                url,
+                wait_until="domcontentloaded",
+                timeout=15_000,
+            )

             wait_task = asyncio.create_task(got_one.wait())

@@ -137,25 +129,18 @@ async def get_events(

     events: list[dict[str, str]] = []

-    if not (
-        api_data := load_cache(
-            API_FILE,
-            exp=86_400,
-            nearest_hr=True,
-            per_entry=False,
-        )
-    ):
+    if not (api_data := API_FILE.load(nearest_hr=True, per_entry=False)):
         api_data = await refresh_api_cache(client, urljoin(base_url, "api/streams"))

-        write_cache(API_FILE, api_data)
+        API_FILE.write(api_data)

-    for stream_group in api_data["streams"]:
-        sport = stream_group["category"]
+    for stream_group in api_data.get("streams", []):
+        sport = stream_group.get("category", [])

         if sport == "24/7 Streams":
             continue

-        for event in stream_group["streams"]:
+        for event in stream_group.get("streams", []):
             name, start_ts, end_ts, logo, uri_name = (
                 event["name"],
                 event["starts_at"],

@@ -169,11 +154,11 @@ async def get_events(
             if cached_keys & {key}:
                 continue

-            start_dt = datetime.fromtimestamp(start_ts, tz=TZ) - timedelta(minutes=30)
+            start_dt = Time.from_ts(start_ts).delta(minutes=-30)

-            end_dt = datetime.fromtimestamp(end_ts, tz=TZ) + timedelta(minutes=30)
+            end_dt = Time.from_ts(end_ts).delta(minutes=30)

-            if not start_dt <= now < end_dt:
+            if not start_dt <= Time.now() < end_dt:
                 continue

             events.append(

@@ -182,6 +167,7 @@ async def get_events(
                     "event": name,
                     "link": urljoin(base_url, f"live/{uri_name}"),
                     "logo": logo,
+                    "timestamp": start_dt.timestamp(),
                 }
             )

@@ -189,15 +175,15 @@ async def get_events(


 async def scrape(client: httpx.AsyncClient) -> None:
-    cached_urls = load_cache(CACHE_FILE, exp=10_800)
+    cached_urls = CACHE_FILE.load()
     cached_count = len(cached_urls)
     urls.update(cached_urls)

-    log.info(f"Collected {cached_count} event(s) from cache")
+    log.info(f"Loaded {cached_count} event(s) from cache")

-    if not (base_url := await get_base(client, MIRRORS)):
+    if not (base_url := await network.get_base(MIRRORS)):
         log.warning("No working PPV mirrors")
-        write_cache(CACHE_FILE, cached_urls)
+        CACHE_FILE.write(cached_urls)
         return

     log.info(f'Scraping from "{base_url}"')

@@ -211,14 +197,19 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

     for i, ev in enumerate(events, start=1):
-        url = await safe_process_event(
+        url = await network.safe_process(
             lambda: process_event(ev["link"], url_num=i),
             url_num=i,
             log=log,
         )

         if url:
-            sport, event, logo = ev["sport"], ev["event"], ev["logo"]
+            sport, event, logo, ts = (
+                ev["sport"],
+                ev["event"],
+                ev["logo"],
+                ev["timestamp"],
+            )

             key = f"[{sport}] {event} (PPV)"

@@ -226,7 +217,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 "url": url,
                 "logo": logo,
                 "base": base_url,
-                "timestamp": now.timestamp(),
+                "timestamp": ts,
                 "id": get_tvg(sport, event) or "Live.Event.us",
             }

@@ -237,4 +228,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
     else:
         log.info("No new events found")

-    write_cache(CACHE_FILE, cached_urls)
+    CACHE_FILE.write(cached_urls)
M3U8/scrapers/streambtw.py

@@ -5,7 +5,7 @@ from urllib.parse import urljoin
 import httpx
 from selectolax.parser import HTMLParser

-from .utils import get_logger, leagues, load_cache, now, safe_process_event, write_cache
+from .utils import Cache, Time, get_logger, leagues, network

 log = get_logger(__name__)

@@ -13,7 +13,7 @@ urls: dict[str, dict[str, str]] = {}

 BASE_URL = "https://streambtw.com/"

-CACHE_FILE = Path(__file__).parent / "caches" / "streambtw.json"
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "streambtw.json", exp=86_400)


 async def process_event(

@@ -45,7 +45,7 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
         r = await client.get(BASE_URL)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{BASE_URL}"\n{e}')
+        log.error(f'Failed to fetch "{BASE_URL}": {e}')

         return []

@@ -60,22 +60,24 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:

         link = card.css_first("a.btn.btn-primary")

-        if href := link.attrs.get("href"):
-            events.append(
-                {
-                    "sport": sport,
-                    "event": name,
-                    "link": urljoin(BASE_URL, href),
-                }
-            )
+        if not (href := link.attrs.get("href")):
+            continue
+
+        events.append(
+            {
+                "sport": sport,
+                "event": name,
+                "link": urljoin(BASE_URL, href),
+            }
+        )

     return events


 async def scrape(client: httpx.AsyncClient) -> None:
-    if cached := load_cache(CACHE_FILE, exp=86_400, nearest_hr=True):
+    if cached := CACHE_FILE.load():
         urls.update(cached)
-        log.info(f"Collected {len(urls)} event(s) from cache")
+        log.info(f"Loaded {len(urls)} event(s) from cache")
         return

     log.info(f'Scraping from "{BASE_URL}"')

@@ -85,10 +87,11 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

     for i, ev in enumerate(events, start=1):
-        url = await safe_process_event(
+        url = await network.safe_process(
             lambda: process_event(client, url=ev["link"], url_num=i),
             url_num=i,
             log=log,
+            timeout=10,
         )

         if url:

@@ -102,12 +105,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 "url": url,
                 "logo": logo,
                 "base": BASE_URL,
-                "timestamp": now.timestamp(),
-                "id": tvg_id,
+                "timestamp": Time.now().timestamp(),
+                "id": tvg_id or "Live.Event.us",
             }

             urls[key] = entry

     log.info(f"Collected {len(urls)} event(s)")

-    write_cache(CACHE_FILE, urls)
+    CACHE_FILE.write(urls)
M3U8/scrapers/streameast.py

@@ -1,5 +1,4 @@
 import asyncio
-from datetime import datetime, timedelta
 from functools import partial
 from pathlib import Path
 from urllib.parse import urljoin

@@ -8,24 +7,13 @@ import httpx
 from playwright.async_api import async_playwright
 from selectolax.parser import HTMLParser

-from .utils import (
-    TZ,
-    capture_req,
-    get_base,
-    get_logger,
-    leagues,
-    load_cache,
-    new_browser,
-    now,
-    safe_process_event,
-    write_cache,
-)
+from .utils import Cache, Time, get_logger, leagues, network

 log = get_logger(__name__)

 urls: dict[str, dict[str, str | float]] = {}

-CACHE_FILE = Path(__file__).parent / "caches" / "streameast.json"
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "streameast.json", exp=10_800)

 MIRRORS = [
     "https://streameast.ga",

@@ -48,7 +36,7 @@ MIRRORS = [

 async def process_event(url: str, url_num: int) -> str | None:
     async with async_playwright() as p:
-        browser, context = await new_browser(p, browser="brave")
+        browser, context = await network.browser(p, browser="brave")

         page = await context.new_page()

@@ -56,7 +44,7 @@ async def process_event(url: str, url_num: int) -> str | None:

         got_one = asyncio.Event()

-        handler = partial(capture_req, captured=captured, got_one=got_one)
+        handler = partial(network.capture_req, captured=captured, got_one=got_one)

         page.on("request", handler)

@@ -114,8 +102,10 @@ async def get_events(
     soup = HTMLParser(r.text)
     events = []

-    start_dt = now - timedelta(minutes=30)
-    end_dt = now + timedelta(minutes=30)
+    now = Time.now()
+
+    start_dt = now.delta(minutes=-30)
+    end_dt = now.delta(minutes=30)

     for section in soup.css("div.se-sport-section"):
         if not (sport := section.attributes.get("data-sport-name", "").strip()):

@@ -147,7 +137,7 @@ async def get_events(
             if cached_keys & {key}:
                 continue

-            event_dt = datetime.fromtimestamp(timestamp, TZ)
+            event_dt = Time.from_ts(timestamp)

             if time_text == "LIVE" or (start_dt <= event_dt < end_dt):
                 events.append(

@@ -155,6 +145,7 @@ async def get_events(
                         "sport": sport,
                         "event": name,
                         "link": href,
+                        "timestamp": timestamp,
                     }
                 )

@@ -162,15 +153,15 @@ async def get_events(


 async def scrape(client: httpx.AsyncClient) -> None:
-    cached_urls = load_cache(CACHE_FILE, exp=10_800)
+    cached_urls = CACHE_FILE.load()
     cached_count = len(cached_urls)
     urls.update(cached_urls)

-    log.info(f"Collected {cached_count} event(s) from cache")
+    log.info(f"Loaded {cached_count} event(s) from cache")

-    if not (base_url := await get_base(client, MIRRORS)):
+    if not (base_url := await network.get_base(MIRRORS)):
         log.warning("No working StreamEast mirrors")
-        write_cache(CACHE_FILE, cached_urls)
+        CACHE_FILE.write(cached_urls)
         return

     log.info(f'Scraping from "{base_url}"')

@@ -184,14 +175,14 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")

     for i, ev in enumerate(events, start=1):
-        url = await safe_process_event(
+        url = await network.safe_process(
             lambda: process_event(ev["link"], url_num=i),
             url_num=i,
             log=log,
         )

         if url:
-            sport, event = ev["sport"], ev["event"]
+            sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]

             tvg_id, logo = leagues.info(sport)

@@ -205,8 +196,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 "url": url,
                 "logo": logo,
                 "base": base_url,
-                "timestamp": now.timestamp(),
-                "id": tvg_id,
+                "timestamp": ts,
+                "id": tvg_id or "Live.Event.us",
             }

             urls[key] = cached_urls[key] = entry

@@ -216,4 +207,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
     else:
         log.info("No new events found")

-    write_cache(CACHE_FILE, cached_urls)
+    CACHE_FILE.write(cached_urls)
M3U8/scrapers/streamed.py

@@ -1,6 +1,5 @@
 import asyncio
 import re
-from datetime import datetime, timedelta
 from functools import partial
 from pathlib import Path

@@ -8,40 +7,22 @@ import httpx
 from playwright.async_api import async_playwright
 from selectolax.parser import HTMLParser

-from .utils import (
-    TZ,
-    capture_req,
-    get_logger,
-    leagues,
-    load_cache,
-    new_browser,
-    now,
-    safe_process_event,
-    write_cache,
-)
+from .utils import Cache, Time, get_logger, leagues, network

 log = get_logger(__name__)

 urls: dict[str, dict[str, str | float]] = {}

-CACHE_FILE = Path(__file__).parent / "caches" / "streamed.json"
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "streamed.json", exp=10_800)

-HTML_CACHE = Path(__file__).parent / "caches" / "streamed_php.json"
+HTML_CACHE = Cache(Path(__file__).parent / "caches" / "streamed_php.json", exp=86_400)

-base_url = "https://streamed.site/webmaster.php"
+BASE_URL = "https://streamed.site/webmaster.php"


-def get_date(s: str) -> datetime:
-    try:
-        return datetime.strptime(s, "%Y-%m-%d %H:%M %Z").astimezone(TZ)
-    except ValueError:
-        s = s.replace("ET", "").strip()
-        return datetime.strptime(s, "%Y-%m-%d %H:%M").astimezone(TZ)
-
-
 async def process_event(url: str, url_num: int) -> str | None:
     async with async_playwright() as p:
-        browser, context = await new_browser(p, browser="brave")
+        browser, context = await network.browser(p, browser="brave")

         page = await context.new_page()

@@ -49,7 +30,7 @@ async def process_event(url: str, url_num: int) -> str | None:

         got_one = asyncio.Event()

-        handler = partial(capture_req, captured=captured, got_one=got_one)
+        handler = partial(network.capture_req, captured=captured, got_one=got_one)

         page.on("request", handler)

@@ -102,12 +83,13 @@ async def refresh_html_cache(client: httpx.AsyncClient, url: str) -> dict[str, s

     soup = HTMLParser(r.text)
     events = {}
+    now = Time.now().to_tz("EST")

     for row in soup.css("div.wrap div.row"):
         if not (date := row.css_first("div.date")):
             continue

-        event_dt = get_date(date.text(strip=True))
+        event_dt = Time.from_str(date.text(strip=True)).to_tz("EST")

         if event_dt.date() != now.date():
             continue

@@ -134,8 +116,8 @@ async def refresh_html_cache(client: httpx.AsyncClient, url: str) -> dict[str, s
             "sport": sport,
             "event": event,
             "link": f"https://streamed.site/set.php?{m[1]}",
-            "event_ts": event_dt.timestamp(),
             "timestamp": now.timestamp(),
+            "event_ts": event_dt.timestamp(),
         }

     return events

@@ -147,13 +129,15 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:

-    if not (events := load_cache(HTML_CACHE, exp=86_400, nearest_hr=True)):
+    if not (events := HTML_CACHE.load(nearest_hr=True)):
         events = await refresh_html_cache(client, url)
-        write_cache(HTML_CACHE, events)
+        HTML_CACHE.write(events)

     live = []
-    start_ts = (now - timedelta(minutes=30)).timestamp()
-    end_ts = (now + timedelta(minutes=30)).timestamp()
+    now = Time.now().to_tz("EST")
+
+    start_ts = now.delta(minutes=-30).to_tz("EST").timestamp()
+    end_ts = now.delta(minutes=30).to_tz("EST").timestamp()

     for k, v in events.items():
         if cached_keys & {k}:

@@ -168,31 +152,31 @@ async def get_events(


 async def scrape(client: httpx.AsyncClient) -> None:
-    cached_urls = load_cache(CACHE_FILE, exp=10_800)
+    cached_urls = CACHE_FILE.load()
     cached_count = len(cached_urls)
     urls.update(cached_urls)

-    log.info(f"Collected {cached_count} event(s) from cache")
+    log.info(f"Loaded {cached_count} event(s) from cache")

-    log.info(f'Scraping from "{base_url}"')
+    log.info(f'Scraping from "{BASE_URL}"')

     events = await get_events(
         client,
-        base_url,
+        BASE_URL,
         set(cached_urls.keys()),
     )

     log.info(f"Processing {len(events)} new URL(s)")

     for i, ev in enumerate(events, start=1):
-        url = await safe_process_event(
+        url = await network.safe_process(
             lambda: process_event(ev["link"], url_num=i),
             url_num=i,
             log=log,
         )

         if url:
-            sport, event = ev["sport"], ev["event"]
+            sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]

             tvg_id, logo = leagues.info(sport)

@@ -202,8 +186,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
                 "url": url,
                 "logo": logo,
                 "base": "https://streamed.site/",
-                "timestamp": now.timestamp(),
-                "id": tvg_id,
+                "timestamp": ts,
+                "id": tvg_id or "Live.Event.us",
             }

             urls[key] = cached_urls[key] = entry

@@ -213,4 +197,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
     else:
         log.info("No new events found")

-    write_cache(CACHE_FILE, cached_urls)
+    CACHE_FILE.write(cached_urls)
M3U8/scrapers/tvpass.py

@@ -3,15 +3,15 @@ from pathlib import Path

 import httpx

-from .utils import get_logger, leagues, load_cache, now, write_cache
+from .utils import Cache, Time, get_logger, leagues

-log = get_logger(__name__)
+log = get_logger()

 urls: dict[str, dict[str, str]] = {}

 BASE_URL = "https://tvpass.org/playlist/m3u"

-CACHE_FILE = Path(__file__).parent / "caches" / "tvpass.json"
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "tvpass.json", exp=86_400)


 async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]:

@@ -19,21 +19,25 @@ async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]:
         r = await client.get(BASE_URL)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{BASE_URL}"\n{e}')
+        log.error(f'Failed to fetch "{BASE_URL}": {e}')
         return []

     return r.text.splitlines()


 async def scrape(client: httpx.AsyncClient) -> None:
-    if cached := load_cache(CACHE_FILE, exp=86_400, nearest_hr=True):
+    if cached := CACHE_FILE.load(nearest_hr=True):
         urls.update(cached)
-        log.info(f"Collected {len(urls)} event(s) from cache")
+        log.info(f"Loaded {len(urls)} event(s) from cache")
         return

     log.info(f'Scraping from "{BASE_URL}"')

-    for i, line in enumerate(data := await fetch_m3u8(client)):
+    if not (data := await fetch_m3u8(client)):
+        log.warning("No M3U8 data received")
+        return
+
+    for i, line in enumerate(data):
         if line.startswith("#EXTINF"):
             tvg_id_match = re.search(r'tvg-id="([^"]*)"', line)
             tvg_name_match = re.search(r'tvg-name="([^"]*)"', line)

@@ -56,13 +60,13 @@ async def scrape(client: httpx.AsyncClient) -> None:
             entry = {
                 "url": f"http://origin.thetvapp.to/hls/{channel}/mono.m3u8",
                 "logo": logo,
-                "id": tvg_id,
+                "id": tvg_id or "Live.Event.us",
                 "base": "https://tvpass.org",
-                "timestamp": now.timestamp(),
+                "timestamp": Time.now().timestamp(),
             }

             urls[key] = entry

-    write_cache(CACHE_FILE, urls)
+    CACHE_FILE.write(urls)

     log.info(f"Cached {len(urls)} event(s)")
M3U8/scrapers/utils/__init__.py

@@ -1,19 +1,12 @@
-from .cache import load_cache, write_cache
-from .config import TZ, leagues, now
+from .caching import Cache
+from .config import Time, leagues
 from .logger import get_logger
-from .network import CLIENT, UA, capture_req, get_base, new_browser, safe_process_event
+from .webwork import network

 __all__ = [
-    "CLIENT",
-    "TZ",
-    "UA",
-    "capture_req",
-    "get_base",
+    "Cache",
+    "Time",
     "get_logger",
     "leagues",
-    "load_cache",
-    "new_browser",
-    "now",
-    "safe_process_event",
-    "write_cache",
+    "network",
 ]
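For orientation, this is the re-exported surface of scrapers.utils after the commit, as the scrapers above consume it. The shape of `network` is assumed purely from the call sites in this diff; the new webwork module itself is not part of this excerpt.

from scrapers.utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

# Usage seen in the scrapers above (assumed API, not shown in this diff):
#   network.client                           shared httpx.AsyncClient
#   network.UA                               user-agent string for EXTVLCOPT headers
#   await network.get_base(MIRRORS)          first reachable mirror, or None
#   await network.browser(p, browser="brave")
#   network.capture_req(...)                 Playwright request handler
#   await network.safe_process(fn, url_num=i, log=log, timeout=10)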
M3U8/scrapers/utils/cache.py (deleted)

@@ -1,50 +0,0 @@
-import json
-from datetime import datetime
-from pathlib import Path
-
-from .config import now
-
-
-def near_hr(dt: datetime) -> float:
-    return dt.replace(minute=0, second=0, microsecond=0).timestamp()
-
-
-def is_fresh(
-    entry: dict,
-    nearest_hr: bool,
-    exp: int,
-) -> bool:
-    ts: float | int = entry.get("timestamp", 31496400)
-
-    if nearest_hr:
-        ts = near_hr(datetime.fromtimestamp(ts))
-
-    return now.timestamp() - ts < exp
-
-
-def load_cache(
-    file: Path,
-    exp: int | float,
-    nearest_hr: bool = False,
-    per_entry: bool = True,
-) -> dict[str, dict[str, str | float]]:
-    try:
-        data: dict = json.loads(file.read_text(encoding="utf-8"))
-    except (FileNotFoundError, json.JSONDecodeError):
-        return {}
-
-    if per_entry:
-        return {k: v for k, v in data.items() if is_fresh(v, nearest_hr, exp)}
-
-    ts: float | int = data.get("timestamp", 31496400)
-
-    if nearest_hr:
-        ts = near_hr(datetime.fromtimestamp(ts))
-
-    return data if now.timestamp() - ts < exp else {}
-
-
-def write_cache(file: Path, data: dict) -> None:
-    file.parent.mkdir(parents=True, exist_ok=True)
-
-    file.write_text(json.dumps(data, indent=2), encoding="utf-8")
M3U8/scrapers/utils/caching.py (new file, 65 lines)

@@ -0,0 +1,65 @@
+import json
+from datetime import datetime
+from pathlib import Path
+
+from .config import Time
+
+
+class Cache:
+    def __init__(self, file: Path, exp: int | float) -> None:
+        self.file = file
+        self.exp = exp
+
+    @staticmethod
+    def near_hr(dt: datetime) -> float:
+        return dt.replace(minute=0, second=0, microsecond=0).timestamp()
+
+    def is_fresh(
+        self,
+        entry: dict,
+        nearest_hr: bool,
+    ) -> bool:
+        ts: float | int = entry.get("timestamp", 31496400)
+
+        if nearest_hr:
+            ts = self.near_hr(Time.from_ts(ts))
+
+        return Time.now().timestamp() - ts < self.exp
+
+    def load(
+        self,
+        nearest_hr: bool = False,
+        per_entry: bool = True,
+    ) -> dict[str, dict[str, str | float]]:
+        try:
+            data: dict = json.loads(self.file.read_text(encoding="utf-8"))
+        except (FileNotFoundError, json.JSONDecodeError):
+            return {}
+
+        if not data:
+            return {}
+
+        if per_entry:
+            return {k: v for k, v in data.items() if self.is_fresh(v, nearest_hr)}
+
+        ts: float | int = data.get("timestamp", 31496400)
+
+        if nearest_hr:
+            ts = self.near_hr(Time.from_ts(ts))
+
+        return data if self.is_fresh({"timestamp": ts}, False) else {}
+
+    def write(self, data: dict) -> None:
+        self.file.parent.mkdir(parents=True, exist_ok=True)
+
+        self.file.write_text(
+            json.dumps(
+                data,
+                indent=2,
+                ensure_ascii=False,
+            ),
+            encoding="utf-8",
+        )
+
+
+__all__ = ["Cache"]
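The Cache class bundles the old load_cache/write_cache helpers together with their expiry setting, so each scraper declares the expiry once at construction time. A short usage sketch mirroring the fstv scraper above (path and expiry taken from the diff):

from pathlib import Path

from scrapers.utils import Cache

# 3-hour expiry on each cached event, same as the fstv scraper.
CACHE_FILE = Cache(Path(__file__).parent / "caches" / "fstv.json", exp=10_800)

cached_urls = CACHE_FILE.load()   # drops entries whose "timestamp" is older than exp
CACHE_FILE.write(cached_urls)     # pretty-printed UTF-8 JSON, parent dirs created on demand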
M3U8/scrapers/utils/config.py

@@ -1,27 +1,79 @@
 import json
 import re
-from datetime import datetime
+from datetime import datetime, timedelta, timezone
 from pathlib import Path

 import pytz

-TZ = pytz.timezone("America/New_York")
+ZONES = {"ET": pytz.timezone("America/New_York"), "UTC": timezone.utc}

-now = datetime.now(TZ)
+ZONES["EDT"] = ZONES["EST"] = ZONES["ET"]

-live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"
-
-leagues_file = Path(__file__).parent / "leagues.json"
+
+class Time(datetime):
+    TZ = timezone.utc
+
+    @classmethod
+    def now(cls) -> "Time":
+        dt = datetime.now(cls.TZ)
+        return cls.fromtimestamp(dt.timestamp(), tz=cls.TZ)
+
+    @classmethod
+    def from_ts(cls, ts: int | float) -> "Time":
+        return cls.fromtimestamp(ts, tz=cls.TZ)
+
+    def delta(self, **kwargs) -> "Time":
+        new_dt = super().__add__(timedelta(**kwargs))
+        return self.__class__.fromtimestamp(new_dt.timestamp(), tz=new_dt.tzinfo)
+
+    @classmethod
+    def from_str(cls, s: str, fmt: str | None = None) -> "Time":
+        pattern = r"\b(ET|UTC|EST|EDT)\b"
+
+        match = re.search(pattern, s)
+
+        tz = ZONES.get(match[1]) if match else cls.TZ
+
+        cleaned_str = re.sub(pattern, "", s).strip()
+
+        if fmt:
+            dt = datetime.strptime(cleaned_str, fmt)
+        else:
+            formats = [
+                "%Y-%m-%d %H:%M",
+                "%Y-%m-%d %H:%M:%S",
+            ]
+
+            for frmt in formats:
+                try:
+                    dt = datetime.strptime(cleaned_str, frmt)
+                    break
+                except ValueError:
+                    continue
+            else:
+                return cls.from_ts(31496400)
+
+        dt = tz.localize(dt) if hasattr(tz, "localize") else dt.replace(tzinfo=tz)
+
+        return cls.fromtimestamp(dt.astimezone(cls.TZ).timestamp(), tz=cls.TZ)
+
+    def to_tz(self, tzone: str) -> "Time":
+        dt = self.astimezone(ZONES[tzone])
+        return self.__class__.fromtimestamp(dt.timestamp(), tz=ZONES[tzone])


 class Leagues:
     def __init__(self) -> None:
-        self.data = json.loads(leagues_file.read_text(encoding="utf-8"))
+        self.data = json.loads(
+            (Path(__file__).parent / "leagues.json").read_text(encoding="utf-8")
+        )
+
+        self.live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"

     def teams(self, league: str) -> list[str]:
         return self.data["teams"].get(league, [])

-    def info(self, name: str) -> tuple[str | str]:
+    def info(self, name: str) -> tuple[str | None, str]:
         name = name.upper()

         if match := next(

@@ -36,9 +88,9 @@ class Leagues:
         ):
             tvg_id, logo = match

-            return (tvg_id, logo or live_img)
+            return (tvg_id, logo or self.live_img)

-        return ("Live.Event.us", live_img)
+        return (None, self.live_img)

     def is_valid(self, event: str, league: str) -> bool:
         if match := re.search(r"(\-|vs.?)", event):

@@ -48,5 +100,11 @@ class Leagues:

         return event.lower() == "nfl redzone" if league == "NFL" else False

+    @property
+    def league_names(self) -> list[str]:
+        return self.data["teams"].keys()
+

 leagues = Leagues()
+
+__all__ = ["leagues", "Time"]
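Time is a datetime subclass pinned to UTC, so the scrapers share one arithmetic style instead of mixing a module-level `now` with ad-hoc pytz calls. A small sketch using only the methods defined above (the timestamp string is made up for illustration):

from scrapers.utils import Time

now = Time.now()                      # timezone-aware UTC "now"
start_dt = now.delta(minutes=-30)     # timedelta arithmetic that stays a Time
end_dt = now.delta(minutes=30)

# Parse the livetvsx pubDate format; unparseable strings fall back to the 31496400 sentinel.
event_dt = Time.from_str("Mon, 01 Jan 2024 18:30:00 +0000", "%a, %d %b %Y %H:%M:%S %z")

local = now.to_tz("EST")              # rendered in the ET zone registered in ZONES
print(start_dt <= event_dt <= end_dt)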
M3U8/scrapers/utils/logger.py

@@ -10,22 +10,23 @@ LOG_FMT = (
 )

 COLORS = {
-    "DEBUG": "\033[37m",
+    "DEBUG": "\033[36m",
     "INFO": "\033[32m",
     "WARNING": "\033[33m",
     "ERROR": "\033[31m",
-    "CRITICAL": "\033[41m",
+    "CRITICAL": "\033[1;41m",
     "reset": "\033[0m",
 }


 class ColorFormatter(logging.Formatter):
     def format(self, record) -> str:
-        color = COLORS.get(record.levelname, "")
+        color = COLORS.get(record.levelname, COLORS["reset"])
         levelname = record.levelname
-        record.levelname = f"{color}{levelname}{COLORS['reset']}"
+        record.levelname = f"{color}{levelname:<8}{COLORS['reset']}"
         formatted = super().format(record)
         record.levelname = levelname

         return formatted

@@ -41,5 +42,9 @@ def get_logger(name: str | None = None) -> logging.Logger:
     handler.setFormatter(formatter)
     logger.addHandler(handler)
     logger.setLevel(logging.INFO)
+    logger.propagate = False

     return logger
+
+
+__all__ = ["get_logger", "ColorFormatter"]
@@ -1,154 +0,0 @@
-import asyncio
-import logging
-import re
-from collections.abc import Callable
-from typing import Any
-
-import httpx
-from playwright.async_api import Browser, BrowserContext, Playwright, Request
-
-UA = (
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
-    "AppleWebKit/537.36 (KHTML, like Gecko) "
-    "Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
-)
-
-CLIENT = httpx.AsyncClient(
-    timeout=5,
-    follow_redirects=True,
-    headers={"User-Agent": UA},
-)
-
-
-async def check_status(client: httpx.AsyncClient, url: str) -> bool:
-    try:
-        r = await client.get(url)
-        r.raise_for_status()
-    except Exception:
-        return False
-
-    return r.status_code == 200
-
-
-async def get_base(client: httpx.AsyncClient, mirrors: list[str]) -> str | None:
-    tasks = [check_status(client, link) for link in mirrors]
-    results = await asyncio.gather(*tasks)
-
-    try:
-        return [url for url, ok in zip(mirrors, results) if ok][0]
-    except IndexError:
-        return
-
-
-async def safe_process_event(
-    fn: Callable,
-    url_num: int,
-    timeout: int | float = 15,
-    log: logging.Logger | None = None,
-) -> Any | None:
-    if not log:
-        log = logging.getLogger(__name__)
-
-    task = asyncio.create_task(fn())
-
-    try:
-        return await asyncio.wait_for(task, timeout=timeout)
-    except asyncio.TimeoutError:
-        log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
-
-        task.cancel()
-
-        try:
-            await task
-        except asyncio.CancelledError:
-            pass
-        except Exception as e:
-            log.debug(f"URL {url_num}) Ignore exception after timeout: {e}")
-
-
-def capture_req(
-    req: Request,
-    captured: list[str],
-    got_one: asyncio.Event,
-) -> None:
-    valid_m3u8 = re.compile(r"^(?!.*(amazonaws|knitcdn)).*\.m3u8")
-
-    if valid_m3u8.search(req.url):
-        captured.append(req.url)
-        got_one.set()
-
-
-async def new_browser(
-    playwright: Playwright,
-    browser: str = "firefox",
-    ignore_https_errors: bool = False,
-) -> tuple[Browser, BrowserContext]:
-    if browser == "brave":
-        brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222")
-        context = brwsr.contexts[0]
-    else:
-        brwsr = await playwright.firefox.launch(headless=True)
-
-        context = await brwsr.new_context(
-            user_agent=UA,
-            ignore_https_errors=ignore_https_errors,
-            viewport={"width": 1366, "height": 768},
-            device_scale_factor=1,
-            locale="en-US",
-            timezone_id="America/New_York",
-            color_scheme="dark",
-            permissions=["geolocation"],
-            extra_http_headers={
-                "Accept-Language": "en-US,en;q=0.9",
-                "Upgrade-Insecure-Requests": "1",
-            },
-        )
-
-        await context.add_init_script(
-            """
-            Object.defineProperty(navigator, 'webdriver', {get: () => undefined});
-
-            Object.defineProperty(navigator, 'languages', {
-                get: () => ['en-US', 'en']
-            });
-
-            Object.defineProperty(navigator, 'plugins', {
-                get: () => [1, 2, 3, 4]
-            });
-
-            const elementDescriptor = Object.getOwnPropertyDescriptor(HTMLElement.prototype, 'offsetHeight');
-            Object.defineProperty(HTMLDivElement.prototype, 'offsetHeight', {
-                ...elementDescriptor,
-                get: function() {
-                    if (this.id === 'modernizr') { return 24; }
-                    return elementDescriptor.get.apply(this);
-                }
-            });
-
-            Object.defineProperty(window.screen, 'width', { get: () => 1366 });
-            Object.defineProperty(window.screen, 'height', { get: () => 768 });
-
-            const getParameter = WebGLRenderingContext.prototype.getParameter;
-            WebGLRenderingContext.prototype.getParameter = function (param) {
-                if (param === 37445) return "Intel Inc."; // UNMASKED_VENDOR_WEBGL
-                if (param === 37446) return "Intel Iris OpenGL Engine"; // UNMASKED_RENDERER_WEBGL
-                return getParameter.apply(this, [param]);
-            };
-
-            const observer = new MutationObserver(mutations => {
-                mutations.forEach(mutation => {
-                    mutation.addedNodes.forEach(node => {
-                        if (node.tagName === 'IFRAME' && node.hasAttribute('sandbox')) {
-                            node.removeAttribute('sandbox');
-                        }
-                    });
-                });
-            });
-
-            observer.observe(document.documentElement, { childList: true, subtree: true });
-            """
-        )
-
-    return brwsr, context
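For orientation only, a rough mapping from the module-level helpers removed above to the Network class added in webwork.py below, assuming the shared network instance that the scrapers now receive:

from scrapers.utils import network

# UA                                -> network.UA
# CLIENT                            -> network.client
# await check_status(CLIENT, url)   -> await network.check_status(url)
# await get_base(CLIENT, mirrors)   -> await network.get_base(mirrors)
# await safe_process_event(fn, n)   -> await network.safe_process(fn, n)
# capture_req(req, urls, got_one)   -> network.capture_req(req, urls, got_one, patterns=None)
# await new_browser(pw, "firefox")  -> await network.browser(pw, "firefox")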
174  M3U8/scrapers/utils/webwork.py  (new file)
@@ -0,0 +1,174 @@
+import asyncio
+import logging
+import re
+from collections.abc import Callable
+from typing import Any
+
+import httpx
+from playwright.async_api import Browser, BrowserContext, Playwright, Request
+
+from .logger import get_logger
+
+
+class Network:
+    UA = (
+        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
+        "AppleWebKit/537.36 (KHTML, like Gecko) "
+        "Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
+    )
+
+    def __init__(self) -> None:
+        self.client = httpx.AsyncClient(
+            timeout=5,
+            follow_redirects=True,
+            headers={"User-Agent": Network.UA},
+        )
+        self._logger = get_logger("network")
+
+    async def check_status(self, url: str) -> bool:
+        try:
+            r = await self.client.get(url)
+            r.raise_for_status()
+            return r.status_code == 200
+        except (httpx.HTTPError, httpx.TimeoutException) as e:
+            self._logger.debug(f"Status check failed for {url}: {e}")
+            return False
+
+    async def get_base(self, mirrors: list[str]) -> str | None:
+        tasks = [self.check_status(link) for link in mirrors]
+        results = await asyncio.gather(*tasks, return_exceptions=True)
+
+        working_mirrors = [
+            mirror for mirror, success in zip(mirrors, results) if success
+        ]
+
+        return working_mirrors[0] if working_mirrors else None
+
+    @staticmethod
+    async def safe_process(
+        fn: Callable,
+        url_num: int,
+        timeout: int | float = 15,
+        log: logging.Logger | None = None,
+    ) -> Any | None:
+        if not log:
+            log = logging.getLogger(__name__)
+
+        task = asyncio.create_task(fn())
+
+        try:
+            return await asyncio.wait_for(task, timeout=timeout)
+        except asyncio.TimeoutError:
+            log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
+
+            task.cancel()
+
+            try:
+                await task
+            except asyncio.CancelledError:
+                pass
+            except Exception as e:
+                log.debug(f"URL {url_num}) Ignore exception after timeout: {e}")
+
+            return None
+        except Exception as e:
+            log.error(f"URL {url_num}) Unexpected error: {e}")
+            return None
+
+    @staticmethod
+    def capture_req(
+        req: Request,
+        captured: list[str],
+        got_one: asyncio.Event,
+        patterns: list[str] | None = None,
+    ) -> None:
+        if not patterns:
+            patterns = ["amazonaws", "knitcdn"]
+
+        pattern = re.compile(rf"^.*\.m3u8(?!.*({'|'.join(patterns)}))")
+
+        if pattern.search(req.url):
+            captured.append(req.url)
+            got_one.set()
+
+    @staticmethod
+    async def browser(
+        playwright: Playwright,
+        browser: str = "firefox",
+        ignore_https_errors: bool = False,
+    ) -> tuple[Browser, BrowserContext]:
+        if browser == "brave":
+            brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222")
+            context = brwsr.contexts[0]
+        else:
+            brwsr = await playwright.firefox.launch(headless=True)
+
+            context = await brwsr.new_context(
+                user_agent=Network.UA,
+                ignore_https_errors=ignore_https_errors,
+                viewport={"width": 1366, "height": 768},
+                device_scale_factor=1,
+                locale="en-US",
+                timezone_id="America/New_York",
+                color_scheme="dark",
+                permissions=["geolocation"],
+                extra_http_headers={
+                    "Accept-Language": "en-US,en;q=0.9",
+                    "Upgrade-Insecure-Requests": "1",
+                },
+            )
+
+            await context.add_init_script(
+                """
+                Object.defineProperty(navigator, 'webdriver', {get: () => undefined});
+
+                Object.defineProperty(navigator, 'languages', {
+                    get: () => ['en-US', 'en']
+                });
+
+                Object.defineProperty(navigator, 'plugins', {
+                    get: () => [1, 2, 3, 4]
+                });
+
+                const elementDescriptor = Object.getOwnPropertyDescriptor(HTMLElement.prototype, 'offsetHeight');
+                Object.defineProperty(HTMLDivElement.prototype, 'offsetHeight', {
+                    ...elementDescriptor,
+                    get: function() {
+                        if (this.id === 'modernizr') { return 24; }
+                        return elementDescriptor.get.apply(this);
+                    }
+                });
+
+                Object.defineProperty(window.screen, 'width', { get: () => 1366 });
+                Object.defineProperty(window.screen, 'height', { get: () => 768 });
+
+                const getParameter = WebGLRenderingContext.prototype.getParameter;
+                WebGLRenderingContext.prototype.getParameter = function (param) {
+                    if (param === 37445) return "Intel Inc."; // UNMASKED_VENDOR_WEBGL
+                    if (param === 37446) return "Intel Iris OpenGL Engine"; // UNMASKED_RENDERER_WEBGL
+                    return getParameter.apply(this, [param]);
+                };
+
+                const observer = new MutationObserver(mutations => {
+                    mutations.forEach(mutation => {
+                        mutation.addedNodes.forEach(node => {
+                            if (node.tagName === 'IFRAME' && node.hasAttribute('sandbox')) {
+                                node.removeAttribute('sandbox');
+                            }
+                        });
+                    });
+                });
+
+                observer.observe(document.documentElement, { childList: true, subtree: true });
+                """
+            )
+
+        return brwsr, context
+
+
+network = Network()
+
+__all__ = ["network"]
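A brief, hypothetical usage sketch of the new helpers (the page URL and timeout are placeholders): it opens a page through Network.browser, wires Network.capture_req to the page's request events, and waits for the first matching .m3u8 URL, roughly how the scrapers are expected to use the shared network instance. Similarly, await network.get_base(mirrors) picks the first responsive mirror or returns None.

import asyncio
from functools import partial

from playwright.async_api import async_playwright

from scrapers.utils import network  # the shared Network() instance defined above


async def sniff_first_m3u8(page_url: str) -> str | None:
    # Collect .m3u8 request URLs seen while the page loads.
    captured: list[str] = []
    got_one = asyncio.Event()

    async with async_playwright() as pw:
        brwsr, context = await network.browser(pw, browser="firefox")
        page = await context.new_page()
        page.on("request", partial(network.capture_req, captured=captured, got_one=got_one))

        await page.goto(page_url, wait_until="domcontentloaded")
        try:
            await asyncio.wait_for(got_one.wait(), timeout=15)
        except asyncio.TimeoutError:
            pass

        await brwsr.close()

    return captured[0] if captured else None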
@@ -6,5 +6,5 @@ dependencies = [
     "httpx>=0.28.1",
     "playwright>=1.55.0",
     "pytz>=2025.2",
-    "selectolax>=0.3.33",
+    "selectolax>=0.4.0",
 ]
108  uv.lock  (generated)
@@ -4,7 +4,7 @@ requires-python = ">=3.10"
 
 [[package]]
 name = "anyio"
-version = "4.10.0"
+version = "4.11.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
@@ -12,9 +12,9 @@ dependencies = [
     { name = "sniffio" },
     { name = "typing-extensions", marker = "python_full_version < '3.13'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" },
+    { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" },
 ]
 
 [[package]]
@@ -151,7 +151,7 @@ requires-dist = [
     { name = "httpx", specifier = ">=0.28.1" },
     { name = "playwright", specifier = ">=1.55.0" },
     { name = "pytz", specifier = ">=2025.2" },
-    { name = "selectolax", specifier = ">=0.3.33" },
+    { name = "selectolax", specifier = ">=0.4.0" },
 ]
 
 [[package]]
@@ -196,52 +196,64 @@ wheels = [
 
 [[package]]
 name = "selectolax"
-version = "0.3.34"
+version = "0.4.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/bf/8c/8bbe1b17098b4e2a63a251361870303c37ad4c3170536277096575c24ca4/selectolax-0.3.34.tar.gz", hash = "sha256:c2cdb30b60994f1e0b74574dd408f1336d2fadd68a3ebab8ea573740dcbf17e2", size = 4706599, upload-time = "2025-08-28T23:17:44.131Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/7b/e0/c62a6bbbadaef976f1f27e08f1188bd7d1bf10380e4265d46737a47f8db9/selectolax-0.4.0.tar.gz", hash = "sha256:0387798f42b36ce24bc19d599ecd6ebe56ee559fe108d43978fac371cece15c7", size = 4753183, upload-time = "2025-09-28T17:52:51.97Z" }
 wheels = [
-    ... (per-platform selectolax 0.3.34 wheel entries, cp310 through cp314t, removed; URLs and hashes as recorded in the previous uv.lock)
+    ... (per-platform selectolax 0.4.0 wheel entries, cp310 through cp314t, added; URLs and hashes as recorded in uv.lock)
 ]
 
 [[package]]