commit 00000d9937 (parent 7103b0f1c4)
17 changed files with 597 additions and 524 deletions
@ -3,7 +3,7 @@ import asyncio
|
|||
from pathlib import Path
|
||||
|
||||
from scrapers import fstv, livetvsx, ppv, streambtw, streameast, streamed, tvpass
|
||||
from scrapers.utils import CLIENT, UA, get_logger
|
||||
from scrapers.utils import get_logger, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
|
@ -12,7 +12,7 @@ BASE_FILE = Path(__file__).parent / "base.m3u8"
|
|||
M3U8_FILE = Path(__file__).parent / "TV.m3u8"
|
||||
|
||||
|
||||
def vanilla_fetch() -> tuple[list[str], int]:
|
||||
def load_base() -> tuple[list[str], int]:
|
||||
log.info("Fetching base M3U8")
|
||||
|
||||
data = BASE_FILE.read_text(encoding="utf-8")
|
||||
|
|
@ -23,19 +23,19 @@ def vanilla_fetch() -> tuple[list[str], int]:
|
|||
|
||||
|
||||
async def main() -> None:
|
||||
base_m3u8, tvg_chno = vanilla_fetch()
|
||||
base_m3u8, tvg_chno = load_base()
|
||||
|
||||
tasks = [
|
||||
asyncio.create_task(fstv.scrape(CLIENT)),
|
||||
asyncio.create_task(livetvsx.scrape(CLIENT)),
|
||||
asyncio.create_task(ppv.scrape(CLIENT)),
|
||||
asyncio.create_task(streambtw.scrape(CLIENT)),
|
||||
asyncio.create_task(streameast.scrape(CLIENT)),
|
||||
asyncio.create_task(streamed.scrape(CLIENT)),
|
||||
asyncio.create_task(tvpass.scrape(CLIENT)),
|
||||
asyncio.create_task(fstv.scrape(network.client)),
|
||||
asyncio.create_task(livetvsx.scrape(network.client)),
|
||||
asyncio.create_task(ppv.scrape(network.client)),
|
||||
asyncio.create_task(streambtw.scrape(network.client)),
|
||||
asyncio.create_task(streameast.scrape(network.client)),
|
||||
asyncio.create_task(streamed.scrape(network.client)),
|
||||
asyncio.create_task(tvpass.scrape(network.client)),
|
||||
]
|
||||
|
||||
await asyncio.gather(*tasks)
|
||||
await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
additions = (
|
||||
fstv.urls
|
||||
|
|
@ -55,10 +55,12 @@ async def main() -> None:
|
|||
):
|
||||
live_events.extend(
|
||||
(
|
||||
f'\n#EXTINF:-1 tvg-chno="{chnl_num}" tvg-id="{info["id"]}" tvg-name="{event}" tvg-logo="{info["logo"]}" group-title="Live Events",{event}',
|
||||
f'\n#EXTINF:-1 tvg-chno="{chnl_num}" tvg-id="{info["id"]}" '
|
||||
f'tvg-name="{event}" tvg-logo="{info["logo"]}" '
|
||||
f'group-title="Live Events",{event}',
|
||||
f'#EXTVLCOPT:http-referrer={info["base"]}',
|
||||
f'#EXTVLCOPT:http-origin={info["base"]}',
|
||||
f"#EXTVLCOPT:http-user-agent={UA}",
|
||||
f"#EXTVLCOPT:http-user-agent={network.UA}",
|
||||
info["url"],
|
||||
)
|
||||
)
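The switch to asyncio.gather(*tasks, return_exceptions=True) above means a single failing scraper no longer tears down the whole run. A self-contained sketch of that behaviour (the coroutine names here are illustrative, not part of the commit):

import asyncio

async def demo() -> None:
    async def ok() -> str:
        return "done"

    async def boom() -> str:
        raise RuntimeError("scraper failed")

    # Without return_exceptions=True the RuntimeError would propagate and
    # cancel the remaining tasks; with it, the exception comes back as a
    # result and the other coroutines finish normally.
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    for result in results:
        if isinstance(result, Exception):
            print(f"skipped: {result!r}")
        else:
            print(result)

asyncio.run(demo())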
@ -4,15 +4,7 @@ from urllib.parse import unquote, urljoin
|
|||
import httpx
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import (
|
||||
get_base,
|
||||
get_logger,
|
||||
leagues,
|
||||
load_cache,
|
||||
now,
|
||||
safe_process_event,
|
||||
write_cache,
|
||||
)
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
|
@ -25,7 +17,7 @@ MIRRORS = [
|
|||
"https://fstv.us",
|
||||
]
|
||||
|
||||
CACHE_FILE = Path(__file__).parent / "caches" / "fstv.json"
|
||||
CACHE_FILE = Cache(Path(__file__).parent / "caches" / "fstv.json", exp=10_800)
|
||||
|
||||
|
||||
async def get_events(
|
||||
|
|
@ -39,7 +31,7 @@ async def get_events(
|
|||
r = await client.get(base_url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{base_url}"\n{e}')
|
||||
log.error(f'Failed to fetch "{base_url}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
|
|
@ -115,20 +107,20 @@ async def process_event(
|
|||
|
||||
if src := ifr.attributes.get("src", ""):
|
||||
log.info(f"URL {url_num}) Captured M3U8")
|
||||
return match_name, unquote(src).split("link=")[-1]
|
||||
return match_name or "", unquote(src).split("link=")[-1]
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = load_cache(CACHE_FILE, exp=10_800)
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_hrefs = {entry["href"] for entry in cached_urls.values()}
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Collected {cached_count} event(s) from cache")
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
if not (base_url := await get_base(client, MIRRORS)):
|
||||
if not (base_url := await network.get_base(MIRRORS)):
|
||||
log.warning("No working FSTV mirrors")
|
||||
write_cache(CACHE_FILE, cached_urls)
|
||||
CACHE_FILE.write(cached_urls)
|
||||
return
|
||||
|
||||
events = await get_events(
|
||||
|
|
@ -140,7 +132,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
match_name, url = await safe_process_event(
|
||||
match_name, url = await network.safe_process(
|
||||
lambda: process_event(
|
||||
client,
|
||||
ev["link"],
|
||||
|
|
@ -163,8 +155,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
"url": url,
|
||||
"logo": logo,
|
||||
"base": base_url,
|
||||
"timestamp": now.timestamp(),
|
||||
"id": tvg_id,
|
||||
"timestamp": Time.now().timestamp(),
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"href": ev["href"],
|
||||
}
|
||||
|
||||
|
|
@ -175,4 +167,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
write_cache(CACHE_FILE, cached_urls)
|
||||
CACHE_FILE.write(cached_urls)
@ -2,24 +2,14 @@ import asyncio
|
|||
import io
|
||||
import ssl
|
||||
import xml.etree.ElementTree as ET
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
|
||||
from .utils import (
|
||||
TZ,
|
||||
capture_req,
|
||||
get_logger,
|
||||
leagues,
|
||||
load_cache,
|
||||
new_browser,
|
||||
now,
|
||||
safe_process_event,
|
||||
write_cache,
|
||||
)
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
|
@ -36,7 +26,7 @@ CERT_BUNDLE_URLS = [
|
|||
|
||||
CERT_FILE = Path(__file__).parent / "caches" / "cached-cert.pem"
|
||||
|
||||
CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"
|
||||
CACHE_FILE = Cache(Path(__file__).parent / "caches" / "livetvsx.json", exp=10_800)
|
||||
|
||||
|
||||
async def write_to_cert(
|
||||
|
|
@ -64,9 +54,9 @@ async def refresh_cert_cache(client: httpx.AsyncClient) -> ssl.SSLContext:
|
|||
|
||||
async def get_cert(client: httpx.AsyncClient) -> ssl.SSLContext:
|
||||
if CERT_FILE.is_file():
|
||||
mtime = datetime.fromtimestamp(CERT_FILE.stat().st_mtime, TZ)
|
||||
mtime = Time.from_ts(CERT_FILE.stat().st_mtime)
|
||||
|
||||
if now - mtime < timedelta(days=30):
|
||||
if Time.now() - mtime < timedelta(days=30):
|
||||
return ssl.create_default_context(cafile=CERT_FILE)
|
||||
|
||||
log.info("Refreshing cached certificate")
|
||||
|
|
@ -101,7 +91,7 @@ async def fetch_xml_stream(url: str, ssl_ctx: ssl.SSLContext) -> io.BytesIO | No
|
|||
|
||||
async def process_event(url: str, url_num: int) -> str | None:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await new_browser(p, ignore_https_errors=True)
|
||||
browser, context = await network.browser(p, ignore_https_errors=True)
|
||||
|
||||
page = await context.new_page()
|
||||
|
||||
|
|
@ -109,7 +99,7 @@ async def process_event(url: str, url_num: int) -> str | None:
|
|||
|
||||
got_one = asyncio.Event()
|
||||
|
||||
handler = partial(capture_req, captured=captured, got_one=got_one)
|
||||
handler = partial(network.capture_req, captured=captured, got_one=got_one)
|
||||
|
||||
popup = None
|
||||
|
||||
|
|
@ -149,9 +139,7 @@ async def process_event(url: str, url_num: int) -> str | None:
|
|||
try:
|
||||
await link_img.click()
|
||||
except Exception as e:
|
||||
log.debug(
|
||||
f"URL {url_num}) Click failed (popup might have already been opened): {e}"
|
||||
)
|
||||
log.debug(f"URL {url_num}) Click failed: {e}")
|
||||
|
||||
popup = await popup_info.value
|
||||
|
||||
|
|
@ -194,8 +182,9 @@ async def process_event(url: str, url_num: int) -> str | None:
|
|||
|
||||
return captured[-1]
|
||||
|
||||
log.warning(f"URL {url_num}) No M3U8 captured in popup or inline playback.")
|
||||
log.warning(f"URL {url_num}) No M3U8 captured")
|
||||
return
|
||||
|
||||
except Exception:
|
||||
try:
|
||||
page.remove_listener("request", handler)
|
||||
|
|
@ -220,73 +209,84 @@ async def get_events(
|
|||
|
||||
events: list[dict[str, str]] = []
|
||||
|
||||
start_dt = now - timedelta(minutes=30)
|
||||
end_dt = now + timedelta(minutes=30)
|
||||
now = Time.now()
|
||||
|
||||
if buffer := await fetch_xml_stream(url, ssl_ctx):
|
||||
pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
|
||||
start_dt = now.delta(minutes=-30)
|
||||
end_dt = now.delta(minutes=30)
|
||||
|
||||
for _, elem in ET.iterparse(buffer, events=("end",)):
|
||||
if elem.tag == "item":
|
||||
title = elem.findtext("title")
|
||||
desc = elem.findtext("description")
|
||||
pub_date = elem.findtext("pubDate")
|
||||
link = elem.findtext("link")
|
||||
if not (buffer := await fetch_xml_stream(url, ssl_ctx)):
|
||||
return events
|
||||
|
||||
try:
|
||||
dt = datetime.strptime(pub_date, pub_date_format)
|
||||
dt = dt.astimezone(TZ)
|
||||
except Exception:
|
||||
elem.clear()
|
||||
continue
|
||||
pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
|
||||
|
||||
if not start_dt <= dt <= end_dt:
|
||||
elem.clear()
|
||||
continue
|
||||
|
||||
sport, event = (
|
||||
(
|
||||
desc.split(".")[0].strip(),
|
||||
" ".join(p.strip() for p in desc.split(".")[1:]),
|
||||
)
|
||||
if desc
|
||||
else ("", "")
|
||||
)
|
||||
|
||||
key = f"[{sport}: {event}] {title} (LTVSX)"
|
||||
|
||||
if cached_keys & {key}:
|
||||
elem.clear()
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": event,
|
||||
"title": title,
|
||||
"link": link,
|
||||
}
|
||||
)
|
||||
for _, elem in ET.iterparse(buffer, events=("end",)):
|
||||
if elem.tag == "item":
|
||||
title = elem.findtext("title") or ""
|
||||
desc = elem.findtext("description") or ""
|
||||
pub_date = elem.findtext("pubDate") or ""
|
||||
link = elem.findtext("link") or ""
|
||||
|
||||
if not all([title, pub_date, link]):
|
||||
elem.clear()
|
||||
continue
|
||||
|
||||
try:
|
||||
event_dt = Time.from_str(pub_date, pub_date_format)
|
||||
except Exception:
|
||||
elem.clear()
|
||||
continue
|
||||
|
||||
if not start_dt <= event_dt <= end_dt:
|
||||
elem.clear()
|
||||
continue
|
||||
|
||||
if desc:
|
||||
parts = desc.split(".")
|
||||
sport = parts[0].strip() if parts else ""
|
||||
event = parts[1].strip() if len(parts) > 1 else ""
|
||||
else:
|
||||
sport, event = "", ""
|
||||
|
||||
key = f"[{sport}: {event}] {title} (LTVSX)"
|
||||
|
||||
if cached_keys & {key}:
|
||||
elem.clear()
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": event,
|
||||
"title": title,
|
||||
"link": link,
|
||||
"timestamp": event_dt.timestamp(),
|
||||
}
|
||||
)
|
||||
|
||||
elem.clear()
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = load_cache(CACHE_FILE, exp=10_800)
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Collected {cached_count} event(s) from cache")
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
cert = await get_cert(client)
|
||||
ssl_ctx = await get_cert(client)
|
||||
|
||||
if not ssl_ctx:
|
||||
log.error("Failed to create SSL context, aborting")
|
||||
CACHE_FILE.write(cached_urls)
|
||||
return
|
||||
|
||||
events = await get_events(
|
||||
BASE_URL,
|
||||
cert,
|
||||
ssl_ctx,
|
||||
set(cached_urls.keys()),
|
||||
)
|
||||
|
||||
|
|
@ -295,16 +295,19 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
for i, ev in enumerate(events, start=1):
|
||||
link = ev["link"]
|
||||
|
||||
url = await safe_process_event(
|
||||
url = await network.safe_process(
|
||||
lambda: process_event(link, url_num=i),
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport = ev["sport"]
|
||||
event = ev["event"]
|
||||
title = ev["title"]
|
||||
sport, event, title, ts = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["title"],
|
||||
ev["timestamp"],
|
||||
)
|
||||
|
||||
key = f"[{sport}: {event}] {title} (LTVSX)"
|
||||
|
||||
|
|
@ -316,9 +319,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"id": tvg_id,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"base": "https://livetv.sx/enx/",
|
||||
"timestamp": now.timestamp(),
|
||||
"timestamp": ts,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
|
@ -328,4 +331,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
write_cache(CACHE_FILE, cached_urls)
|
||||
CACHE_FILE.write(cached_urls)
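The rewritten get_events above keeps the streaming ElementTree pattern: parse the RSS feed item by item and clear() each element after use, so the whole document never sits in memory. A self-contained illustration (the feed contents are made up):

import io
import xml.etree.ElementTree as ET

feed = io.BytesIO(
    b"<rss><channel>"
    b"<item><title>Team A - Team B</title><link>https://example.com/1</link></item>"
    b"<item><title>Team C - Team D</title><link>https://example.com/2</link></item>"
    b"</channel></rss>"
)

for _, elem in ET.iterparse(feed, events=("end",)):
    if elem.tag == "item":
        print(elem.findtext("title"), elem.findtext("link"))
        elem.clear()  # drop the parsed subtree so memory use stays flat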
@ -1,5 +1,4 @@
|
|||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
from urllib.parse import urljoin
|
||||
|
|
@ -7,26 +6,15 @@ from urllib.parse import urljoin
|
|||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
|
||||
from .utils import (
|
||||
TZ,
|
||||
capture_req,
|
||||
get_base,
|
||||
get_logger,
|
||||
leagues,
|
||||
load_cache,
|
||||
new_browser,
|
||||
now,
|
||||
safe_process_event,
|
||||
write_cache,
|
||||
)
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
API_FILE = Path(__file__).parent / "caches" / "ppv_api.json"
|
||||
API_FILE = Cache(Path(__file__).parent / "caches" / "ppv_api.json", exp=86_400)
|
||||
|
||||
CACHE_FILE = Path(__file__).parent / "caches" / "ppv.json"
|
||||
CACHE_FILE = Cache(Path(__file__).parent / "caches" / "ppv.json", exp=10_800)
|
||||
|
||||
MIRRORS = [
|
||||
"https://ppvs.su",
|
||||
|
|
@ -37,7 +25,7 @@ MIRRORS = [
|
|||
]
|
||||
|
||||
|
||||
def get_tvg(sport: str, event: str) -> str | None:
|
||||
def get_tvg(sport: str, event: str) -> str:
|
||||
match sport:
|
||||
case "American Football":
|
||||
if leagues.is_valid(event, "NFL"):
|
||||
|
|
@ -79,7 +67,7 @@ async def refresh_api_cache(
|
|||
|
||||
async def process_event(url: str, url_num: int) -> str | None:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await new_browser(p)
|
||||
browser, context = await network.browser(p)
|
||||
|
||||
page = await context.new_page()
|
||||
|
||||
|
|
@ -87,12 +75,16 @@ async def process_event(url: str, url_num: int) -> str | None:
|
|||
|
||||
got_one = asyncio.Event()
|
||||
|
||||
handler = partial(capture_req, captured=captured, got_one=got_one)
|
||||
handler = partial(network.capture_req, captured=captured, got_one=got_one)
|
||||
|
||||
page.on("request", handler)
|
||||
|
||||
try:
|
||||
await page.goto(url, wait_until="domcontentloaded", timeout=15_000)
|
||||
await page.goto(
|
||||
url,
|
||||
wait_until="domcontentloaded",
|
||||
timeout=15_000,
|
||||
)
|
||||
|
||||
wait_task = asyncio.create_task(got_one.wait())
|
||||
|
||||
|
|
@ -137,25 +129,18 @@ async def get_events(
|
|||
|
||||
events: list[dict[str, str]] = []
|
||||
|
||||
if not (
|
||||
api_data := load_cache(
|
||||
API_FILE,
|
||||
exp=86_400,
|
||||
nearest_hr=True,
|
||||
per_entry=False,
|
||||
)
|
||||
):
|
||||
if not (api_data := API_FILE.load(nearest_hr=True, per_entry=False)):
|
||||
api_data = await refresh_api_cache(client, urljoin(base_url, "api/streams"))
|
||||
|
||||
write_cache(API_FILE, api_data)
|
||||
API_FILE.write(api_data)
|
||||
|
||||
for stream_group in api_data["streams"]:
|
||||
sport = stream_group["category"]
|
||||
for stream_group in api_data.get("streams", []):
|
||||
sport = stream_group.get("category", "")
|
||||
|
||||
if sport == "24/7 Streams":
|
||||
continue
|
||||
|
||||
for event in stream_group["streams"]:
|
||||
for event in stream_group.get("streams", []):
|
||||
name, start_ts, end_ts, logo, uri_name = (
|
||||
event["name"],
|
||||
event["starts_at"],
|
||||
|
|
@ -169,11 +154,11 @@ async def get_events(
|
|||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
start_dt = datetime.fromtimestamp(start_ts, tz=TZ) - timedelta(minutes=30)
|
||||
start_dt = Time.from_ts(start_ts).delta(minutes=-30)
|
||||
|
||||
end_dt = datetime.fromtimestamp(end_ts, tz=TZ) + timedelta(minutes=30)
|
||||
end_dt = Time.from_ts(end_ts).delta(minutes=30)
|
||||
|
||||
if not start_dt <= now < end_dt:
|
||||
if not start_dt <= Time.now() < end_dt:
|
||||
continue
|
||||
|
||||
events.append(
|
||||
|
|
@ -182,6 +167,7 @@ async def get_events(
|
|||
"event": name,
|
||||
"link": urljoin(base_url, f"live/{uri_name}"),
|
||||
"logo": logo,
|
||||
"timestamp": start_dt.timestamp(),
|
||||
}
|
||||
)
|
||||
|
||||
|
|
@ -189,15 +175,15 @@ async def get_events(
|
|||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = load_cache(CACHE_FILE, exp=10_800)
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Collected {cached_count} event(s) from cache")
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
if not (base_url := await get_base(client, MIRRORS)):
|
||||
if not (base_url := await network.get_base(MIRRORS)):
|
||||
log.warning("No working PPV mirrors")
|
||||
write_cache(CACHE_FILE, cached_urls)
|
||||
CACHE_FILE.write(cached_urls)
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{base_url}"')
|
||||
|
|
@ -211,14 +197,19 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
url = await safe_process_event(
|
||||
url = await network.safe_process(
|
||||
lambda: process_event(ev["link"], url_num=i),
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, logo = ev["sport"], ev["event"], ev["logo"]
|
||||
sport, event, logo, ts = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["logo"],
|
||||
ev["timestamp"],
|
||||
)
|
||||
|
||||
key = f"[{sport}] {event} (PPV)"
|
||||
|
||||
|
|
@ -226,7 +217,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
"url": url,
|
||||
"logo": logo,
|
||||
"base": base_url,
|
||||
"timestamp": now.timestamp(),
|
||||
"timestamp": ts,
|
||||
"id": get_tvg(sport, event) or "Live.Event.us",
|
||||
}
|
||||
|
||||
|
|
@ -237,4 +228,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
write_cache(CACHE_FILE, cached_urls)
|
||||
CACHE_FILE.write(cached_urls)
@ -5,7 +5,7 @@ from urllib.parse import urljoin
|
|||
import httpx
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import get_logger, leagues, load_cache, now, safe_process_event, write_cache
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
|
@ -13,7 +13,7 @@ urls: dict[str, dict[str, str]] = {}
|
|||
|
||||
BASE_URL = "https://streambtw.com/"
|
||||
|
||||
CACHE_FILE = Path(__file__).parent / "caches" / "streambtw.json"
|
||||
CACHE_FILE = Cache(Path(__file__).parent / "caches" / "streambtw.json", exp=86_400)
|
||||
|
||||
|
||||
async def process_event(
|
||||
|
|
@ -45,7 +45,7 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
|
|||
r = await client.get(BASE_URL)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{BASE_URL}"\n{e}')
|
||||
log.error(f'Failed to fetch "{BASE_URL}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
|
|
@ -60,22 +60,24 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
|
|||
|
||||
link = card.css_first("a.btn.btn-primary")
|
||||
|
||||
if href := link.attrs.get("href"):
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": name,
|
||||
"link": urljoin(BASE_URL, href),
|
||||
}
|
||||
)
|
||||
if not (href := link.attrs.get("href")):
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": name,
|
||||
"link": urljoin(BASE_URL, href),
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
if cached := load_cache(CACHE_FILE, exp=86_400, nearest_hr=True):
|
||||
if cached := CACHE_FILE.load():
|
||||
urls.update(cached)
|
||||
log.info(f"Collected {len(urls)} event(s) from cache")
|
||||
log.info(f"Loaded {len(urls)} event(s) from cache")
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
|
@ -85,10 +87,11 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
url = await safe_process_event(
|
||||
url = await network.safe_process(
|
||||
lambda: process_event(client, url=ev["link"], url_num=i),
|
||||
url_num=i,
|
||||
log=log,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
if url:
|
||||
|
|
@ -102,12 +105,12 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
"url": url,
|
||||
"logo": logo,
|
||||
"base": BASE_URL,
|
||||
"timestamp": now.timestamp(),
|
||||
"id": tvg_id,
|
||||
"timestamp": Time.now().timestamp(),
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
}
|
||||
|
||||
urls[key] = entry
|
||||
|
||||
log.info(f"Collected {len(urls)} event(s)")
|
||||
|
||||
write_cache(CACHE_FILE, urls)
|
||||
CACHE_FILE.write(urls)
@ -1,5 +1,4 @@
|
|||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
from urllib.parse import urljoin
|
||||
|
|
@ -8,24 +7,13 @@ import httpx
|
|||
from playwright.async_api import async_playwright
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import (
|
||||
TZ,
|
||||
capture_req,
|
||||
get_base,
|
||||
get_logger,
|
||||
leagues,
|
||||
load_cache,
|
||||
new_browser,
|
||||
now,
|
||||
safe_process_event,
|
||||
write_cache,
|
||||
)
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Path(__file__).parent / "caches" / "streameast.json"
|
||||
CACHE_FILE = Cache(Path(__file__).parent / "caches" / "streameast.json", exp=10_800)
|
||||
|
||||
MIRRORS = [
|
||||
"https://streameast.ga",
|
||||
|
|
@ -48,7 +36,7 @@ MIRRORS = [
|
|||
|
||||
async def process_event(url: str, url_num: int) -> str | None:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await new_browser(p, browser="brave")
|
||||
browser, context = await network.browser(p, browser="brave")
|
||||
|
||||
page = await context.new_page()
|
||||
|
||||
|
|
@ -56,7 +44,7 @@ async def process_event(url: str, url_num: int) -> str | None:
|
|||
|
||||
got_one = asyncio.Event()
|
||||
|
||||
handler = partial(capture_req, captured=captured, got_one=got_one)
|
||||
handler = partial(network.capture_req, captured=captured, got_one=got_one)
|
||||
|
||||
page.on("request", handler)
|
||||
|
||||
|
|
@ -114,8 +102,10 @@ async def get_events(
|
|||
soup = HTMLParser(r.text)
|
||||
events = []
|
||||
|
||||
start_dt = now - timedelta(minutes=30)
|
||||
end_dt = now + timedelta(minutes=30)
|
||||
now = Time.now()
|
||||
|
||||
start_dt = now.delta(minutes=-30)
|
||||
end_dt = now.delta(minutes=30)
|
||||
|
||||
for section in soup.css("div.se-sport-section"):
|
||||
if not (sport := section.attributes.get("data-sport-name", "").strip()):
|
||||
|
|
@ -147,7 +137,7 @@ async def get_events(
|
|||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
event_dt = datetime.fromtimestamp(timestamp, TZ)
|
||||
event_dt = Time.from_ts(timestamp)
|
||||
|
||||
if time_text == "LIVE" or (start_dt <= event_dt < end_dt):
|
||||
events.append(
|
||||
|
|
@ -155,6 +145,7 @@ async def get_events(
|
|||
"sport": sport,
|
||||
"event": name,
|
||||
"link": href,
|
||||
"timestamp": timestamp,
|
||||
}
|
||||
)
|
||||
|
||||
|
|
@ -162,15 +153,15 @@ async def get_events(
|
|||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = load_cache(CACHE_FILE, exp=10_800)
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Collected {cached_count} event(s) from cache")
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
if not (base_url := await get_base(client, MIRRORS)):
|
||||
if not (base_url := await network.get_base(MIRRORS)):
|
||||
log.warning("No working StreamEast mirrors")
|
||||
write_cache(CACHE_FILE, cached_urls)
|
||||
CACHE_FILE.write(cached_urls)
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{base_url}"')
|
||||
|
|
@ -184,14 +175,14 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
url = await safe_process_event(
|
||||
url = await network.safe_process(
|
||||
lambda: process_event(ev["link"], url_num=i),
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event = ev["sport"], ev["event"]
|
||||
sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
|
||||
|
||||
tvg_id, logo = leagues.info(sport)
|
||||
|
||||
|
|
@ -205,8 +196,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
"url": url,
|
||||
"logo": logo,
|
||||
"base": base_url,
|
||||
"timestamp": now.timestamp(),
|
||||
"id": tvg_id,
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
|
@ -216,4 +207,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
write_cache(CACHE_FILE, cached_urls)
|
||||
CACHE_FILE.write(cached_urls)
@ -1,6 +1,5 @@
|
|||
import asyncio
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
|
||||
|
|
@ -8,40 +7,22 @@ import httpx
|
|||
from playwright.async_api import async_playwright
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import (
|
||||
TZ,
|
||||
capture_req,
|
||||
get_logger,
|
||||
leagues,
|
||||
load_cache,
|
||||
new_browser,
|
||||
now,
|
||||
safe_process_event,
|
||||
write_cache,
|
||||
)
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Path(__file__).parent / "caches" / "streamed.json"
|
||||
CACHE_FILE = Cache(Path(__file__).parent / "caches" / "streamed.json", exp=10_800)
|
||||
|
||||
HTML_CACHE = Path(__file__).parent / "caches" / "streamed_php.json"
|
||||
HTML_CACHE = Cache(Path(__file__).parent / "caches" / "streamed_php.json", exp=86_400)
|
||||
|
||||
base_url = "https://streamed.site/webmaster.php"
|
||||
|
||||
|
||||
def get_date(s: str) -> datetime:
|
||||
try:
|
||||
return datetime.strptime(s, "%Y-%m-%d %H:%M %Z").astimezone(TZ)
|
||||
except ValueError:
|
||||
s = s.replace("ET", "").strip()
|
||||
return datetime.strptime(s, "%Y-%m-%d %H:%M").astimezone(TZ)
|
||||
BASE_URL = "https://streamed.site/webmaster.php"
|
||||
|
||||
|
||||
async def process_event(url: str, url_num: int) -> str | None:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await new_browser(p, browser="brave")
|
||||
browser, context = await network.browser(p, browser="brave")
|
||||
|
||||
page = await context.new_page()
|
||||
|
||||
|
|
@ -49,7 +30,7 @@ async def process_event(url: str, url_num: int) -> str | None:
|
|||
|
||||
got_one = asyncio.Event()
|
||||
|
||||
handler = partial(capture_req, captured=captured, got_one=got_one)
|
||||
handler = partial(network.capture_req, captured=captured, got_one=got_one)
|
||||
|
||||
page.on("request", handler)
|
||||
|
||||
|
|
@ -102,12 +83,13 @@ async def refresh_html_cache(client: httpx.AsyncClient, url: str) -> dict[str, s
|
|||
|
||||
soup = HTMLParser(r.text)
|
||||
events = {}
|
||||
now = Time.now().to_tz("EST")
|
||||
|
||||
for row in soup.css("div.wrap div.row"):
|
||||
if not (date := row.css_first("div.date")):
|
||||
continue
|
||||
|
||||
event_dt = get_date(date.text(strip=True))
|
||||
event_dt = Time.from_str(date.text(strip=True)).to_tz("EST")
|
||||
|
||||
if event_dt.date() != now.date():
|
||||
continue
|
||||
|
|
@ -134,8 +116,8 @@ async def refresh_html_cache(client: httpx.AsyncClient, url: str) -> dict[str, s
|
|||
"sport": sport,
|
||||
"event": event,
|
||||
"link": f"https://streamed.site/set.php?{m[1]}",
|
||||
"event_ts": event_dt.timestamp(),
|
||||
"timestamp": now.timestamp(),
|
||||
"event_ts": event_dt.timestamp(),
|
||||
}
|
||||
|
||||
return events
|
||||
|
|
@ -147,13 +129,15 @@ async def get_events(
|
|||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
|
||||
if not (events := load_cache(HTML_CACHE, exp=86_400, nearest_hr=True)):
|
||||
if not (events := HTML_CACHE.load(nearest_hr=True)):
|
||||
events = await refresh_html_cache(client, url)
|
||||
write_cache(HTML_CACHE, events)
|
||||
HTML_CACHE.write(events)
|
||||
|
||||
live = []
|
||||
start_ts = (now - timedelta(minutes=30)).timestamp()
|
||||
end_ts = (now + timedelta(minutes=30)).timestamp()
|
||||
now = Time.now().to_tz("EST")
|
||||
|
||||
start_ts = now.delta(minutes=-30).to_tz("EST").timestamp()
|
||||
end_ts = now.delta(minutes=30).to_tz("EST").timestamp()
|
||||
|
||||
for k, v in events.items():
|
||||
if cached_keys & {k}:
|
||||
|
|
@ -168,31 +152,31 @@ async def get_events(
|
|||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = load_cache(CACHE_FILE, exp=10_800)
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Collected {cached_count} event(s) from cache")
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info(f'Scraping from "{base_url}"')
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(
|
||||
client,
|
||||
base_url,
|
||||
BASE_URL,
|
||||
set(cached_urls.keys()),
|
||||
)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
url = await safe_process_event(
|
||||
url = await network.safe_process(
|
||||
lambda: process_event(ev["link"], url_num=i),
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event = ev["sport"], ev["event"]
|
||||
sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
|
||||
|
||||
tvg_id, logo = leagues.info(sport)
|
||||
|
||||
|
|
@ -202,8 +186,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
"url": url,
|
||||
"logo": logo,
|
||||
"base": "https://streamed.site/",
|
||||
"timestamp": now.timestamp(),
|
||||
"id": tvg_id,
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
|
@ -213,4 +197,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
write_cache(CACHE_FILE, cached_urls)
|
||||
CACHE_FILE.write(cached_urls)
@ -3,15 +3,15 @@ from pathlib import Path
|
|||
|
||||
import httpx
|
||||
|
||||
from .utils import get_logger, leagues, load_cache, now, write_cache
|
||||
from .utils import Cache, Time, get_logger, leagues
|
||||
|
||||
log = get_logger(__name__)
|
||||
log = get_logger()
|
||||
|
||||
urls: dict[str, dict[str, str]] = {}
|
||||
|
||||
BASE_URL = "https://tvpass.org/playlist/m3u"
|
||||
|
||||
CACHE_FILE = Path(__file__).parent / "caches" / "tvpass.json"
|
||||
CACHE_FILE = Cache(Path(__file__).parent / "caches" / "tvpass.json", exp=86_400)
|
||||
|
||||
|
||||
async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]:
|
||||
|
|
@ -19,21 +19,25 @@ async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]:
|
|||
r = await client.get(BASE_URL)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{BASE_URL}"\n{e}')
|
||||
log.error(f'Failed to fetch "{BASE_URL}": {e}')
|
||||
return []
|
||||
|
||||
return r.text.splitlines()
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
if cached := load_cache(CACHE_FILE, exp=86_400, nearest_hr=True):
|
||||
if cached := CACHE_FILE.load(nearest_hr=True):
|
||||
urls.update(cached)
|
||||
log.info(f"Collected {len(urls)} event(s) from cache")
|
||||
log.info(f"Loaded {len(urls)} event(s) from cache")
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
for i, line in enumerate(data := await fetch_m3u8(client)):
|
||||
if not (data := await fetch_m3u8(client)):
|
||||
log.warning("No M3U8 data received")
|
||||
return
|
||||
|
||||
for i, line in enumerate(data):
|
||||
if line.startswith("#EXTINF"):
|
||||
tvg_id_match = re.search(r'tvg-id="([^"]*)"', line)
|
||||
tvg_name_match = re.search(r'tvg-name="([^"]*)"', line)
|
||||
|
|
@ -56,13 +60,13 @@ async def scrape(client: httpx.AsyncClient) -> None:
|
|||
entry = {
|
||||
"url": f"http://origin.thetvapp.to/hls/{channel}/mono.m3u8",
|
||||
"logo": logo,
|
||||
"id": tvg_id,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"base": "https://tvpass.org",
|
||||
"timestamp": now.timestamp(),
|
||||
"timestamp": Time.now().timestamp(),
|
||||
}
|
||||
|
||||
urls[key] = entry
|
||||
|
||||
write_cache(CACHE_FILE, urls)
|
||||
CACHE_FILE.write(urls)
|
||||
|
||||
log.info(f"Cached {len(urls)} event(s)")
@ -1,19 +1,12 @@
|
|||
from .cache import load_cache, write_cache
|
||||
from .config import TZ, leagues, now
|
||||
from .caching import Cache
|
||||
from .config import Time, leagues
|
||||
from .logger import get_logger
|
||||
from .network import CLIENT, UA, capture_req, get_base, new_browser, safe_process_event
|
||||
from .webwork import network
|
||||
|
||||
__all__ = [
|
||||
"CLIENT",
|
||||
"TZ",
|
||||
"UA",
|
||||
"capture_req",
|
||||
"get_base",
|
||||
"Cache",
|
||||
"Time",
|
||||
"get_logger",
|
||||
"leagues",
|
||||
"load_cache",
|
||||
"new_browser",
|
||||
"now",
|
||||
"safe_process_event",
|
||||
"write_cache",
|
||||
"network",
|
||||
]
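With the trimmed __all__ above, callers pull the whole toolkit from one place; a typical scraper header now reads:

from scrapers.utils import Cache, Time, get_logger, leagues, network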
@ -1,50 +0,0 @@
|
|||
import json
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from .config import now
|
||||
|
||||
|
||||
def near_hr(dt: datetime) -> float:
|
||||
return dt.replace(minute=0, second=0, microsecond=0).timestamp()
|
||||
|
||||
|
||||
def is_fresh(
|
||||
entry: dict,
|
||||
nearest_hr: bool,
|
||||
exp: int,
|
||||
) -> bool:
|
||||
ts: float | int = entry.get("timestamp", 31496400)
|
||||
|
||||
if nearest_hr:
|
||||
ts = near_hr(datetime.fromtimestamp(ts))
|
||||
|
||||
return now.timestamp() - ts < exp
|
||||
|
||||
|
||||
def load_cache(
|
||||
file: Path,
|
||||
exp: int | float,
|
||||
nearest_hr: bool = False,
|
||||
per_entry: bool = True,
|
||||
) -> dict[str, dict[str, str | float]]:
|
||||
try:
|
||||
data: dict = json.loads(file.read_text(encoding="utf-8"))
|
||||
except (FileNotFoundError, json.JSONDecodeError):
|
||||
return {}
|
||||
|
||||
if per_entry:
|
||||
return {k: v for k, v in data.items() if is_fresh(v, nearest_hr, exp)}
|
||||
|
||||
ts: float | int = data.get("timestamp", 31496400)
|
||||
|
||||
if nearest_hr:
|
||||
ts = near_hr(datetime.fromtimestamp(ts))
|
||||
|
||||
return data if now.timestamp() - ts < exp else {}
|
||||
|
||||
|
||||
def write_cache(file: Path, data: dict) -> None:
|
||||
file.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
file.write_text(json.dumps(data, indent=2), encoding="utf-8")
M3U8/scrapers/utils/caching.py (new file, 65 lines)
@ -0,0 +1,65 @@
|
|||
import json
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from .config import Time
|
||||
|
||||
|
||||
class Cache:
|
||||
def __init__(self, file: Path, exp: int | float) -> None:
|
||||
self.file = file
|
||||
self.exp = exp
|
||||
|
||||
@staticmethod
|
||||
def near_hr(dt: datetime) -> float:
|
||||
return dt.replace(minute=0, second=0, microsecond=0).timestamp()
|
||||
|
||||
def is_fresh(
|
||||
self,
|
||||
entry: dict,
|
||||
nearest_hr: bool,
|
||||
) -> bool:
|
||||
ts: float | int = entry.get("timestamp", 31496400)
|
||||
|
||||
if nearest_hr:
|
||||
ts = self.near_hr(Time.from_ts(ts))
|
||||
|
||||
return Time.now().timestamp() - ts < self.exp
|
||||
|
||||
def load(
|
||||
self,
|
||||
nearest_hr: bool = False,
|
||||
per_entry: bool = True,
|
||||
) -> dict[str, dict[str, str | float]]:
|
||||
try:
|
||||
data: dict = json.loads(self.file.read_text(encoding="utf-8"))
|
||||
except (FileNotFoundError, json.JSONDecodeError):
|
||||
return {}
|
||||
|
||||
if not data:
|
||||
return {}
|
||||
|
||||
if per_entry:
|
||||
return {k: v for k, v in data.items() if self.is_fresh(v, nearest_hr)}
|
||||
|
||||
ts: float | int = data.get("timestamp", 31496400)
|
||||
|
||||
if nearest_hr:
|
||||
ts = self.near_hr(Time.from_ts(ts))
|
||||
|
||||
return data if self.is_fresh({"timestamp": ts}, False) else {}
|
||||
|
||||
def write(self, data: dict) -> None:
|
||||
self.file.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self.file.write_text(
|
||||
json.dumps(
|
||||
data,
|
||||
indent=2,
|
||||
ensure_ascii=False,
|
||||
),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
|
||||
__all__ = ["Cache"]
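A brief usage sketch of the new Cache wrapper, mirroring how the scrapers use it (the path and entry below are illustrative):

from pathlib import Path

cache = Cache(Path("caches/example.json"), exp=10_800)  # entries older than 3 h are dropped

entries = cache.load()          # {} for a missing or corrupt file; stale entries filtered out
entries["[NBA] Team A vs Team B"] = {
    "url": "https://example.com/stream.m3u8",
    "timestamp": Time.now().timestamp(),
}
cache.write(entries)            # creates parent dirs and writes indented JSON

# Whole-file caches (e.g. the PPV API dump) pass per_entry=False so a single
# top-level timestamp governs freshness instead of one per entry.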
@ -1,27 +1,79 @@
|
|||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
|
||||
import pytz
|
||||
|
||||
TZ = pytz.timezone("America/New_York")
|
||||
ZONES = {"ET": pytz.timezone("America/New_York"), "UTC": timezone.utc}
|
||||
|
||||
now = datetime.now(TZ)
|
||||
ZONES["EDT"] = ZONES["EST"] = ZONES["ET"]
|
||||
|
||||
live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"
|
||||
|
||||
leagues_file = Path(__file__).parent / "leagues.json"
|
||||
class Time(datetime):
|
||||
TZ = timezone.utc
|
||||
|
||||
@classmethod
|
||||
def now(cls) -> "Time":
|
||||
dt = datetime.now(cls.TZ)
|
||||
return cls.fromtimestamp(dt.timestamp(), tz=cls.TZ)
|
||||
|
||||
@classmethod
|
||||
def from_ts(cls, ts: int | float) -> "Time":
|
||||
return cls.fromtimestamp(ts, tz=cls.TZ)
|
||||
|
||||
def delta(self, **kwargs) -> "Time":
|
||||
new_dt = super().__add__(timedelta(**kwargs))
|
||||
return self.__class__.fromtimestamp(new_dt.timestamp(), tz=new_dt.tzinfo)
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, s: str, fmt: str | None = None) -> "Time":
|
||||
pattern = r"\b(ET|UTC|EST|EDT)\b"
|
||||
|
||||
match = re.search(pattern, s)
|
||||
|
||||
tz = ZONES.get(match[1]) if match else cls.TZ
|
||||
|
||||
cleaned_str = re.sub(pattern, "", s).strip()
|
||||
|
||||
if fmt:
|
||||
dt = datetime.strptime(cleaned_str, fmt)
|
||||
else:
|
||||
formats = [
|
||||
"%Y-%m-%d %H:%M",
|
||||
"%Y-%m-%d %H:%M:%S",
|
||||
]
|
||||
|
||||
for frmt in formats:
|
||||
try:
|
||||
dt = datetime.strptime(cleaned_str, frmt)
|
||||
break
|
||||
except ValueError:
|
||||
continue
|
||||
else:
|
||||
return cls.from_ts(31496400)
|
||||
|
||||
dt = tz.localize(dt) if hasattr(tz, "localize") else dt.replace(tzinfo=tz)
|
||||
|
||||
return cls.fromtimestamp(dt.astimezone(cls.TZ).timestamp(), tz=cls.TZ)
|
||||
|
||||
def to_tz(self, tzone: str) -> "Time":
|
||||
dt = self.astimezone(ZONES[tzone])
|
||||
return self.__class__.fromtimestamp(dt.timestamp(), tz=ZONES[tzone])
|
||||
|
||||
|
||||
class Leagues:
|
||||
def __init__(self) -> None:
|
||||
self.data = json.loads(leagues_file.read_text(encoding="utf-8"))
|
||||
self.data = json.loads(
|
||||
(Path(__file__).parent / "leagues.json").read_text(encoding="utf-8")
|
||||
)
|
||||
|
||||
self.live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"
|
||||
|
||||
def teams(self, league: str) -> list[str]:
|
||||
return self.data["teams"].get(league, [])
|
||||
|
||||
def info(self, name: str) -> tuple[str | str]:
|
||||
def info(self, name: str) -> tuple[str | None, str]:
|
||||
name = name.upper()
|
||||
|
||||
if match := next(
|
||||
|
|
@ -36,9 +88,9 @@ class Leagues:
|
|||
):
|
||||
tvg_id, logo = match
|
||||
|
||||
return (tvg_id, logo or live_img)
|
||||
return (tvg_id, logo or self.live_img)
|
||||
|
||||
return ("Live.Event.us", live_img)
|
||||
return (None, self.live_img)
|
||||
|
||||
def is_valid(self, event: str, league: str) -> bool:
|
||||
if match := re.search(r"(\-|vs.?)", event):
|
||||
|
|
@ -48,5 +100,11 @@ class Leagues:
|
|||
|
||||
return event.lower() == "nfl redzone" if league == "NFL" else False
|
||||
|
||||
@property
|
||||
def league_names(self) -> list[str]:
|
||||
return self.data["teams"].keys()
|
||||
|
||||
|
||||
leagues = Leagues()
|
||||
|
||||
__all__ = ["leagues", "Time"]
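A short sketch of the Time helper the scrapers now share (the date below is an arbitrary example; from_str falls back to a 1970-era sentinel when nothing parses):

now = Time.now()                      # timezone-aware, UTC by default
window_start = now.delta(minutes=-30)
window_end = now.delta(minutes=30)

# "ET"/"EST"/"EDT"/"UTC" suffixes are looked up in ZONES, stripped from the
# string, and the parsed value is normalised back to UTC.
kickoff = Time.from_str("2024-05-01 19:30 ET")

if window_start <= kickoff <= window_end:
    print("treat the event as live")

print(kickoff.to_tz("EST"))           # the same instant viewed in US Eastern time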
@ -10,22 +10,23 @@ LOG_FMT = (
|
|||
)
|
||||
|
||||
COLORS = {
|
||||
"DEBUG": "\033[37m",
|
||||
"DEBUG": "\033[36m",
|
||||
"INFO": "\033[32m",
|
||||
"WARNING": "\033[33m",
|
||||
"ERROR": "\033[31m",
|
||||
"CRITICAL": "\033[41m",
|
||||
"CRITICAL": "\033[1;41m",
|
||||
"reset": "\033[0m",
|
||||
}
|
||||
|
||||
|
||||
class ColorFormatter(logging.Formatter):
|
||||
def format(self, record) -> str:
|
||||
color = COLORS.get(record.levelname, "")
|
||||
color = COLORS.get(record.levelname, COLORS["reset"])
|
||||
levelname = record.levelname
|
||||
record.levelname = f"{color}{levelname}{COLORS['reset']}"
|
||||
record.levelname = f"{color}{levelname:<8}{COLORS['reset']}"
|
||||
formatted = super().format(record)
|
||||
record.levelname = levelname
|
||||
|
||||
return formatted
|
||||
|
||||
|
||||
|
|
@ -41,5 +42,9 @@ def get_logger(name: str | None = None) -> logging.Logger:
|
|||
handler.setFormatter(formatter)
|
||||
logger.addHandler(handler)
|
||||
logger.setLevel(logging.INFO)
|
||||
logger.propagate = False
|
||||
|
||||
return logger
|
||||
|
||||
|
||||
__all__ = ["get_logger", "ColorFormatter"]
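Call sites stay the same; get_logger still returns a ready-to-use logger, now with padded, colourised level names:

log = get_logger(__name__)

log.debug("cyan now, and hidden by default since the level is INFO")
log.info("level names are padded to 8 columns by the :<8 format")
log.warning("existing scraper calls keep working as-is")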
@ -1,154 +0,0 @@
|
|||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import Browser, BrowserContext, Playwright, Request
|
||||
|
||||
UA = (
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
|
||||
"AppleWebKit/537.36 (KHTML, like Gecko) "
|
||||
"Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
|
||||
)
|
||||
|
||||
CLIENT = httpx.AsyncClient(
|
||||
timeout=5,
|
||||
follow_redirects=True,
|
||||
headers={"User-Agent": UA},
|
||||
)
|
||||
|
||||
|
||||
async def check_status(client: httpx.AsyncClient, url: str) -> bool:
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
return r.status_code == 200
|
||||
|
||||
|
||||
async def get_base(client: httpx.AsyncClient, mirrors: list[str]) -> str | None:
|
||||
tasks = [check_status(client, link) for link in mirrors]
|
||||
results = await asyncio.gather(*tasks)
|
||||
|
||||
try:
|
||||
return [url for url, ok in zip(mirrors, results) if ok][0]
|
||||
except IndexError:
|
||||
return
|
||||
|
||||
|
||||
async def safe_process_event(
|
||||
fn: Callable,
|
||||
url_num: int,
|
||||
timeout: int | float = 15,
|
||||
log: logging.Logger | None = None,
|
||||
) -> Any | None:
|
||||
|
||||
if not log:
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
task = asyncio.create_task(fn())
|
||||
|
||||
try:
|
||||
return await asyncio.wait_for(task, timeout=timeout)
|
||||
except asyncio.TimeoutError:
|
||||
log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
|
||||
|
||||
task.cancel()
|
||||
|
||||
try:
|
||||
await task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
except Exception as e:
|
||||
log.debug(f"URL {url_num}) Ignore exception after timeout: {e}")
|
||||
|
||||
|
||||
def capture_req(
|
||||
req: Request,
|
||||
captured: list[str],
|
||||
got_one: asyncio.Event,
|
||||
) -> None:
|
||||
valid_m3u8 = re.compile(r"^(?!.*(amazonaws|knitcdn)).*\.m3u8")
|
||||
|
||||
if valid_m3u8.search(req.url):
|
||||
captured.append(req.url)
|
||||
got_one.set()
|
||||
|
||||
|
||||
async def new_browser(
|
||||
playwright: Playwright,
|
||||
browser: str = "firefox",
|
||||
ignore_https_errors: bool = False,
|
||||
) -> tuple[Browser, BrowserContext]:
|
||||
|
||||
if browser == "brave":
|
||||
brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222")
|
||||
context = brwsr.contexts[0]
|
||||
else:
|
||||
brwsr = await playwright.firefox.launch(headless=True)
|
||||
|
||||
context = await brwsr.new_context(
|
||||
user_agent=UA,
|
||||
ignore_https_errors=ignore_https_errors,
|
||||
viewport={"width": 1366, "height": 768},
|
||||
device_scale_factor=1,
|
||||
locale="en-US",
|
||||
timezone_id="America/New_York",
|
||||
color_scheme="dark",
|
||||
permissions=["geolocation"],
|
||||
extra_http_headers={
|
||||
"Accept-Language": "en-US,en;q=0.9",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
},
|
||||
)
|
||||
|
||||
await context.add_init_script(
|
||||
"""
|
||||
Object.defineProperty(navigator, 'webdriver', {get: () => undefined});
|
||||
|
||||
Object.defineProperty(navigator, 'languages', {
|
||||
get: () => ['en-US', 'en']
|
||||
});
|
||||
|
||||
Object.defineProperty(navigator, 'plugins', {
|
||||
get: () => [1, 2, 3, 4]
|
||||
});
|
||||
|
||||
const elementDescriptor = Object.getOwnPropertyDescriptor(HTMLElement.prototype, 'offsetHeight');
|
||||
Object.defineProperty(HTMLDivElement.prototype, 'offsetHeight', {
|
||||
...elementDescriptor,
|
||||
get: function() {
|
||||
if (this.id === 'modernizr') { return 24; }
|
||||
return elementDescriptor.get.apply(this);
|
||||
}
|
||||
});
|
||||
|
||||
Object.defineProperty(window.screen, 'width', { get: () => 1366 });
|
||||
Object.defineProperty(window.screen, 'height', { get: () => 768 });
|
||||
|
||||
const getParameter = WebGLRenderingContext.prototype. getParameter;
|
||||
WebGLRenderingContext.prototype.getParameter = function (param) {
|
||||
if (param === 37445) return "Intel Inc."; // UNMASKED_VENDOR_WEBGL
|
||||
if (param === 37446) return "Intel Iris OpenGL Engine"; // UNMASKED_RENDERER_WEBGL
|
||||
return getParameter.apply(this, [param]);
|
||||
};
|
||||
|
||||
const observer = new MutationObserver(mutations => {
|
||||
mutations.forEach(mutation => {
|
||||
mutation.addedNodes.forEach(node => {
|
||||
if (node.tagName === 'IFRAME' && node.hasAttribute('sandbox')) {
|
||||
node.removeAttribute('sandbox');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
observer.observe(document.documentElement, { childList: true, subtree: true });
|
||||
"""
|
||||
)
|
||||
|
||||
return brwsr, context
M3U8/scrapers/utils/webwork.py (new file, 174 lines)
@ -0,0 +1,174 @@
|
|||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import Browser, BrowserContext, Playwright, Request
|
||||
|
||||
from .logger import get_logger
|
||||
|
||||
|
||||
class Network:
|
||||
UA = (
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
|
||||
"AppleWebKit/537.36 (KHTML, like Gecko) "
|
||||
"Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.client = httpx.AsyncClient(
|
||||
timeout=5,
|
||||
follow_redirects=True,
|
||||
headers={"User-Agent": Network.UA},
|
||||
)
|
||||
|
||||
self._logger = get_logger("network")
|
||||
|
||||
async def check_status(self, url: str) -> bool:
|
||||
try:
|
||||
r = await self.client.get(url)
|
||||
r.raise_for_status()
|
||||
return r.status_code == 200
|
||||
except (httpx.HTTPError, httpx.TimeoutException) as e:
|
||||
self._logger.debug(f"Status check failed for {url}: {e}")
|
||||
return False
|
||||
|
||||
async def get_base(self, mirrors: list[str]) -> str | None:
|
||||
tasks = [self.check_status(link) for link in mirrors]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
working_mirrors = [
|
||||
mirror for mirror, success in zip(mirrors, results) if success
|
||||
]
|
||||
|
||||
return working_mirrors[0] if working_mirrors else None
|
||||
|
||||
@staticmethod
|
||||
async def safe_process(
|
||||
fn: Callable,
|
||||
url_num: int,
|
||||
timeout: int | float = 15,
|
||||
log: logging.Logger | None = None,
|
||||
) -> Any | None:
|
||||
|
||||
if not log:
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
task = asyncio.create_task(fn())
|
||||
|
||||
try:
|
||||
return await asyncio.wait_for(task, timeout=timeout)
|
||||
except asyncio.TimeoutError:
|
||||
log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
|
||||
|
||||
task.cancel()
|
||||
|
||||
try:
|
||||
await task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
except Exception as e:
|
||||
log.debug(f"URL {url_num}) Ignore exception after timeout: {e}")
|
||||
|
||||
return None
|
||||
except Exception as e:
|
||||
log.error(f"URL {url_num}) Unexpected error: {e}")
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def capture_req(
|
||||
req: Request,
|
||||
captured: list[str],
|
||||
got_one: asyncio.Event,
|
||||
patterns: list[str] | None = None,
|
||||
) -> None:
|
||||
if not patterns:
|
||||
patterns = ["amazonaws", "knitcdn"]
|
||||
|
||||
pattern = re.compile(rf"^.*\.m3u8(?!.*({'|'.join(patterns)}))")
|
||||
|
||||
if pattern.search(req.url):
|
||||
captured.append(req.url)
|
||||
got_one.set()
|
||||
|
||||
@staticmethod
|
||||
async def browser(
|
||||
playwright: Playwright,
|
||||
browser: str = "firefox",
|
||||
ignore_https_errors: bool = False,
|
||||
) -> tuple[Browser, BrowserContext]:
|
||||
|
||||
if browser == "brave":
|
||||
brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222")
|
||||
context = brwsr.contexts[0]
|
||||
else:
|
||||
brwsr = await playwright.firefox.launch(headless=True)
|
||||
|
||||
context = await brwsr.new_context(
|
||||
user_agent=Network.UA,
|
||||
ignore_https_errors=ignore_https_errors,
|
||||
viewport={"width": 1366, "height": 768},
|
||||
device_scale_factor=1,
|
||||
locale="en-US",
|
||||
timezone_id="America/New_York",
|
||||
color_scheme="dark",
|
||||
permissions=["geolocation"],
|
||||
extra_http_headers={
|
||||
"Accept-Language": "en-US,en;q=0.9",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
},
|
||||
)
|
||||
|
||||
await context.add_init_script(
|
||||
"""
|
||||
Object.defineProperty(navigator, 'webdriver', {get: () => undefined});
|
||||
|
||||
Object.defineProperty(navigator, 'languages', {
|
||||
get: () => ['en-US', 'en']
|
||||
});
|
||||
|
||||
Object.defineProperty(navigator, 'plugins', {
|
||||
get: () => [1, 2, 3, 4]
|
||||
});
|
||||
|
||||
const elementDescriptor = Object.getOwnPropertyDescriptor(HTMLElement.prototype, 'offsetHeight');
|
||||
Object.defineProperty(HTMLDivElement.prototype, 'offsetHeight', {
|
||||
...elementDescriptor,
|
||||
get: function() {
|
||||
if (this.id === 'modernizr') { return 24; }
|
||||
return elementDescriptor.get.apply(this);
|
||||
}
|
||||
});
|
||||
|
||||
Object.defineProperty(window.screen, 'width', { get: () => 1366 });
|
||||
Object.defineProperty(window.screen, 'height', { get: () => 768 });
|
||||
|
||||
const getParameter = WebGLRenderingContext.prototype. getParameter;
|
||||
WebGLRenderingContext.prototype.getParameter = function (param) {
|
||||
if (param === 37445) return "Intel Inc."; // UNMASKED_VENDOR_WEBGL
|
||||
if (param === 37446) return "Intel Iris OpenGL Engine"; // UNMASKED_RENDERER_WEBGL
|
||||
return getParameter.apply(this, [param]);
|
||||
};
|
||||
|
||||
const observer = new MutationObserver(mutations => {
|
||||
mutations.forEach(mutation => {
|
||||
mutation.addedNodes.forEach(node => {
|
||||
if (node.tagName === 'IFRAME' && node.hasAttribute('sandbox')) {
|
||||
node.removeAttribute('sandbox');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
observer.observe(document.documentElement, { childList: true, subtree: true });
|
||||
"""
|
||||
)
|
||||
|
||||
return brwsr, context
|
||||
|
||||
|
||||
network = Network()
|
||||
|
||||
__all__ = ["network"]
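A condensed sketch of driving the new network singleton the way the scrapers do (the mirror URLs and the event coroutine are placeholders; capture_req is normally registered on a Playwright page):

import asyncio
from functools import partial

async def demo() -> None:
    # First mirror that answers 200, or None when every mirror is down.
    base_url = await network.get_base(["https://mirror-one.example", "https://mirror-two.example"])

    async def fake_event() -> str:
        await asyncio.sleep(0.1)
        return "https://example.com/stream.m3u8"

    # safe_process enforces a per-event timeout and logs instead of raising.
    url = await network.safe_process(fake_event, url_num=1, timeout=15)
    print(base_url, url)

    # Request sniffing: the scrapers bind captured/got_one and attach the
    # handler with page.on("request", handler) on a Playwright page.
    captured: list[str] = []
    got_one = asyncio.Event()
    handler = partial(network.capture_req, captured=captured, got_one=got_one)

asyncio.run(demo())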