doms9 2025-10-15 10:53:54 -04:00
parent b110aee1e8
commit 00000d9ba6
11 changed files with 121 additions and 135 deletions

View file

@@ -76,7 +76,7 @@ async def fetch_xml(url: str) -> ET.Element | None:
         r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
-        print(f'Failed to fetch "{url}"\n{e}')
+        print(f'Failed to fetch "{url}": {e}')
         return

     try:
@@ -85,7 +85,7 @@ async def fetch_xml(url: str) -> ET.Element | None:
         return ET.fromstring(decompressed_data)
     except Exception as e:
-        print(f'Failed to decompress and parse XML from "{url}"\n{e}')
+        print(f'Failed to decompress and parse XML from "{url}": {e}')


 def hijack_id(
@@ -182,7 +182,7 @@ async def main() -> None:
     tree.write(epg_file, encoding="utf-8", xml_declaration=True)
-    print(f"EPG saved to {epg_file.name}")
+    print(f"EPG saved to {epg_file.resolve()}")


 if __name__ == "__main__":
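Side note on the .name -> .resolve() change in this hunk: Path.name is only the final component of a path, while Path.resolve() returns the absolute path, which is the more useful thing to print in a "saved to" message. A minimal illustration (the relative path below is made up, not taken from the repo):

    from pathlib import Path

    epg_file = Path("EPG/TV.xml")   # hypothetical relative path
    print(epg_file.name)            # TV.xml
    print(epg_file.resolve())       # e.g. /home/user/iptv/EPG/TV.xml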

View file

@@ -83,7 +83,7 @@ async def main() -> None:
         encoding="utf-8",
     )
-    log.info(f"Base + Events saved to {COMBINED_FILE.name}")
+    log.info(f"Base + Events saved to {COMBINED_FILE.resolve()}")

     EVENTS_FILE.write_text(
         '#EXTM3U url-tvg="https://raw.githubusercontent.com/doms9/iptv/refs/heads/default/EPG/TV.xml"\n'
@@ -91,7 +91,7 @@ async def main() -> None:
         encoding="utf-8",
     )
-    log.info(f"Events saved to {EVENTS_FILE.name}")
+    log.info(f"Events saved to {EVENTS_FILE.resolve()}")


 if __name__ == "__main__":

View file

@@ -1,3 +1,4 @@
+from functools import partial
 from pathlib import Path
 from urllib.parse import unquote, urljoin
@@ -20,14 +21,48 @@ MIRRORS = [
 CACHE_FILE = Cache(Path(__file__).parent / "caches" / "fstv.json", exp=10_800)


+async def process_event(
+    client: httpx.AsyncClient,
+    url: str,
+    url_num: int,
+) -> tuple[str, str]:
+    try:
+        r = await client.get(url)
+        r.raise_for_status()
+    except Exception as e:
+        log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
+        return "", ""
+
+    soup = HTMLParser(r.text)
+
+    if category_links := soup.css(".common-list-category .category-item a"):
+        match_name = category_links[-1].text(strip=True)
+    else:
+        match_name = None
+
+    if not match_name or match_name.lower() == "vs":
+        if og_title := soup.css_first("meta[property='og:title']"):
+            match_name = (
+                og_title.attributes.get("content", "").split(" start on")[0].strip()
+            )
+
+    if not (ifr := soup.css_first("iframe")):
+        log.info(f"URL {url_num}) No M3U8 found")
+        return "", ""
+
+    if src := ifr.attributes.get("src"):
+        log.info(f"URL {url_num}) Captured M3U8")
+        return match_name or "", unquote(src).split("link=")[-1]
+
+
 async def get_events(
     client: httpx.AsyncClient,
     base_url: str,
     cached_hrefs: set[str],
 ) -> list[dict[str, str]]:
-    log.info(f'Scraping from "{base_url}"')
     try:
         r = await client.get(base_url)
         r.raise_for_status()
@@ -76,42 +111,6 @@ async def get_events(

     return events


-async def process_event(
-    client: httpx.AsyncClient,
-    url: str,
-    url_num: int,
-) -> tuple[str, str]:
-    try:
-        r = await client.get(url)
-        r.raise_for_status()
-    except Exception as e:
-        log.error(f'URL {url_num}) Failed to fetch "{url}"\n{e}')
-        return "", ""
-
-    soup = HTMLParser(r.text)
-
-    if category_links := soup.css(".common-list-category .category-item a"):
-        match_name = category_links[-1].text(strip=True)
-    else:
-        match_name = None
-
-    if not match_name or match_name.lower() == "vs":
-        if og_title := soup.css_first("meta[property='og:title']"):
-            match_name = (
-                og_title.attributes.get("content", "").split(" start on")[0].strip()
-            )
-
-    if not (ifr := soup.css_first("iframe")):
-        log.info(f"URL {url_num}) No M3U8 found")
-        return "", ""
-
-    if src := ifr.attributes.get("src", ""):
-        log.info(f"URL {url_num}) Captured M3U8")
-        return match_name or "", unquote(src).split("link=")[-1]
-
-
 async def scrape(client: httpx.AsyncClient) -> None:
     cached_urls = CACHE_FILE.load()
     cached_hrefs = {entry["href"] for entry in cached_urls.values()}
@@ -125,6 +124,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
         CACHE_FILE.write(cached_urls)
         return

+    log.info(f'Scraping from "{base_url}"')
+
     events = await get_events(
         client,
         base_url,
@@ -136,15 +137,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
     now = Time.now().timestamp()

     for i, ev in enumerate(events, start=1):
-        match_name, url = await network.safe_process(
-            lambda: process_event(
-                client,
-                ev["link"],
-                url_num=i,
-            ),
-            url_num=i,
-            log=log,
-        )
+        handler = partial(process_event, client=client, url=ev["link"], url_num=i)
+
+        match_name, url = await network.safe_process(handler, url_num=i, log=log)

         if url:
             sport = ev["sport"]
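Aside on the lambda -> functools.partial swap in the hunk above: partial(process_event, client=client, url=ev["link"], url_num=i) freezes its arguments at the moment the handler is built and yields a zero-argument callable, whereas lambda: process_event(...) looks up ev and i only when it is finally invoked, so a deferred or retried call would see whatever the loop variables hold at that point. Whether safe_process ever defers the call is defined in network.py and not shown here, so treat the following as a generic sketch of the difference rather than a claim about this repo:

    import asyncio
    from functools import partial

    async def process_event(url: str, url_num: int) -> str:
        return f"{url_num}) {url}"

    async def main() -> None:
        handlers = []
        for i, url in enumerate(["a", "b"], start=1):
            handlers.append(lambda: process_event(url, url_num=i))   # late binding
            handlers.append(partial(process_event, url, url_num=i))  # bound now

        # after the loop, both lambdas see i=2 and url="b";
        # the partials still hold (1, "a") and (2, "b")
        print([await fn() for fn in handlers])

    asyncio.run(main())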

View file

@@ -1,4 +1,5 @@
 import re
+from functools import partial
 from pathlib import Path
 from urllib.parse import urljoin
@@ -26,7 +27,7 @@ async def process_event(
         r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'URL {url_num}) Failed to fetch "{url}"\n{e}')
+        log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
         return

     valid_m3u8 = re.compile(
@@ -55,15 +56,15 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
     events = []

     for card in soup.css("div.container div.card"):
-        sport = card.css_first("h5.card-title").text(strip=True)
-        name = card.css_first("p.card-text").text(strip=True)
-
         link = card.css_first("a.btn.btn-primary")

         if not (href := link.attrs.get("href")):
             continue

+        sport = card.css_first("h5.card-title").text(strip=True)
+        name = card.css_first("p.card-text").text(strip=True)
+
         events.append(
             {
                 "sport": sport,
@@ -90,8 +91,10 @@ async def scrape(client: httpx.AsyncClient) -> None:
     now = Time.now().timestamp()

     for i, ev in enumerate(events, start=1):
+        handler = partial(process_event, client=client, url=ev["link"], url_num=i)
+
         url = await network.safe_process(
-            lambda: process_event(client, url=ev["link"], url_num=i),
+            handler,
             url_num=i,
             log=log,
             timeout=10,

View file

@@ -95,7 +95,7 @@ async def get_events(
         r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{url}"\n{e}')
+        log.error(f'Failed to fetch "{url}": {e}')
         return []
@@ -132,7 +132,7 @@ async def get_events(
             time_text = time_span.text(strip=True)
-            timestamp = int(a.attributes.get("data-time", 31496400))
+            timestamp = int(a.attributes.get("data-time", Time.default_8()))

             key = f"[{sport}] {name} (SEAST)"
@@ -180,15 +180,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
         browser, context = await network.browser(p, browser="brave")

         for i, ev in enumerate(events, start=1):
-            url = await network.safe_process(
-                lambda: process_event(
-                    ev["link"],
-                    url_num=i,
-                    context=context,
-                ),
-                url_num=i,
-                log=log,
-            )
+            handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+
+            url = await network.safe_process(handler, url_num=i, log=log)

             if url:
                 sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]

View file

@@ -78,7 +78,7 @@ async def refresh_html_cache(client: httpx.AsyncClient, url: str) -> dict[str, s
         r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{url}"\n{e}')
+        log.error(f'Failed to fetch "{url}": {e}')
         return []
@@ -173,15 +173,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
         browser, context = await network.browser(p, browser="brave")

         for i, ev in enumerate(events, start=1):
-            url = await network.safe_process(
-                lambda: process_event(
-                    ev["link"],
-                    url_num=i,
-                    context=context,
-                ),
-                url_num=i,
-                log=log,
-            )
+            handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+
+            url = await network.safe_process(handler, url_num=i, log=log)

             if url:
                 sport, event, ts = ev["sport"], ev["event"], ev["event_ts"]

View file

@@ -28,7 +28,7 @@ def validate_category(s: str) -> str:
     elif s == "fight":
         return "Fight (UFC/Boxing)"

-    return s.capitalize()
+    return s.capitalize() if len(s) > 4 else s.upper()


 async def refresh_api_cache(
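For context on the validate_category change above: str.capitalize() lowercases everything after the first character, which mangles short league acronyms, so the new branch upper-cases anything four characters or shorter. A quick illustration of just that return line (the sample inputs are guesses, not values pulled from the repo):

    def validate_category(s: str) -> str:
        # trimmed to the final branch shown in the diff
        return s.capitalize() if len(s) > 4 else s.upper()

    print(validate_category("soccer"))  # Soccer
    print(validate_category("nfl"))     # NFL   (plain .capitalize() gave "Nfl")
    print(validate_category("darts"))   # Darts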
@@ -40,12 +40,12 @@ async def refresh_api_cache(
         r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{url}"\n{e}')
+        log.error(f'Failed to fetch "{url}": {e}')
         return {}

     data = r.json()

-    data[0]["timestamp"] = Time.now().timestamp()
+    data[-1]["timestamp"] = Time.now().timestamp()

     return data
@@ -113,7 +113,7 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:

-    if not (api_data := API_FILE.load(per_entry=False, index=True)):
+    if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         api_data = await refresh_api_cache(
             client,
             urljoin(
@@ -211,15 +211,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
         browser, context = await network.browser(p, "brave")

         for i, ev in enumerate(events, start=1):
-            url = await network.safe_process(
-                lambda: process_event(
-                    ev["link"],
-                    url_num=i,
-                    context=context,
-                ),
-                url_num=i,
-                log=log,
-            )
+            handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+
+            url = await network.safe_process(handler, url_num=i, log=log)

             if url:
                 sport, event, logo, ts = (

View file

@@ -11,7 +11,7 @@ class Cache:
         self.now_ts = Time.now().timestamp()

     def is_fresh(self, entry: dict) -> bool:
-        ts: float | int = entry.get("timestamp", 31496400)
+        ts: float | int = entry.get("timestamp", Time.default_8())

         dt_ts = Time.clean(Time.from_ts(ts)).timestamp()
@@ -20,7 +20,7 @@ class Cache:
     def load(
         self,
         per_entry: bool = True,
-        index: bool = False,
+        index: int | None = None,
     ) -> dict[str, dict[str, str | float]]:

         try:
@@ -32,10 +32,10 @@ class Cache:
             return {k: v for k, v in data.items() if self.is_fresh(v)}

         if index:
-            ts: float | int = data[0].get("timestamp", 31496400)
+            ts: float | int = data[index].get("timestamp", Time.default_8())
         else:
-            ts: float | int = data.get("timestamp", 31496400)
+            ts: float | int = data.get("timestamp", Time.default_8())

         dt_ts = Time.clean(Time.from_ts(ts)).timestamp()
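Read together with the data[0] -> data[-1] edits in the scraper files, the load() signature above now takes a real list index instead of a boolean flag: callers pass index=-1, so the freshness timestamp is looked up on the same element refresh_api_cache() writes it to. A self-contained sketch of just that branch, with a stand-in for Time.default_8() (the helper names here are illustrative, not from the repo):

    from datetime import datetime

    def default_8() -> float:
        # stand-in for Time.default_8(): today at 08:00 as a timestamp
        return datetime.now().replace(hour=8, minute=0, second=0, microsecond=0).timestamp()

    def pick_ts(data, index: int | None = None) -> float:
        # mirrors the branch in Cache.load above: list payloads keep the refresh
        # timestamp on one element (callers now pass index=-1), dict payloads at top level
        if index:
            return data[index].get("timestamp", default_8())
        return data.get("timestamp", default_8())

    print(pick_ts([{"event": "x"}, {"timestamp": 1.0}], index=-1))  # 1.0
    print(pick_ts({"timestamp": 2.0}))                              # 2.0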

View file

@@ -21,6 +21,14 @@ class Time(datetime):
     def from_ts(cls, ts: int | float) -> "Time":
         return cls.fromtimestamp(ts, tz=cls.TZ)

+    @classmethod
+    def default_8(cls) -> float:
+        return (
+            cls.now()
+            .replace(hour=8, minute=0, second=0, microsecond=0, tzinfo=cls.TZ)
+            .timestamp()
+        )
+
     def delta(self, **kwargs) -> "Time":
         return self.from_ts((self + timedelta(**kwargs)).timestamp())
@@ -66,7 +74,7 @@ class Time(datetime):
            except ValueError:
                continue
        else:
-           return cls.from_ts(31496400)
+           return cls.from_ts(Time.default_8())

        if not dt.tzinfo:
            dt = tz.localize(dt) if hasattr(tz, "localize") else dt.replace(tzinfo=tz)
@@ -75,13 +83,13 @@ class Time(datetime):


 class Leagues:
+    live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"
+
     def __init__(self) -> None:
         self.data = json.loads(
             (Path(__file__).parent / "leagues.json").read_text(encoding="utf-8")
         )
-        self.live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"

     def teams(self, league: str) -> list[str]:
         return self.data["teams"].get(league, [])
@@ -145,16 +153,12 @@ class Leagues:
                 else:
                     return self.info("Basketball")

-            case "Hockey":
+            case "Ice Hockey" | "Hockey":
                 return self.info("NHL")

             case _:
                 return self.info(sport)

-    @property
-    def league_names(self) -> list[str]:
-        return self.data["teams"].keys()
-

 leagues = Leagues()

View file

@@ -127,46 +127,54 @@ class Network:
         await context.add_init_script(
             """
-            Object.defineProperty(navigator, 'webdriver', {get: () => undefined});
+            Object.defineProperty(navigator, "webdriver", { get: () => undefined });

-            Object.defineProperty(navigator, 'languages', {
-                get: () => ['en-US', 'en']
+            Object.defineProperty(navigator, "languages", {
+                get: () => ["en-US", "en"],
             });

-            Object.defineProperty(navigator, 'plugins', {
-                get: () => [1, 2, 3, 4]
+            Object.defineProperty(navigator, "plugins", {
+                get: () => [1, 2, 3, 4],
             });

-            const elementDescriptor = Object.getOwnPropertyDescriptor(HTMLElement.prototype, 'offsetHeight');
-            Object.defineProperty(HTMLDivElement.prototype, 'offsetHeight', {
+            const elementDescriptor = Object.getOwnPropertyDescriptor(
+                HTMLElement.prototype,
+                "offsetHeight"
+            );
+
+            Object.defineProperty(HTMLDivElement.prototype, "offsetHeight", {
                 ...elementDescriptor,
-                get: function() {
-                    if (this.id === 'modernizr') { return 24; }
+                get: function () {
+                    if (this.id === "modernizr") {
+                        return 24;
+                    }
                     return elementDescriptor.get.apply(this);
-                }
+                },
             });

-            Object.defineProperty(window.screen, 'width', { get: () => 1366 });
-            Object.defineProperty(window.screen, 'height', { get: () => 768 });
+            Object.defineProperty(window.screen, "width", { get: () => 1366 });
+            Object.defineProperty(window.screen, "height", { get: () => 768 });

             const getParameter = WebGLRenderingContext.prototype.getParameter;
             WebGLRenderingContext.prototype.getParameter = function (param) {
                 if (param === 37445) return "Intel Inc."; // UNMASKED_VENDOR_WEBGL
                 if (param === 37446) return "Intel Iris OpenGL Engine"; // UNMASKED_RENDERER_WEBGL
                 return getParameter.apply(this, [param]);
             };

-            const observer = new MutationObserver(mutations => {
-                mutations.forEach(mutation => {
-                    mutation.addedNodes.forEach(node => {
-                        if (node.tagName === 'IFRAME' && node.hasAttribute('sandbox')) {
-                            node.removeAttribute('sandbox');
+            const observer = new MutationObserver((mutations) => {
+                mutations.forEach((mutation) => {
+                    mutation.addedNodes.forEach((node) => {
+                        if (node.tagName === "IFRAME" && node.hasAttribute("sandbox")) {
+                            node.removeAttribute("sandbox");
                         }
                     });
                 });
             });

             observer.observe(document.documentElement, { childList: true, subtree: true });
             """
         )

View file

@@ -68,7 +68,7 @@ async def refresh_api_cache(
     for ev in data:
         ev["ts"] = ev.pop("timestamp")

-    data[0]["timestamp"] = Time.now().timestamp()
+    data[-1]["timestamp"] = Time.now().timestamp()

     return data
@@ -152,7 +152,7 @@ async def get_events(
     cached_keys: set[str],
 ) -> list[dict[str, str]]:

-    if not (api_data := API_FILE.load(per_entry=False, index=True)):
+    if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         api_data = await refresh_api_cache(client, base_url)
         API_FILE.write(api_data)
@@ -227,15 +227,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
         browser, context = await network.browser(p)

         for i, ev in enumerate(events, start=1):
-            url = await network.safe_process(
-                lambda: process_event(
-                    ev["link"],
-                    url_num=i,
-                    context=context,
-                ),
-                url_num=i,
-                log=log,
-            )
+            handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+
+            url = await network.safe_process(handler, url_num=i, log=log)

             sport, event, logo, ts = (
                 ev["sport"],