doms9 2025-08-27 10:26:56 -04:00
parent 73f28024ac
commit 00000d9822
6 changed files with 125 additions and 180 deletions

@@ -1,21 +1,22 @@
+import asyncio
 from urllib.parse import urljoin

 import httpx
-from bs4 import BeautifulSoup
+from selectolax.parser import HTMLParser

 urls: dict[str, str] = {}

-mirrors = {
+mirrors = [
     "https://fstv.online",
     "https://fstv.space",
     "https://fstv.zip",
     "https://fstv.us",
-}
+]


-def check_status(client: httpx.Client, url: str) -> bool:
+async def check_status(client: httpx.AsyncClient, url: str) -> bool:
     try:
-        r = client.get(url)
+        r = await client.get(url)
         r.raise_for_status()
     except Exception:
         return False
@@ -23,81 +24,90 @@ def check_status(client: httpx.Client, url: str) -> bool:
     return r.status_code == 200


-def get_base(client: httpx.Client) -> str:
-    for url in filter(lambda x: check_status(client, x), mirrors):
-        return url
+async def get_base(client: httpx.AsyncClient) -> str | None:
+    # probe every mirror concurrently; return the first reachable one, or None
+    tasks = [check_status(client, link) for link in mirrors]
+    results = await asyncio.gather(*tasks)
+
+    return next((url for url, ok in zip(mirrors, results) if ok), None)


-def get_hrefs(client: httpx.Client, base_url: str) -> list[tuple[str, str]]:
+async def get_hrefs(client: httpx.AsyncClient, base_url: str) -> list[tuple[str, str]]:
     print(f'Scraping from "{base_url}"')

     try:
-        r = client.get(base_url)
+        r = await client.get(base_url)
         r.raise_for_status()
     except Exception as e:
         print(f'Failed to fetch "{base_url}"\n{e}')
         return []

-    soup = BeautifulSoup(r.text, "lxml")
+    soup = HTMLParser(r.text)

     events = {}

-    for wrpr in soup.find_all("div", class_="fixtures-live-wrapper"):
-        for games in wrpr.select(".match-table-item"):
+    for wrpr in soup.css("div.fixtures-live-wrapper"):
+        for games in wrpr.css(".match-table-item"):
-            league_name = games.select_one(".league-info a.league-name")
+            league_name = games.css_first(".league-info a.league-name")
-            league_match = games.select_one(".common-table-row a[href*='/match/']")
+            league_match = games.css_first(".common-table-row a[href*='/match/']")

             if league_name and league_match:
-                full_text = league_name.get_text(strip=True)
+                full_text = league_name.text(strip=True)

                 if "]" in full_text:
                     event_name = full_text.split("]", 1)[1].strip()
                 else:
                     event_name = full_text

-                events[event_name] = urljoin(base_url, league_match["href"])
+                events[event_name] = urljoin(
+                    base_url, league_match.attributes.get("href")
+                )

     return events.items()


-def fetch_m3u8(client: httpx.Client, url: str) -> tuple[str, list[str]]:
+async def fetch_m3u8(client: httpx.AsyncClient, url: str) -> tuple[str, list[str]]:
     try:
-        r = client.get(url)
+        r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
         print(f'Failed to fetch "{url}"\n{e}')
         return []

-    soup = BeautifulSoup(r.text, "lxml")
+    soup = HTMLParser(r.text)

-    if category_links := soup.select(".common-list-category .category-item a"):
-        match_name = category_links[-1].get_text(strip=True)
+    if category_links := soup.css(".common-list-category .category-item a"):
+        match_name = category_links[-1].text(strip=True)
     else:
         match_name = None

     if not match_name or match_name.lower() == "vs":
-        if og_title := soup.find("meta", property="og:title"):
-            match_name = og_title["content"].split(" start on")[0].strip()
+        if og_title := soup.css_first("meta[property='og:title']"):
+            match_name = (
+                og_title.attributes.get("content", "").split(" start on")[0].strip()
+            )

-    btns = soup.select("button.btn-server")
+    btns = soup.css("button.btn-server")

-    return match_name, [btn["data-link"] for btn in btns if btn.has_attr("data-link")]
+    return match_name, [
+        btn.attributes.get("data-link") for btn in btns if "data-link" in btn.attributes
+    ]


-def main(client: httpx.Client) -> None:
-    base_url = get_base(client)
+async def main(client: httpx.AsyncClient) -> None:
+    if not (base_url := await get_base(client)):
+        print("No working FSTV mirrors")
+        return

-    for event, href in get_hrefs(client, base_url):
-        if not href:
-            continue
-        match_name, m3u8_urls = fetch_m3u8(client, href)
+    # drop events without an href up front so results stay aligned with events
+    events = [
+        (event, href) for event, href in await get_hrefs(client, base_url) if href
+    ]
+
+    tasks = [fetch_m3u8(client, href) for _, href in events]
+    results = await asyncio.gather(*tasks)
+
+    for (event, _), (match_name, m3u8_urls) in zip(events, results):
         if not m3u8_urls:
             continue
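
The entry point is outside this hunk, but with main(), get_base(), get_hrefs() and fetch_m3u8() now defined as coroutines it has to be driven by an event loop and handed an httpx.AsyncClient instead of httpx.Client. A minimal sketch of what that wrapper could look like (the run() helper, its client options and the __main__ guard are illustrative assumptions, not lines from this commit):

import asyncio

import httpx


async def run() -> None:
    # hypothetical driver: one shared AsyncClient passed through the coroutine pipeline
    async with httpx.AsyncClient(follow_redirects=True, timeout=10) as client:
        await main(client)


if __name__ == "__main__":
    asyncio.run(run())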