This commit is contained in:
parent c2aa25a654
commit 00000d934a

12 changed files with 231 additions and 68 deletions
@@ -1,24 +1,31 @@
 import asyncio
-from urllib.parse import urljoin
+import json
+from pathlib import Path
+from urllib.parse import unquote, urljoin
 
 import httpx
 from selectolax.parser import HTMLParser
 
-from .utils import LOGOS, get_base, get_logger
+from .utils import LOGOS, get_base, get_logger, load_cache, now, safe_process_event
 
 log = get_logger(__name__)
 
 urls: dict[str, dict[str, str]] = {}
 
 MIRRORS = [
-    "https://fstv.online",
-    "https://fstv.space",
-    "https://fstv.zip",
+    "https://fstv.space",
+    "https://fstv.online",
+    "https://fstv.us",
 ]
 
+CACHE_FILE = Path(__file__).parent / "caches" / "fstv.json"
+
 
-async def get_events(client: httpx.AsyncClient, base_url: str) -> list[tuple[str, str]]:
+async def get_events(
+    client: httpx.AsyncClient,
+    base_url: str,
+    cached_hrefs: set[str],
+) -> list[dict[str, str]]:
     log.info(f'Scraping from "{base_url}"')
 
     try:
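
The new imports pull load_cache, now, and safe_process_event from .utils, none of which appear in this diff. Going only by the call site added later in this commit (load_cache(CACHE_FILE, exp=14400), with entries that carry a "timestamp" field), a minimal sketch of what such a helper could look like; the implementation below is an assumption for illustration, not the repository's actual code:

    # hypothetical sketch of .utils.load_cache, inferred from its call site
    import json
    import time
    from pathlib import Path


    def load_cache(cache_file: Path, exp: int) -> dict[str, dict]:
        """Return cached entries younger than `exp` seconds; empty dict when no cache exists."""
        if not cache_file.exists():
            return {}
        data = json.loads(cache_file.read_text(encoding="utf-8"))
        cutoff = time.time() - exp
        # keep only entries whose stored timestamp is still fresh
        return {k: v for k, v in data.items() if v.get("timestamp", 0) >= cutoff}
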
@@ -31,7 +38,7 @@ async def get_events(client: httpx.AsyncClient, base_url: str) -> list[tuple[str
 
     soup = HTMLParser(r.text)
 
-    events = {}
+    events = []
 
     for wrpr in soup.css("div.fixtures-live-wrapper"):
         for games in wrpr.css(".match-table-item"):
@@ -48,21 +55,40 @@ async def get_events(client: httpx.AsyncClient, base_url: str) -> list[tuple[str
             else:
                 event_name = full_text
 
-            events[event_name] = urljoin(
-                base_url, league_match.attributes.get("href")
-            )
+            href = league_match.attributes.get("href")
+            link = urljoin(base_url, href)
+
+            if cached_hrefs & {href}:
+                continue
+
+            events.append(
+                {
+                    "sport": event_name,
+                    "link": link,
+                    "logo": LOGOS.get(
+                        event_name,
+                        "https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png",
+                    ),
+                    "href": href,
+                }
+            )
 
-    return events.items()
+    return events
 
 
-async def process_events(client: httpx.AsyncClient, url: str) -> tuple[str, list[str]]:
+async def process_event(
+    client: httpx.AsyncClient,
+    url: str,
+    url_num: int,
+) -> tuple[str, str]:
     try:
         r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{url}"\n{e}')
-        return []
+        log.error(f'URL {url_num}) Failed to fetch "{url}"\n{e}')
+        return "", ""
 
     soup = HTMLParser(r.text)
 
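
process_event is now driven through safe_process_event from .utils, which this diff does not show either. Judging only from the added call (it receives a zero-argument coroutine factory plus url_num and log keyword arguments, and its result is unpacked into match_name, url), a plausible sketch follows; it is hypothetical, not the project's actual helper:

    # hypothetical sketch of .utils.safe_process_event, inferred from its call site
    import logging
    from collections.abc import Awaitable, Callable


    async def safe_process_event(
        factory: Callable[[], Awaitable[tuple[str, str]]],
        url_num: int,
        log: logging.Logger,
    ) -> tuple[str, str]:
        """Run one scrape attempt, turning any unexpected error into an empty result."""
        try:
            return await factory()
        except Exception as e:
            log.error(f"URL {url_num}) Unhandled error\n{e}")
            return "", ""
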
@@ -77,43 +103,65 @@ async def process_events(client: httpx.AsyncClient, url: str) -> tuple[str, list
         og_title.attributes.get("content", "").split(" start on")[0].strip()
     )
 
-    btns = soup.css("button.btn-server")
+    if src := soup.css_first("iframe").attributes.get("src", ""):
+        log.info(f"URL {url_num}) Captured M3U8")
+        return match_name, unquote(src).split("link=")[-1]
 
-    return match_name, [
-        btn.attributes.get("data-link") for btn in btns if "data-link" in btn.attributes
-    ]
+    log.info(f"URL {url_num}) No M3U8 found")
+    return "", ""
 
 
 async def main(client: httpx.AsyncClient) -> None:
+    cached_urls = load_cache(CACHE_FILE, exp=14400)
+    cached_hrefs = {entry["href"] for entry in cached_urls.values()}
+    cached_count = len(cached_urls)
+    urls.update(cached_urls)
+
+    log.info(f"Collected {cached_count} event(s) from cache")
+
     if not (base_url := await get_base(client, MIRRORS)):
         log.warning("No working FSTV mirrors")
         return
 
-    events = await get_events(client, base_url)
+    events = await get_events(
+        client,
+        base_url,
+        cached_hrefs,
+    )
 
-    tasks = [process_events(client, href) for _, href in events if href]
-    results = await asyncio.gather(*tasks)
+    log.info(f"Processing {len(events)} new URL(s)")
 
-    for (event, _), (match_name, m3u8_urls) in zip(events, results):
-        event = event.strip()
+    for i, ev in enumerate(events, start=1):
+        match_name, url = await safe_process_event(
+            lambda: process_event(
+                client,
+                ev["link"],
+                url_num=i,
+            ),
+            url_num=i,
+            log=log,
+        )
 
-        if not m3u8_urls:
-            continue
-
-        for i, link in enumerate(m3u8_urls, start=1):
+        if url:
             key = (
-                f"[{event}] {match_name} (S{i})" if match_name else f"[{event}] (S{i})"
+                f"[{ev['sport']}] {match_name} (FSTV)"
+                if match_name
+                else f"[{ev['sport']}] (FSTV)"
             )
 
-            urls[key] = {
-                "url": link,
-                "logo": LOGOS.get(
-                    event,
-                    "https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png",
-                ),
+            entry = {
+                "url": url,
+                "logo": ev["logo"],
+                "base": base_url,
+                "timestamp": now.timestamp(),
+                "href": ev["href"],
             }
 
-    log.info(f"Collected {len(urls)} live event(s)")
+            urls[key] = cached_urls[key] = entry
 
-    # add caching
+    if new_count := len(cached_urls) - cached_count:
+        log.info(f"Collected and cached {new_count} new event(s)")
+    else:
+        log.info("No new events found")
+
+    CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
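
For context, main only needs an httpx.AsyncClient. A minimal way to exercise the updated scraper on its own could look like the sketch below, run from the same module (the client settings and entry point are assumptions for illustration):

    # hypothetical standalone runner for the updated scraper
    import asyncio

    import httpx


    async def run() -> None:
        # `main` is the coroutine defined in this module; client settings are illustrative
        async with httpx.AsyncClient(follow_redirects=True, timeout=30) as client:
            await main(client)  # fills the module-level `urls` dict and rewrites the cache file


    if __name__ == "__main__":
        asyncio.run(run())
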