commit 00000d9937
parent 7103b0f1c4
17 changed files with 597 additions and 524 deletions
@@ -4,15 +4,7 @@ from urllib.parse import unquote, urljoin
 import httpx
 from selectolax.parser import HTMLParser
 
-from .utils import (
-    get_base,
-    get_logger,
-    leagues,
-    load_cache,
-    now,
-    safe_process_event,
-    write_cache,
-)
+from .utils import Cache, Time, get_logger, leagues, network
 
 log = get_logger(__name__)
 
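The import change is the core of this refactor: the flat helpers get_base, load_cache, now, safe_process_event, and write_cache are regrouped behind a Cache class, a Time helper, and a network submodule. A plausible utils/__init__.py that would satisfy the new import line (module paths are guesses for illustration, not read from the repo):

# Hypothetical utils/__init__.py implied by the new import line.
from .cache import Cache          # JSON file cache with a TTL (sketched below)
from .clock import Time           # clock helper, replaces the module-level `now`
from .log import get_logger       # per-module logger factory
from .constants import leagues    # league metadata shared by the scrapers
from . import network             # get_base(), safe_process(), ...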
@@ -25,7 +17,7 @@ MIRRORS = [
     "https://fstv.us",
 ]
 
-CACHE_FILE = Path(__file__).parent / "caches" / "fstv.json"
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "fstv.json", exp=10_800)
 
 
 async def get_events(
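The exp=10_800 argument is a TTL of three hours in seconds. From the call sites in this diff (CACHE_FILE.load() and CACHE_FILE.write(cached_urls) below), Cache plausibly looks like the following minimal sketch; it assumes entries carry the "timestamp" field written later in this file, and none of it is taken from the repo's actual utils:

# Minimal sketch of the assumed Cache helper (hypothetical implementation).
import json
import time
from pathlib import Path


class Cache:
    def __init__(self, path: Path, exp: int) -> None:
        self.path = path  # JSON file backing the cache
        self.exp = exp    # entry lifetime in seconds (10_800 = 3 hours)

    def load(self) -> dict:
        """Return cached entries younger than `exp`; {} if the file is absent."""
        try:
            data = json.loads(self.path.read_text())
        except (FileNotFoundError, json.JSONDecodeError):
            return {}
        cutoff = time.time() - self.exp
        return {k: v for k, v in data.items() if v.get("timestamp", 0) > cutoff}

    def write(self, entries: dict) -> None:
        """Persist all entries, creating the caches/ directory if needed."""
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.path.write_text(json.dumps(entries, indent=2))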
@@ -39,7 +31,7 @@ async def get_events(
         r = await client.get(base_url)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{base_url}"\n{e}')
+        log.error(f'Failed to fetch "{base_url}": {e}')
 
         return []
 
@@ -115,20 +107,20 @@ async def process_event(
 
             if src := ifr.attributes.get("src", ""):
                 log.info(f"URL {url_num}) Captured M3U8")
-                return match_name, unquote(src).split("link=")[-1]
+                return match_name or "", unquote(src).split("link=")[-1]
 
 
 async def scrape(client: httpx.AsyncClient) -> None:
-    cached_urls = load_cache(CACHE_FILE, exp=10_800)
+    cached_urls = CACHE_FILE.load()
     cached_hrefs = {entry["href"] for entry in cached_urls.values()}
     cached_count = len(cached_urls)
     urls.update(cached_urls)
 
-    log.info(f"Collected {cached_count} event(s) from cache")
+    log.info(f"Loaded {cached_count} event(s) from cache")
 
-    if not (base_url := await get_base(client, MIRRORS)):
+    if not (base_url := await network.get_base(MIRRORS)):
         log.warning("No working FSTV mirrors")
-        write_cache(CACHE_FILE, cached_urls)
+        CACHE_FILE.write(cached_urls)
         return
 
     events = await get_events(
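Two signature changes are visible here: CACHE_FILE.load() no longer needs an exp argument (the TTL now lives on the Cache instance), and network.get_base(MIRRORS) drops the client parameter that the old get_base(client, MIRRORS) took, suggesting the network module manages its own client. The match_name or "" change in process_event likewise normalizes a possible None to an empty string. A sketch of get_base under the own-client assumption:

# Sketch of network.get_base; the timeout and redirect handling are assumptions.
import httpx


async def get_base(mirrors: list[str], timeout: float = 10.0) -> str:
    """Return the first mirror that responds successfully, or "" if none do."""
    async with httpx.AsyncClient(timeout=timeout, follow_redirects=True) as client:
        for mirror in mirrors:
            try:
                r = await client.get(mirror)
                r.raise_for_status()
                return mirror
            except httpx.HTTPError:
                continue
    return ""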
@@ -140,7 +132,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
     for i, ev in enumerate(events, start=1):
-        match_name, url = await safe_process_event(
+        match_name, url = await network.safe_process(
             lambda: process_event(
                 client,
                 ev["link"],
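network.safe_process takes a zero-argument factory (the lambda) instead of an already-awaited coroutine, which lets the helper build a fresh coroutine per attempt and retry on failure. A sketch; the retry policy and the ("", "") failure value are assumptions, not the repo's contract:

# Sketch of the assumed network.safe_process wrapper.
import asyncio
from collections.abc import Awaitable, Callable


async def safe_process(
    factory: Callable[[], Awaitable[tuple[str, str]]],
    retries: int = 2,
) -> tuple[str, str]:
    """Run factory(), retrying on error; return ("", "") if every attempt fails."""
    for attempt in range(retries + 1):
        try:
            return await factory()
        except Exception:
            if attempt < retries:
                await asyncio.sleep(1)  # brief backoff between attempts
    return ("", "")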
@@ -163,8 +155,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
             "url": url,
             "logo": logo,
             "base": base_url,
-            "timestamp": now.timestamp(),
-            "id": tvg_id,
+            "timestamp": Time.now().timestamp(),
+            "id": tvg_id or "Live.Event.us",
             "href": ev["href"],
         }
 
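Time.now().timestamp() replaces the old module-level now, so each entry is stamped when it is built rather than once at import time, which matters for the TTL check in Cache.load(); the tvg_id or "Live.Event.us" fallback gives events without an EPG id a generic one. A minimal sketch of Time, assuming UTC:

# Sketch of the assumed Time helper.
from datetime import datetime, timezone


class Time:
    @staticmethod
    def now() -> datetime:
        """Current timezone-aware time (UTC assumed here)."""
        return datetime.now(timezone.utc)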
@@ -175,4 +167,4 @@ async def scrape(client: httpx.AsyncClient) -> None:
     else:
         log.info("No new events found")
 
-    write_cache(CACHE_FILE, cached_urls)
+    CACHE_FILE.write(cached_urls)