doms9 2025-09-04 19:53:27 -04:00
parent cb9d5637fc
commit 00000d905f
7 changed files with 224 additions and 93 deletions


@@ -18,17 +18,18 @@ CACHE_FILE = Path(__file__).parent / "caches" / "tvpass.json"
 def load_cache() -> dict[str, str]:
     try:
         data = json.loads(CACHE_FILE.read_text(encoding="utf-8"))
-        return {} if 8 <= now.hour <= 12 else data
+        return {} if now.hour <= 12 else data
     except (FileNotFoundError, json.JSONDecodeError):
         return {}
-async def fetch_m3u8(client: httpx.AsyncClient) -> list[str] | None:
+async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]:
     try:
         r = await client.get(BASE_URL)
         r.raise_for_status()
     except Exception as e:
         log.error(f'Failed to fetch "{BASE_URL}"\n{e}')
+        return []
     return r.text.splitlines()
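This hunk widens the cache-invalidation window (any run before 13:00 local time now discards the cache, not only runs between 08:00 and 12:00, assuming now holds the current local datetime) and makes fetch_m3u8 return an empty list instead of None on failure. Below is a minimal, self-contained sketch of that error-handling contract; the placeholder URL, the demo helper, and the logging setup are illustrative and not part of the project:

```python
# Sketch of the new contract: callers always receive a list, possibly empty.
# BASE_URL below is a placeholder, not the project's real playlist URL.
import asyncio
import logging

import httpx

log = logging.getLogger(__name__)
BASE_URL = "https://example.invalid/playlist.m3u8"  # placeholder


async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]:
    try:
        r = await client.get(BASE_URL)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{BASE_URL}"\n{e}')
        return []  # empty list, not None, so callers can iterate unconditionally
    return r.text.splitlines()


async def demo() -> None:
    async with httpx.AsyncClient() as client:
        for line in await fetch_m3u8(client):  # no None-check needed
            print(line)


if __name__ == "__main__":
    asyncio.run(demo())
```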
@@ -41,10 +42,7 @@ async def main(client: httpx.AsyncClient) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    if not (data := await fetch_m3u8(client)):
-        return
-    for i, line in enumerate(data):
+    for i, line in enumerate(data := await fetch_m3u8(client)):
         if line.startswith("#EXTINF"):
             tvg_id_match = re.search(r'tvg-id="([^"]*)"', line)
             tvg_name_match = re.search(r'tvg-name="([^"]*)"', line)
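Because fetch_m3u8 now always returns a list, the explicit early-return guard is folded into the loop header with an assignment expression: on a failed fetch the walrus binds an empty list and the loop body simply never executes, while data stays available after the loop. A standalone illustration of that pattern; fake_fetch and its playlist lines are invented for the example:

```python
# Assignment expression inside enumerate(): bind `data` and iterate in one step.
def fake_fetch() -> list[str]:
    # Invented playlist lines, shaped like the #EXTINF entries the scraper parses.
    return [
        "#EXTM3U",
        '#EXTINF:-1 tvg-id="demo" tvg-name="Demo Channel",Demo Channel',
        "http://example.invalid/demo.m3u8",
    ]


for i, line in enumerate(data := fake_fetch()):
    if line.startswith("#EXTINF"):
        print(i, line)

print(f"scanned {len(data)} line(s)")  # `data` remains bound after the loop
```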
@@ -69,7 +67,6 @@ async def main(client: httpx.AsyncClient) -> None:
             ),
         }
-    if urls:
-        CACHE_FILE.write_text(json.dumps(urls, indent=2), encoding="utf-8")
-        log.info(f"Cached {len(urls)} event(s)")
+    CACHE_FILE.write_text(json.dumps(urls, indent=2), encoding="utf-8")
+    log.info(f"Cached {len(urls)} event(s)")