diff --git a/.github/workflows/m3u8.yml b/.github/workflows/m3u8.yml
index a0a4fbc..cfc639b 100644
--- a/.github/workflows/m3u8.yml
+++ b/.github/workflows/m3u8.yml
@@ -34,8 +34,8 @@ jobs:
       - name: Cache cert
         uses: actions/cache@v4
         with:
-          path: M3U8/scrape/cached-ca.pem
-          key: cert-cache-${{ runner.os }}-${{ hashFiles('M3U8/scrape/cached-ca.pem') }}
+          path: M3U8/scrape/utils/cached-ca.pem
+          key: cert-cache-${{ runner.os }}-${{ hashFiles('M3U8/scrape/utils/cached-ca.pem') }}
           restore-keys: |
             cert-cache-${{ runner.os }}-
 
@@ -79,7 +79,7 @@ jobs:
         uses: stefanzweifel/git-auto-commit-action@v6
         with:
           commit_message: "update M3U8"
-          file_pattern: "M3U8/TV.m3u8 M3U8/scrape/tvpass.json M3U8/scrape/cached-ca.pem M3U8/scrape/livetvsx.json"
+          file_pattern: "M3U8/TV.m3u8 M3U8/scrape/caches/*.json"
           commit_author: "GitHub Actions Bot <actions@github.com>"
           commit_user_name: "GitHub Actions Bot"
           commit_user_email: "actions@github.com"
diff --git a/M3U8/fetch.py b/M3U8/fetch.py
index 80f1a3d..d0fb83a 100644
--- a/M3U8/fetch.py
+++ b/M3U8/fetch.py
@@ -3,7 +3,8 @@
 import asyncio
 from pathlib import Path
 
 import httpx
-from scrape import ace, fstv, livetvsx, logger, tvpass
+from scrape import ace, fstv, livetvsx, tvpass
+from scrape.utils import logger
 
 log = logger.get_logger(__name__)
diff --git a/M3U8/scrape/ace.py b/M3U8/scrape/ace.py
index 3080531..94848b9 100644
--- a/M3U8/scrape/ace.py
+++ b/M3U8/scrape/ace.py
@@ -6,8 +6,8 @@
 import httpx
 from selectolax.parser import HTMLParser, Node
 
 from .fstv import get_base
-from .logger import get_logger
 from .tvpass import logos
+from .utils.logger import get_logger
 
 log = get_logger(__name__)
diff --git a/M3U8/scrape/tvpass.json b/M3U8/scrape/caches/tvpass.json
similarity index 100%
rename from M3U8/scrape/tvpass.json
rename to M3U8/scrape/caches/tvpass.json
diff --git a/M3U8/scrape/fstv.py b/M3U8/scrape/fstv.py
index 8e6ebb1..703116d 100644
--- a/M3U8/scrape/fstv.py
+++ b/M3U8/scrape/fstv.py
@@ -4,8 +4,8 @@
 from urllib.parse import urljoin
 
 import httpx
 from selectolax.parser import HTMLParser
-from .logger import get_logger
 from .tvpass import logos
+from .utils.logger import get_logger
 
 log = get_logger(__name__)
diff --git a/M3U8/scrape/livetvsx.py b/M3U8/scrape/livetvsx.py
index 7edfb88..bcb7f6f 100644
--- a/M3U8/scrape/livetvsx.py
+++ b/M3U8/scrape/livetvsx.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 import asyncio
 import io
 import json
@@ -11,12 +10,12 @@
 from typing import Any
 
 import httpx
 from playwright.async_api import Request, async_playwright
-from .logger import get_logger
 from .tvpass import TZ, logos
+from .utils.logger import get_logger
 
 log = get_logger(__name__)
 
-urls: dict[str, str] = {}
+urls: dict[str, dict[str, str]] = {}
 
 tvp_sports = set(logos.keys())
@@ -29,9 +28,9 @@ CERT_BUNDL_URLS = [
     "https://ssl.com/repo/certs/Sectigo-AAA-Root.pem",
 ]
 
-CERT_FILE = Path(__file__).parent / "cached-ca.pem"
+CERT_FILE = Path(__file__).parent / "utils" / "cached-ca.pem"
 
-CACHE_FILE = Path(__file__).parent / "livetvsx.json"
+CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"
 
 
 async def safe_process_event(fn, url_num: int, timeout=20) -> Any | None:
diff --git a/M3U8/scrape/tvpass.py b/M3U8/scrape/tvpass.py
index cc2262e..72127f7 100644
--- a/M3U8/scrape/tvpass.py
+++ b/M3U8/scrape/tvpass.py
@@ -6,16 +6,16 @@
 from pathlib import Path
 
 import httpx
 import pytz
 
-from .logger import get_logger
+from .utils.logger import get_logger
 
 log = get_logger(__name__)
 
-base_url = "https://tvpass.org/playlist/m3u"
-
-base_file = Path(__file__).parent / "tvpass.json"
-
 urls: dict[str, dict[str, str]] = {}
+
+BASE_URL = "https://tvpass.org/playlist/m3u"
+
+CACHE_FILE = Path(__file__).parent / "caches" / "tvpass.json"
 
 logos = {
@@ -31,7 +31,7 @@ TZ = pytz.timezone("America/New_York")
 
 def load_cache() -> dict[str, str]:
     try:
-        data = json.loads(base_file.read_text(encoding="utf-8"))
+        data = json.loads(CACHE_FILE.read_text(encoding="utf-8"))
 
         return {} if 8 <= datetime.now(TZ).hour <= 12 else data
     except (FileNotFoundError, json.JSONDecodeError):
@@ -40,10 +40,10 @@
 async def fetch_m3u8(client: httpx.AsyncClient) -> list[str] | None:
     try:
-        r = await client.get(base_url)
+        r = await client.get(BASE_URL)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{base_url}"\n{e}')
+        log.error(f'Failed to fetch "{BASE_URL}"\n{e}')
         return
 
     return r.text.splitlines()
@@ -54,7 +54,7 @@ async def main(client: httpx.AsyncClient) -> None:
         log.info(f"Collected {len(urls)} event(s) from cache")
         return
 
-    log.info(f'Scraping from "{base_url}"')
+    log.info(f'Scraping from "{BASE_URL}"')
 
     if not (data := await fetch_m3u8(client)):
         return
@@ -85,6 +85,6 @@ async def main(client: httpx.AsyncClient) -> None:
     }
 
     if urls:
-        base_file.write_text(json.dumps(urls, indent=2), encoding="utf-8")
+        CACHE_FILE.write_text(json.dumps(urls, indent=2), encoding="utf-8")
 
         log.info(f"Cached {len(urls)} event(s)")
diff --git a/M3U8/scrape/logger.py b/M3U8/scrape/utils/logger.py
similarity index 100%
rename from M3U8/scrape/logger.py
rename to M3U8/scrape/utils/logger.py