doms9 2025-09-03 03:14:52 -04:00
parent 00000d932b
commit 00000d9e96
8 changed files with 21 additions and 21 deletions


@@ -34,8 +34,8 @@ jobs:
       - name: Cache cert
        uses: actions/cache@v4
        with:
-          path: M3U8/scrape/cached-ca.pem
-          key: cert-cache-${{ runner.os }}-${{ hashFiles('M3U8/scrape/cached-ca.pem') }}
+          path: M3U8/scrape/utils/cached-ca.pem
+          key: cert-cache-${{ runner.os }}-${{ hashFiles('M3U8/scrape/utils/cached-ca.pem') }}
           restore-keys: |
             cert-cache-${{ runner.os }}-
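
The cache key embeds a hash of the cert bundle, so the cached entry is invalidated whenever the file's contents change; only the path moves in this commit. A minimal Python sketch of the same content-hash idea (paths are illustrative; GitHub's hashFiles() uses SHA-256 over the matched files, and platform.system() stands in loosely for runner.os):

# Sketch of the content-hash cache key pattern used above.
import hashlib
import platform
from pathlib import Path

def cache_key(path: Path) -> str:
    # The key changes whenever the cert bundle's bytes change.
    digest = hashlib.sha256(path.read_bytes()).hexdigest()
    return f"cert-cache-{platform.system()}-{digest}"

print(cache_key(Path("M3U8/scrape/utils/cached-ca.pem")))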
@@ -79,7 +79,7 @@ jobs:
        uses: stefanzweifel/git-auto-commit-action@v6
        with:
          commit_message: "update M3U8"
-          file_pattern: "M3U8/TV.m3u8 M3U8/scrape/tvpass.json M3U8/scrape/cached-ca.pem M3U8/scrape/livetvsx.json"
+          file_pattern: "M3U8/TV.m3u8 M3U8/scrape/caches/*.json"
          commit_author: "GitHub Actions Bot <actions@github.com>"
          commit_user_name: "GitHub Actions Bot"
          commit_user_email: "actions@github.com"


@@ -3,7 +3,8 @@ import asyncio
 from pathlib import Path
 import httpx
-from scrape import ace, fstv, livetvsx, logger, tvpass
+from scrape import ace, fstv, livetvsx, tvpass
+from scrape.utils import logger
 log = logger.get_logger(__name__)
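
The logger helper now lives in a scrape.utils subpackage; every module in the diff below switches its import accordingly. The helper's implementation is not part of this commit — a hypothetical minimal sketch of what a get_logger() like this usually looks like:

# scrape/utils/logger.py -- hypothetical minimal implementation; the real
# module is not shown in this commit.
import logging

def get_logger(name: str) -> logging.Logger:
    logger = logging.getLogger(name)
    if not logger.handlers:  # avoid attaching duplicate handlers on re-import
        handler = logging.StreamHandler()
        handler.setFormatter(
            logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(message)s")
        )
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)
    return logger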


@@ -6,8 +6,8 @@ import httpx
 from selectolax.parser import HTMLParser, Node
 from .fstv import get_base
-from .logger import get_logger
 from .tvpass import logos
+from .utils.logger import get_logger
 log = get_logger(__name__)


@@ -4,8 +4,8 @@ from urllib.parse import urljoin
 import httpx
 from selectolax.parser import HTMLParser
-from .logger import get_logger
 from .tvpass import logos
+from .utils.logger import get_logger
 log = get_logger(__name__)


@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 import asyncio
 import io
 import json
@@ -11,12 +10,12 @@ from typing import Any
 import httpx
 from playwright.async_api import Request, async_playwright
-from .logger import get_logger
 from .tvpass import TZ, logos
+from .utils.logger import get_logger
 log = get_logger(__name__)
-urls: dict[str, str] = {}
+urls: dict[str, dict[str, str]] = {}
 tvp_sports = set(logos.keys())
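
The urls accumulator widens from a flat str-to-str mapping to a nested one, matching the dict[str, dict[str, str]] shape already used in tvpass.py further down. The inner keys are not visible in this diff; the shape below is a hypothetical illustration only:

# Hypothetical shape of the widened mapping; the real inner keys are not
# shown in this commit.
urls: dict[str, dict[str, str]] = {
    "NBA: Lakers vs Celtics": {
        "logo": "https://i.gyazo.com/773c23570f095a5d549c23b9401d83f4.png",
        "url": "https://example.com/stream.m3u8",
    },
}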
@@ -29,9 +28,9 @@ CERT_BUNDL_URLS = [
     "https://ssl.com/repo/certs/Sectigo-AAA-Root.pem",
 ]
-CERT_FILE = Path(__file__).parent / "cached-ca.pem"
+CERT_FILE = Path(__file__).parent / "utils" / "cached-ca.pem"
-CACHE_FILE = Path(__file__).parent / "livetvsx.json"
+CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"
 async def safe_process_event(fn, url_num: int, timeout=20) -> Any | None:
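
Both files move into subdirectories of the package. On a fresh checkout those directories must exist before the scraper writes to them; a sketch of a guard, assuming the commit does not already add the directories with placeholder files:

# Sketch: ensure the new subdirectories exist before writing; parents= and
# exist_ok= make this safe to run repeatedly.
from pathlib import Path

CERT_FILE = Path(__file__).parent / "utils" / "cached-ca.pem"
CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"

for f in (CERT_FILE, CACHE_FILE):
    f.parent.mkdir(parents=True, exist_ok=True)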


@@ -6,16 +6,16 @@ from pathlib import Path
 import httpx
 import pytz
-from .logger import get_logger
+from .utils.logger import get_logger
 log = get_logger(__name__)
-base_url = "https://tvpass.org/playlist/m3u"
-base_file = Path(__file__).parent / "tvpass.json"
 urls: dict[str, dict[str, str]] = {}
+BASE_URL = "https://tvpass.org/playlist/m3u"
+CACHE_FILE = Path(__file__).parent / "caches" / "tvpass.json"
 logos = {
     "MLB": "https://i.gyazo.com/0fe7865ef2f06c9507791b24f04dbca8.png",
     "NBA": "https://i.gyazo.com/773c23570f095a5d549c23b9401d83f4.png",
@@ -31,7 +31,7 @@ TZ = pytz.timezone("America/New_York")
 def load_cache() -> dict[str, str]:
     try:
-        data = json.loads(base_file.read_text(encoding="utf-8"))
+        data = json.loads(CACHE_FILE.read_text(encoding="utf-8"))
         return {} if 8 <= datetime.now(TZ).hour <= 12 else data
     except (FileNotFoundError, json.JSONDecodeError):
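
load_cache() treats the cache as stale during a fixed morning window: both comparisons are inclusive, so any hour from 8 through 12 (08:00 to 12:59 Eastern) yields an empty dict. A standalone restatement of that check:

# Standalone sketch of the invalidation window in load_cache().
from datetime import datetime
import pytz

TZ = pytz.timezone("America/New_York")

def cache_is_stale(now: datetime | None = None) -> bool:
    # Inclusive bounds: hours 8, 9, 10, 11, and 12 all count as stale.
    hour = (now or datetime.now(TZ)).hour
    return 8 <= hour <= 12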
@@ -40,10 +40,10 @@ def load_cache() -> dict[str, str]:
 async def fetch_m3u8(client: httpx.AsyncClient) -> list[str] | None:
     try:
-        r = await client.get(base_url)
+        r = await client.get(BASE_URL)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{base_url}"\n{e}')
+        log.error(f'Failed to fetch "{BASE_URL}"\n{e}')
     return r.text.splitlines()
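
The hunk header declares ten lines, so the render above has dropped some of them; presumably the except branch ends with a return None, since r would otherwise be unbound on the final line after a failed request. A self-contained sketch of the full pattern under that assumption:

# Sketch of the fetch-and-split helper, assuming the elided except branch
# ends with "return None".
import httpx

async def fetch_lines(client: httpx.AsyncClient, url: str) -> list[str] | None:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        print(f'Failed to fetch "{url}"\n{e}')
        return None  # without this, the final line would raise NameError
    return r.text.splitlines()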
@@ -54,7 +54,7 @@ async def main(client: httpx.AsyncClient) -> None:
         log.info(f"Collected {len(urls)} event(s) from cache")
         return
-    log.info(f'Scraping from "{base_url}"')
+    log.info(f'Scraping from "{BASE_URL}"')
     if not (data := await fetch_m3u8(client)):
         return
@@ -85,6 +85,6 @@ async def main(client: httpx.AsyncClient) -> None:
     }
     if urls:
-        base_file.write_text(json.dumps(urls, indent=2), encoding="utf-8")
+        CACHE_FILE.write_text(json.dumps(urls, indent=2), encoding="utf-8")
         log.info(f"Cached {len(urls)} event(s)")