This commit is contained in:
parent 00000d932b
commit 00000d9e96
8 changed files with 21 additions and 21 deletions
@@ -6,16 +6,16 @@ from pathlib import Path
 import httpx
 import pytz
 
-from .logger import get_logger
+from .utils.logger import get_logger
 
 log = get_logger(__name__)
 
-base_url = "https://tvpass.org/playlist/m3u"
-base_file = Path(__file__).parent / "tvpass.json"
+BASE_URL = "https://tvpass.org/playlist/m3u"
+CACHE_FILE = Path(__file__).parent / "caches" / "tvpass.json"
 
 urls: dict[str, dict[str, str]] = {}
 
 logos = {
     "MLB": "https://i.gyazo.com/0fe7865ef2f06c9507791b24f04dbca8.png",
     "NBA": "https://i.gyazo.com/773c23570f095a5d549c23b9401d83f4.png",
 
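Note: the import change implies get_logger now lives in a utils subpackage. A sketch of the layout this assumes; the package and file names are hypothetical, since the diff does not show them:

app/
├── __init__.py
├── tvpass.py        # the module changed in this diff (hypothetical name)
└── utils/
    ├── __init__.py
    └── logger.py    # defines get_logger()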
@@ -31,7 +31,7 @@ TZ = pytz.timezone("America/New_York")
 
 def load_cache() -> dict[str, str]:
     try:
-        data = json.loads(base_file.read_text(encoding="utf-8"))
+        data = json.loads(CACHE_FILE.read_text(encoding="utf-8"))
 
         return {} if 8 <= datetime.now(TZ).hour <= 12 else data
     except (FileNotFoundError, json.JSONDecodeError):
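Note: the guard in load_cache() treats the cache as stale for the whole 08:00-12:59 ET window, since 8 <= hour <= 12 is inclusive of hour 12. A minimal, self-contained sketch of that predicate:

from datetime import datetime

import pytz

TZ = pytz.timezone("America/New_York")

# Mirrors the check in load_cache(): during 08:00-12:59 ET the cached
# mapping is discarded so the playlist gets rescraped.
now = datetime.now(TZ)
stale = 8 <= now.hour <= 12
print(f"{now:%H:%M} ET -> cache {'ignored' if stale else 'served'}")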
@@ -40,10 +40,10 @@ def load_cache() -> dict[str, str]:
 
 async def fetch_m3u8(client: httpx.AsyncClient) -> list[str] | None:
     try:
-        r = await client.get(base_url)
+        r = await client.get(BASE_URL)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{base_url}"\n{e}')
+        log.error(f'Failed to fetch "{BASE_URL}"\n{e}')
 
     return r.text.splitlines()
 
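Note: fetch_m3u8 logs and swallows request errors, and per its annotation callers should treat a falsy result as failure. A hedged usage sketch, assuming the module is importable as tvpass (hypothetical name):

import asyncio

import httpx

from tvpass import fetch_m3u8  # hypothetical module name


async def demo() -> None:
    async with httpx.AsyncClient(timeout=10) as client:
        # Guard the result: the coroutine signals failure with a falsy value.
        if lines := await fetch_m3u8(client):
            print(f"fetched {len(lines)} playlist line(s)")


asyncio.run(demo())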
@@ -54,7 +54,7 @@ async def main(client: httpx.AsyncClient) -> None:
         log.info(f"Collected {len(urls)} event(s) from cache")
         return
 
-    log.info(f'Scraping from "{base_url}"')
+    log.info(f'Scraping from "{BASE_URL}"')
 
     if not (data := await fetch_m3u8(client)):
         return
@@ -85,6 +85,6 @@ async def main(client: httpx.AsyncClient) -> None:
     }
 
     if urls:
-        base_file.write_text(json.dumps(urls, indent=2), encoding="utf-8")
+        CACHE_FILE.write_text(json.dumps(urls, indent=2), encoding="utf-8")
 
         log.info(f"Cached {len(urls)} event(s)")
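Note: Path.write_text does not create parent directories, so the new caches/ folder must already exist when this line runs. A defensive sketch, not part of this commit:

# Assumed guard, not in the diff: create caches/ before the first write so
# CACHE_FILE.write_text() cannot raise FileNotFoundError.
CACHE_FILE.parent.mkdir(parents=True, exist_ok=True)
CACHE_FILE.write_text(json.dumps(urls, indent=2), encoding="utf-8")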