doms9 2025-09-04 09:59:19 -04:00
parent 1257f84f58
commit 00000d940e
6 changed files with 29 additions and 48 deletions


@@ -9,7 +9,7 @@ from pathlib import Path
 import httpx
 from playwright.async_api import Request, async_playwright

-from .utils import LOGOS, TZ, get_logger, safe_process_event
+from .utils import LOGOS, TZ, get_logger, now, safe_process_event

 log = get_logger(__name__)
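The .utils side of this commit is not shown in the hunks here; the call sites below use the imported name as a plain datetime value (now - mtime, now.timestamp()). A minimal sketch of what that helper is assumed to look like, with a hypothetical zone name standing in for the real TZ defined in .utils:

# utils.py -- hedged sketch only; not taken from this commit
from datetime import datetime
from zoneinfo import ZoneInfo

TZ = ZoneInfo("America/New_York")  # hypothetical; the actual zone lives in .utils

now = datetime.now(TZ)  # assumed: a tz-aware snapshot taken once, when .utils is imported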
@@ -54,7 +54,7 @@ async def get_cert(client: httpx.AsyncClient) -> ssl.SSLContext:
     if CERT_FILE.is_file():
         mtime = datetime.fromtimestamp(CERT_FILE.stat().st_mtime)

-        if datetime.now() - mtime < timedelta(days=30):
+        if now - mtime < timedelta(days=30):
             return ssl.create_default_context(cafile=CERT_FILE)

     log.info("Refreshing cached certificate")
@@ -68,9 +68,7 @@ def load_cache() -> dict[str, dict[str, str | str]]:
     try:
         data: dict = json.loads(CACHE_FILE.read_text(encoding="utf-8"))

-        now = datetime.now(TZ).timestamp()
-
-        age: float = now - data.get("timestamp", 0)
+        age: float = now.timestamp() - data.get("timestamp", 0)

         return {k: v for k, v in data.items() if age < 14400}  # 4 hours

     except (FileNotFoundError, json.JSONDecodeError):
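For reference, the freshness gate in this hunk compares epoch seconds against a 14400-second (4-hour) window. A tiny standalone illustration under the same assumption about the shared snapshot (names here are hypothetical):

# hedged example, independent of the project code
from datetime import datetime, timezone

now = datetime.now(timezone.utc)      # stands in for the shared .utils snapshot
cached_at = now.timestamp() - 3600    # pretend the cache was stamped an hour ago
age = now.timestamp() - cached_at
print(age < 14400)                    # True: still inside the 4-hour window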
@@ -229,7 +227,6 @@ async def get_events(
     events: list[dict[str, str]] = []

     pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
-    now = datetime.now(TZ)

     window_start, window_end = now - timedelta(hours=3), now + timedelta(hours=1)
@@ -294,8 +291,6 @@ async def main(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} URLs")

-    now_ts = datetime.now(TZ).timestamp()
-
     for i, ev in enumerate(events, start=1):
         sport = ev["sport"]
         event = ev["event"]
@@ -317,15 +312,11 @@ async def main(client: httpx.AsyncClient) -> None:
                 sport,
                 "https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png",
             ),
-            "timestamp": now_ts,
+            "timestamp": now.timestamp(),
         }

         urls[key] = cached_urls[key] = entry

     CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")

-    new_count = len(cached_urls) - cached_count
-
     log.info(f"Cached {cached_count} event(s)")
-    log.info(f"Collected {new_count} new event(s)")
+    log.info(f"Collected {len(cached_urls) - cached_count} event(s)")