iptv/M3U8/scrapers/utils/caching.py

import json
from pathlib import Path
from .config import Time


class Cache:
    """Small JSON file cache with timestamp-based expiry."""

    def __init__(self, file: str, exp: int | float) -> None:
        # Cache files live in the package-level "caches" directory.
        self.file = Path(__file__).parent.parent / "caches" / file
        self.exp = exp
        self.now_ts = Time.now().timestamp()

    def is_fresh(self, entry: dict) -> bool:
        """Return True if the entry's timestamp is within the expiry window."""
        ts: float | int = entry.get("timestamp", Time.default_8())
        dt_ts = Time.clean(Time.from_ts(ts)).timestamp()
        return self.now_ts - dt_ts < self.exp

    def write(self, data: dict) -> None:
        """Serialize data to the cache file as UTF-8 JSON, creating the directory if needed."""
        self.file.parent.mkdir(parents=True, exist_ok=True)
        self.file.write_text(
            json.dumps(
                data,
                indent=2,
                ensure_ascii=False,
            ),
            encoding="utf-8",
        )

    def load(
        self,
        per_entry: bool = True,
        index: int | None = None,
    ) -> dict[str, dict[str, str | float]]:
        """Load the cache, discarding stale data.

        With per_entry=True, each top-level entry is kept or dropped based on
        its own timestamp; otherwise the whole cache is kept or discarded
        based on a single timestamp (taken from data[index] when an index is
        given, or from the top level otherwise).
        """
        try:
            data: dict = json.loads(self.file.read_text(encoding="utf-8"))
        except (FileNotFoundError, json.JSONDecodeError):
            return {}

        if per_entry:
            return {k: v for k, v in data.items() if self.is_fresh(v)}

        if index is not None:
            ts: float | int = data[index].get("timestamp", Time.default_8())
        else:
            ts = data.get("timestamp", Time.default_8())

        dt_ts = Time.clean(Time.from_ts(ts)).timestamp()
        return data if self.is_fresh({"timestamp": dt_ts}) else {}


__all__ = ["Cache"]
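
# Usage sketch (illustrative, not part of this module): the cache filename,
# the "url" field, and the six-hour expiry below are assumptions made for the
# example, not values defined elsewhere in the package.
#
#   cache = Cache("example.json", exp=6 * 60 * 60)
#   entries = cache.load()  # per-entry filtering drops stale records
#   entries["example-channel"] = {
#       "url": "https://example.com/stream.m3u8",
#       "timestamp": Time.now().timestamp(),
#   }
#   cache.write(entries)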