e
This commit is contained in:
parent 00000d98b2
commit 00000d9449
5 changed files with 17 additions and 18 deletions
@@ -1,6 +1,6 @@
 {
   "success": true,
-  "timestamp": 1757200717,
+  "timestamp": 1756995060,
   "READ_ME": "Interested in using our API? Contact us for more information.",
   "performance": 0.013457059860229492,
   "streams": [

@@ -213,11 +213,11 @@ async def get_events(
 ) -> list[dict[str, str]]:
     events: list[dict[str, str]] = []
-    pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
 
     window_start, window_end = now - timedelta(hours=1), now + timedelta(minutes=30)
 
     if buffer := await fetch_xml_stream(url, ssl_ctx):
+        pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
 
         for _, elem in ET.iterparse(buffer, events=("end",)):
             if elem.tag == "item":
                 title = elem.findtext("title")

@@ -6,7 +6,7 @@ from urllib.parse import urljoin
 import httpx
 from selectolax.parser import HTMLParser
 
-from .utils import get_logger, load_cache, safe_process_event
+from .utils import get_logger, load_cache, now, safe_process_event
 
 log = get_logger(__name__)
 
@@ -79,7 +79,7 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    if cached := load_cache(CACHE_FILE, hour=12):
+    if cached := load_cache(CACHE_FILE, exp=86400):
         urls.update(cached)
         log.info(f"Collected {len(urls)} event(s) from cache")
         return

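Note: as far as the diff shows, the old hour=12 argument gated the cache on the time of day (the removed branch in utils returns an empty dict until after 12:00), while exp=86400 keeps any entry younger than 86400 seconds, i.e. 24 hours (24 × 60 × 60 = 86400).
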
@@ -101,6 +101,7 @@ async def main(client: httpx.AsyncClient) -> None:
             entry = {
                 "url": url,
                 "logo": ev["logo"],
+                "timestamp": now.timestamp(),
             }
 
             urls[f"[{ev['sport']}] {ev['event']}"] = entry

@@ -4,7 +4,7 @@ from pathlib import Path
 
 import httpx
 
-from .utils import LOGOS, get_logger, load_cache
+from .utils import LOGOS, get_logger, load_cache, now
 
 log = get_logger(__name__)

@@ -27,7 +27,7 @@ async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]:
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    if cached := load_cache(CACHE_FILE, hour=12):
+    if cached := load_cache(CACHE_FILE, exp=86400):
         urls.update(cached)
         log.info(f"Collected {len(urls)} event(s) from cache")
         return

@@ -57,6 +57,7 @@ async def main(client: httpx.AsyncClient) -> None:
                 sport,
                 "https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png",
             ),
+            "timestamp": now.timestamp(),
         }
 
         CACHE_FILE.write_text(json.dumps(urls, indent=2), encoding="utf-8")

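Note: both scrapers now stamp every cache entry with now.timestamp() as it is written. The filter in load_cache falls back to v.get("timestamp", 0) for unstamped entries, so anything written before this commit reads as epoch 0 and is dropped on the next load.
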
@@ -71,20 +71,17 @@ def get_logger(name: str | None = None) -> logging.Logger:
 def load_cache(
     file: Path,
     exp: int | float = None,
-    hour: int = None,
 ) -> dict[str, dict[str, str | float]]:
     try:
-        data = json.loads(file.read_text(encoding="utf-8"))
-
-        if exp:
-            return {
-                k: v
-                for k, v in data.items()
-                if now.timestamp() - v.get("timestamp", 0) < exp
-            }
-
-        elif hour:
-            return {} if now.hour <= hour else data
+        data: dict[str, dict[str, str | float]] = json.loads(
+            file.read_text(encoding="utf-8")
+        )
+
+        return {
+            k: v
+            for k, v in data.items()
+            if now.timestamp() - v.get("timestamp", 0) < exp
+        }
     except (FileNotFoundError, json.JSONDecodeError):
         return {}

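For reference, here is a minimal, self-contained sketch of the new expiry behaviour that runs outside the project. It mirrors the load_cache body from the hunk above, but the exp=86400 default, the time.time() stand-in for the now helper from .utils, and the demo_cache.json file are assumptions for the demo, not part of the commit:

import json
import time
from pathlib import Path


def load_cache(
    file: Path,
    exp: int | float = 86400,  # assumed default; the real signature keeps None
) -> dict[str, dict[str, str | float]]:
    # Keep only entries younger than `exp` seconds; entries with no
    # "timestamp" field fall back to 0 and are always discarded.
    try:
        data: dict[str, dict[str, str | float]] = json.loads(
            file.read_text(encoding="utf-8")
        )
        return {
            k: v
            for k, v in data.items()
            if time.time() - v.get("timestamp", 0) < exp
        }
    except (FileNotFoundError, json.JSONDecodeError):
        return {}


# Hypothetical demo file: one fresh entry, one 25-hour-old entry.
cache = Path("demo_cache.json")
cache.write_text(
    json.dumps(
        {
            "fresh": {"url": "https://example.com/a", "timestamp": time.time()},
            "stale": {"url": "https://example.com/b", "timestamp": time.time() - 90000},
        },
        indent=2,
    ),
    encoding="utf-8",
)

print(load_cache(cache, exp=86400))  # only "fresh" survives the 24 h window

One caveat visible in the hunk: the new body always compares against exp, so calling load_cache(file) with the remaining default of None would raise a TypeError on a non-empty cache; both call sites in this commit pass exp explicitly.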