parent 00000d98b2
commit 00000d9449

5 changed files with 17 additions and 18 deletions
@@ -1,6 +1,6 @@
 {
   "success": true,
-  "timestamp": 1757200717,
+  "timestamp": 1756995060,
   "READ_ME": "Interested in using our API? Contact us for more information.",
   "performance": 0.013457059860229492,
   "streams": [

@@ -213,11 +213,11 @@ async def get_events(
 ) -> list[dict[str, str]]:
     events: list[dict[str, str]] = []
-    pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
 
     window_start, window_end = now - timedelta(hours=1), now + timedelta(minutes=30)
 
     if buffer := await fetch_xml_stream(url, ssl_ctx):
+        pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
 
         for _, elem in ET.iterparse(buffer, events=("end",)):
             if elem.tag == "item":
                 title = elem.findtext("title")

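Note: moving pub_date_format inside the "if buffer := ..." block only narrows its scope to where it is used; behavior is unchanged. For context, a minimal sketch of how an RSS item's pubDate could be checked against the window built above. The pubDate field, the in_window helper, and the UTC now are illustrative assumptions, not the repository's code:

    from datetime import datetime, timedelta, timezone
    import xml.etree.ElementTree as ET

    pub_date_format = "%a, %d %b %Y %H:%M:%S %z"
    now = datetime.now(timezone.utc)
    window_start, window_end = now - timedelta(hours=1), now + timedelta(minutes=30)

    def in_window(item: ET.Element) -> bool:
        # RSS 2.0 pubDate looks like "Mon, 08 Sep 2025 18:30:00 +0000"
        raw = item.findtext("pubDate")
        if not raw:
            return False
        try:
            published = datetime.strptime(raw, pub_date_format)
        except ValueError:
            return False
        return window_start <= published <= window_end
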
@@ -6,7 +6,7 @@ from urllib.parse import urljoin
 import httpx
 from selectolax.parser import HTMLParser
 
-from .utils import get_logger, load_cache, safe_process_event
+from .utils import get_logger, load_cache, now, safe_process_event
 
 log = get_logger(__name__)
 

@@ -79,7 +79,7 @@ async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    if cached := load_cache(CACHE_FILE, hour=12):
+    if cached := load_cache(CACHE_FILE, exp=86400):
         urls.update(cached)
         log.info(f"Collected {len(urls)} event(s) from cache")
         return

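Note: this changes the cache policy, not just a parameter name. load_cache(CACHE_FILE, hour=12) returned an empty cache whenever now.hour <= 12 and the full cache otherwise; exp=86400 instead gives each entry its own lifetime of 86400 seconds (24 hours), measured against the per-entry "timestamp" field added below. A rough sketch of the two checks, with illustrative names and a stand-in now:

    from datetime import datetime, timezone

    now = datetime.now(timezone.utc)

    # old policy: all-or-nothing, keyed to the wall-clock hour
    keep_all = now.hour > 12                 # cache only usable after hour 12

    # new policy: per-entry TTL against the stored write time
    exp = 86400                              # 24 h in seconds
    written_at = now.timestamp() - 3600      # e.g. an entry written an hour ago
    keep_entry = now.timestamp() - written_at < exp  # True: still fresh
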
@@ -101,6 +101,7 @@ async def main(client: httpx.AsyncClient) -> None:
         entry = {
             "url": url,
             "logo": ev["logo"],
+            "timestamp": now.timestamp(),
         }
 
         urls[f"[{ev['sport']}] {ev['event']}"] = entry

@@ -4,7 +4,7 @@ from pathlib import Path
 
 import httpx
 
-from .utils import LOGOS, get_logger, load_cache
+from .utils import LOGOS, get_logger, load_cache, now
 
 log = get_logger(__name__)
 

@@ -27,7 +27,7 @@ async def fetch_m3u8(client: httpx.AsyncClient) -> list[str]:
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    if cached := load_cache(CACHE_FILE, hour=12):
+    if cached := load_cache(CACHE_FILE, exp=86400):
         urls.update(cached)
         log.info(f"Collected {len(urls)} event(s) from cache")
         return

@@ -57,6 +57,7 @@ async def main(client: httpx.AsyncClient) -> None:
                 sport,
                 "https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png",
             ),
+            "timestamp": now.timestamp(),
         }
 
     CACHE_FILE.write_text(json.dumps(urls, indent=2), encoding="utf-8")

@@ -71,20 +71,17 @@ def get_logger(name: str | None = None) -> logging.Logger:
 def load_cache(
     file: Path,
     exp: int | float = None,
-    hour: int = None,
 ) -> dict[str, dict[str, str | float]]:
     try:
-        data = json.loads(file.read_text(encoding="utf-8"))
+        data: dict[str, dict[str, str | float]] = json.loads(
+            file.read_text(encoding="utf-8")
+        )
 
-        if exp:
-            return {
-                k: v
-                for k, v in data.items()
-                if now.timestamp() - v.get("timestamp", 0) < exp
-            }
-
-        elif hour:
-            return {} if now.hour <= hour else data
+        return {
+            k: v
+            for k, v in data.items()
+            if now.timestamp() - v.get("timestamp", 0) < exp
+        }
     except (FileNotFoundError, json.JSONDecodeError):
         return {}
 

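Note: with the hour branch removed, load_cache always filters by TTL. One caveat visible in the diff: exp still defaults to None, and now.timestamp() - ... < None raises TypeError, so every caller must pass exp. A self-contained sketch of the resulting behavior, with exp made required and a stand-in for the module-level now assumed to come from .utils:

    import json
    from datetime import datetime, timezone
    from pathlib import Path

    now = datetime.now(timezone.utc)  # stand-in for .utils.now

    def load_cache(file: Path, exp: float) -> dict[str, dict[str, str | float]]:
        # Read the JSON cache and keep only entries younger than exp seconds.
        try:
            data: dict[str, dict[str, str | float]] = json.loads(
                file.read_text(encoding="utf-8")
            )
            return {
                k: v
                for k, v in data.items()
                if now.timestamp() - v.get("timestamp", 0) < exp
            }
        except (FileNotFoundError, json.JSONDecodeError):
            return {}

    # Entries stamped with "timestamp": now.timestamp() stay visible for
    # exp seconds after they were written:
    cached = load_cache(Path("cache.json"), exp=86400)  # 24-hour TTL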