This commit is contained in:
parent 7b761c7189
commit 00000d9fe2

6 changed files with 222 additions and 56 deletions
@@ -11,15 +11,10 @@ urls: dict[str, dict[str, str | float]] = {}
 CACHE_FILE = Cache("pixel.json", exp=86_400)
-
-API_FILE = Cache("pixel-api.json", exp=86_400)
 BASE_URL = "https://pixelsport.tv/backend/livetv/events"


-async def refresh_api_cache(
-    url: str,
-    ts: float,
-) -> dict[str, list[dict, str, str]]:
+async def get_api_data(url: str) -> dict[str, list[dict, str, str]]:
     log.info("Refreshing API cache")

     async with async_playwright() as p:
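A note on the return annotation that the new signature carries over unchanged: list[dict, str, str] is not a well-formed generic, since list takes exactly one type parameter. Judging from how the payload is consumed, the intent was presumably a mapping to lists of string-keyed records; a corrected signature would read as below (this is an inference about intent, not something the commit itself fixes):

    # Hedged guess at the intended annotation; list[dict, str, str] in the
    # committed code is presumably a typo for list[dict[str, str]].
    async def get_api_data(url: str) -> dict[str, list[dict[str, str]]]: ...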
@@ -44,23 +39,13 @@ async def refresh_api_cache(
         finally:
             await browser.close()

-    data = json.loads(raw_json)
-
-    data["timestamp"] = ts
-
-    return data
+    return json.loads(raw_json)


-async def get_events(cached_keys: set[str]) -> dict[str, str | float]:
+async def get_events() -> dict[str, dict[str, str | float]]:
     now = Time.clean(Time.now())

-    if not (api_data := API_FILE.load(per_entry=False)):
-        api_data = await refresh_api_cache(
-            BASE_URL,
-            now.timestamp(),
-        )
-
-        API_FILE.write(api_data)
+    api_data = await get_api_data(BASE_URL)

     events = {}
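For orientation, here is the simplified fetcher as it plausibly reads after this commit. Only the signature, the "Refreshing API cache" log line, the finally/browser.close() tail, and the bare return json.loads(raw_json) appear in the diff; the launch-and-navigate middle (and the cleaned-up annotation) is an assumption, a minimal sketch rather than the file's actual code:

    import json
    import logging

    from playwright.async_api import async_playwright

    log = logging.getLogger(__name__)  # stand-in for this module's logger


    async def get_api_data(url: str) -> dict[str, list[dict[str, str]]]:
        log.info("Refreshing API cache")

        async with async_playwright() as p:
            browser = await p.chromium.launch()
            try:
                # Assumption: navigating to the backend endpoint yields the
                # JSON body directly as the navigation response.
                page = await browser.new_page()
                response = await page.goto(url)
                raw_json = await response.text()
            finally:
                await browser.close()

        # refresh_api_cache used to stamp the payload (data["timestamp"] = ts)
        # before persisting it to API_FILE; both steps are gone, so the parsed
        # JSON is now returned untouched.
        return json.loads(raw_json)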
@@ -88,9 +73,6 @@ async def get_events(cached_keys: set[str]) -> dict[str, str | float]:
         if pattern.search(stream_link):
             key = f"[{sport}] {event_name} {z} (PIXL)"

-            if cached_keys & {key}:
-                continue
-
             tvg_id, logo = leagues.get_tvg_info(sport, event_name)

             events[key] = {
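The guard deleted here was the only consumer of the old cached_keys parameter: it skipped events whose key was already in the on-disk cache. Dropping it is consistent with the last hunk, where scrape() now returns early whenever the cache loads at all, so get_events() only ever runs against an empty cache and the check would be dead code. For reference, had it stayed, a plain membership test is the idiomatic form of that set intersection:

    # Equivalent to the removed `if cached_keys & {key}:` check.
    if key in cached_keys:
        continue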
@@ -105,23 +87,17 @@ async def get_events(cached_keys: set[str]) -> dict[str, str | float]:


 async def scrape() -> None:
-    cached_urls = CACHE_FILE.load()
-    cached_count = len(cached_urls)
-    urls.update(cached_urls)
-
-    log.info(f"Loaded {cached_count} event(s) from cache")
+    if cached := CACHE_FILE.load():
+        urls.update(cached)
+        log.info(f"Loaded {len(urls)} event(s) from cache")
+        return

     log.info(f'Scraping from "{BASE_URL}"')

-    events = await get_events(set(cached_urls.keys()))
+    events = await get_events()

     if events:
-        for d in (urls, cached_urls):
-            d |= events
-
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
-        else:
-            log.info("No new events found")
-
-        CACHE_FILE.write(cached_urls)
+        urls.update(events)
+        CACHE_FILE.write(urls)
+        log.info(f"Collected and cached {len(urls)} new event(s)")
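Assembled from the added lines above, the new scrape() is a straight cache-or-fetch path; everything in this sketch is taken from the + lines, only the comments are new:

    async def scrape() -> None:
        # Warm cache: serve entirely from disk and skip scraping.
        if cached := CACHE_FILE.load():
            urls.update(cached)
            log.info(f"Loaded {len(urls)} event(s) from cache")
            return

        log.info(f'Scraping from "{BASE_URL}"')

        events = await get_events()

        if events:
            urls.update(events)
            CACHE_FILE.write(urls)
            log.info(f"Collected and cached {len(urls)} new event(s)")

One behavioral shift worth noting: the old version merged freshly scraped events into a warm cache on every run, while this version returns before scraping whenever the cache loads at all, so new events are only picked up once pixel.json expires (exp=86_400, i.e. 24 hours). The "new event(s)" count is still accurate on the scrape path, since urls starts empty there.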