parent 00000d90bc
commit 00000d9c6a
3 changed files with 25 additions and 16 deletions
@@ -164,7 +164,7 @@ async def get_events(
                 {
                     "sport": sport,
                     "event": name,
-                    "link": urljoin(base_url, f"/live/{uri_name}"),
+                    "link": urljoin(base_url, f"live/{uri_name}"),
                     "logo": logo,
                 }
             )
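For context on the link change: urllib.parse.urljoin treats a target that starts with "/" as host-absolute, so it discards any sub-path in the mirror's base URL, while a relative target is resolved against that base. A minimal sketch of the difference, using a made-up mirror URL rather than a real MIRRORS entry:

from urllib.parse import urljoin

base = "https://mirror.example/ppv/"  # hypothetical mirror served under a sub-path

# Leading slash: resolved against the host root, the /ppv/ prefix is dropped.
print(urljoin(base, "/live/some-event"))   # https://mirror.example/live/some-event

# Relative path: resolved against the base URL, the prefix is kept.
print(urljoin(base, "live/some-event"))    # https://mirror.example/ppv/live/some-event

# Caveat: without a trailing slash on the base, its last segment is replaced.
print(urljoin("https://mirror.example/ppv", "live/some-event"))  # https://mirror.example/live/some-event

The same reasoning applies to the api/streams join in the next hunk.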
@@ -173,21 +173,21 @@ async def get_events(
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    if not (base_url := await get_base(client, MIRRORS)):
-        log.warning("No working PPV mirrors")
-        return
-
-    log.info(f'Scraping from "{base_url}"')
-
     cached_urls = load_cache(CACHE_FILE, exp=14400)
     cached_count = len(cached_urls)
     urls.update(cached_urls)
 
     log.info(f"Collected {cached_count} event(s) from cache")
 
+    if not (base_url := await get_base(client, MIRRORS)):
+        log.warning("No working PPV mirrors")
+        return
+
+    log.info(f'Scraping from "{base_url}"')
+
     events = await get_events(
         client,
-        urljoin(base_url, "/api/streams"),
+        urljoin(base_url, "api/streams"),
         set(cached_urls.keys()),
     )
 
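The reordering in this hunk loads and merges the cache into urls before the mirror check, so a run where every mirror is down still keeps the cached events instead of discarding them at the early return. A self-contained sketch of that control flow; load_cache and get_base are stand-in stubs with simplified signatures and made-up data, not the repo's real helpers:

import asyncio

urls: dict[str, str] = {}

def load_cache(path: str, exp: int) -> dict[str, str]:
    # Stub: pretend two events are still fresh in the cache.
    return {"event-1": "https://cache.example/1", "event-2": "https://cache.example/2"}

async def get_base(mirrors: list[str]) -> str | None:
    # Stub: simulate every mirror being unreachable.
    return None

async def main() -> None:
    cached = load_cache("cache.json", exp=14400)  # made-up path; the real script uses CACHE_FILE
    urls.update(cached)  # cached events are merged before the mirror check
    if not (base_url := await get_base([])):
        print("No working PPV mirrors")  # early return, but urls still holds the cache
        return
    # ...live scraping would continue from base_url here...

asyncio.run(main())
print(len(urls))  # 2 -- the cached events survive the failed mirror lookup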