parent 00000d90bc
commit 00000d9c6a
3 changed files with 25 additions and 16 deletions
@@ -38,7 +38,11 @@ CERT_FILE = Path(__file__).parent / "utils" / "cached-ca.pem"
 CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"
 
 
-async def write_to_cert(client: httpx.AsyncClient, url: str, cert: Path) -> None:
+async def write_to_cert(
+    client: httpx.AsyncClient,
+    url: str,
+    cert: Path,
+) -> None:
     try:
         r = await client.get(url)
         r.raise_for_status()
@@ -263,17 +267,21 @@ async def get_events(
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    log.info(f'Scraping from "{BASE_URL}"')
-
-    cert = await get_cert(client)
-
     cached_urls = load_cache(CACHE_FILE, exp=14400)
     cached_count = len(cached_urls)
     urls.update(cached_urls)
 
     log.info(f"Collected {cached_count} event(s) from cache")
 
-    events = await get_events(BASE_URL, cert, set(cached_urls.keys()))
+    log.info(f'Scraping from "{BASE_URL}"')
+
+    cert = await get_cert(client)
+
+    events = await get_events(
+        BASE_URL,
+        cert,
+        set(cached_urls.keys()),
+    )
 
     log.info(f"Processing {len(events)} new URL(s)")
 
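Note: the livetvsx main() now reads the cache before doing any network work (the cert fetch and the scrape), so the cached-event count is logged up front. load_cache itself is not part of this diff; a minimal sketch of the behaviour the call site assumes, i.e. a JSON cache whose entries older than exp seconds (14400 s = 4 hours) are dropped, could look like the following. The file layout and the "ts" timestamp field are assumptions, not the project's actual format.

import json
import time
from pathlib import Path


def load_cache(path: Path, exp: int) -> dict[str, dict]:
    """Return cached entries younger than `exp` seconds; {} if the cache is missing or corrupt."""
    if not path.is_file():
        return {}
    try:
        data: dict[str, dict] = json.loads(path.read_text())
    except (OSError, json.JSONDecodeError):
        return {}
    cutoff = time.time() - exp
    # Keep only entries whose stored timestamp ("ts", assumed) is inside the expiry window.
    return {url: entry for url, entry in data.items() if entry.get("ts", 0) >= cutoff}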
@@ -164,7 +164,7 @@ async def get_events(
             {
                 "sport": sport,
                 "event": name,
-                "link": urljoin(base_url, f"/live/{uri_name}"),
+                "link": urljoin(base_url, f"live/{uri_name}"),
                 "logo": logo,
             }
         )
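Note: the only change here is dropping the leading slash in the path handed to urljoin (the api/streams call below gets the same fix). That matters whenever a mirror's base URL carries a path prefix: urllib.parse.urljoin roots a "/…" path at the host and discards the base path, whereas a relative path is resolved against it. A quick illustration with placeholder URLs:

from urllib.parse import urljoin

base = "https://example.com/app/"  # hypothetical mirror that lives under a path prefix

print(urljoin(base, "/live/match-1"))  # https://example.com/live/match-1      (prefix lost)
print(urljoin(base, "live/match-1"))   # https://example.com/app/live/match-1  (prefix kept)

# With a bare host both forms give the same result:
print(urljoin("https://example.com", "live/match-1"))  # https://example.com/live/match-1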
@@ -173,21 +173,21 @@ async def get_events(
 
 
 async def main(client: httpx.AsyncClient) -> None:
-    if not (base_url := await get_base(client, MIRRORS)):
-        log.warning("No working PPV mirrors")
-        return
-
-    log.info(f'Scraping from "{base_url}"')
-
     cached_urls = load_cache(CACHE_FILE, exp=14400)
     cached_count = len(cached_urls)
     urls.update(cached_urls)
 
     log.info(f"Collected {cached_count} event(s) from cache")
 
+    if not (base_url := await get_base(client, MIRRORS)):
+        log.warning("No working PPV mirrors")
+        return
+
+    log.info(f'Scraping from "{base_url}"')
+
     events = await get_events(
         client,
-        urljoin(base_url, "/api/streams"),
+        urljoin(base_url, "api/streams"),
         set(cached_urls.keys()),
     )
 
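Note: in the PPV scraper's main(), the mirror probe and the scraping log likewise move after the cache load, mirroring the livetvsx change. get_base is not shown in this diff; from the call site it appears to probe MIRRORS and return the first base URL that answers, or None (handled as "No working PPV mirrors"). A sketch of that contract, with every detail assumed:

import httpx


async def get_base(client: httpx.AsyncClient, mirrors: list[str]) -> str | None:
    """Sketch only: return the first mirror that responds successfully, else None."""
    for mirror in mirrors:
        try:
            r = await client.get(mirror, timeout=10)
            r.raise_for_status()
        except httpx.HTTPError:
            continue
        return mirror
    return None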
@@ -2,6 +2,7 @@ import asyncio
 import json
 import logging
 import re
+from collections.abc import Callable
 from datetime import datetime
 from pathlib import Path
 from typing import Any
@@ -103,9 +104,9 @@ def load_cache(
 
 
 async def safe_process_event(
-    fn,
+    fn: Callable,
     url_num: int,
-    timeout=20,
+    timeout: int | float = 20,
     log: logging.Logger | None = None,
 ) -> Any | None:
 
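Note: fn gains a bare Callable annotation and timeout an explicit int | float. Since the wrapper is async, takes a timeout, and can return None, fn is presumably an async callable awaited under that timeout; if so, Callable[[], Awaitable[Any]] would pin the type down further. The body below is not from this diff, only a sketch of that assumption:

import asyncio
import logging
from collections.abc import Awaitable, Callable
from typing import Any


async def safe_process_event(
    fn: Callable[[], Awaitable[Any]],
    url_num: int,
    timeout: int | float = 20,
    log: logging.Logger | None = None,
) -> Any | None:
    # Sketch: await the event coroutine under a timeout and swallow failures,
    # so one bad event cannot abort the whole scrape.
    try:
        return await asyncio.wait_for(fn(), timeout)
    except Exception:
        if log:
            log.warning(f"Event #{url_num} failed or timed out")
        return None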