e
This commit is contained in:
parent cf93e1326b
commit 00000d98b8

2 changed files with 10 additions and 11 deletions
@@ -13,7 +13,7 @@ from .utils import LOGOS, TZ, get_logger, now, safe_process_event
 
 log = get_logger(__name__)
 
-urls: dict[str, dict[str, str]] = {}
+urls: dict[str, dict[str, str | float]] = {}
 
 BASE_URL = "https://cdn.livetv861.me/rss/upcoming_en.xml"
 
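Note on the widened annotation above: the nested dict now admits float values alongside strings. A minimal sketch of what an entry could look like under dict[str, dict[str, str | float]] — the "added" key and the use of a Unix timestamp are assumptions for illustration only, not taken from this repository:

```python
import time

# Hypothetical cache entry shape for dict[str, dict[str, str | float]]:
# mostly string fields plus one float field (assumed to be a timestamp).
urls: dict[str, dict[str, str | float]] = {}

urls["https://example.com/event/123"] = {
    "sport": "Football",         # str value
    "event": "Team A - Team B",  # str value
    "added": time.time(),        # float value (illustrative only)
}
```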
@@ -28,8 +28,6 @@ CERT_FILE = Path(__file__).parent / "utils" / "cached-ca.pem"
 
 CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"
 
-exist_sprts = set(LOGOS.keys())
-
 
 async def write_to_cert(client: httpx.AsyncClient, url: str, cert: Path) -> None:
     try:
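Only the signature of write_to_cert and its opening try: appear in this hunk as context. A minimal sketch of what a function with that signature might do, assuming it simply downloads the PEM and caches it at the given path (the error handling here is guesswork, not the repository's implementation):

```python
import httpx
from pathlib import Path


async def write_to_cert(client: httpx.AsyncClient, url: str, cert: Path) -> None:
    # Sketch only: fetch the certificate bundle from `url` and cache it at `cert`.
    try:
        resp = await client.get(url)
        resp.raise_for_status()
        cert.parent.mkdir(parents=True, exist_ok=True)
        cert.write_bytes(resp.content)
    except httpx.HTTPError:
        # Assumed fallback: keep whatever cached copy already exists on disk.
        pass
```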
@@ -266,9 +264,6 @@ async def get_events(
                 elem.clear()
                 continue
 
-            if exist_sprts & {sport, event}:
-                continue
-
             events.append(
                 {
                     "sport": sport,
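The deleted check used set intersection as a multi-value membership test: exist_sprts & {sport, event} is a non-empty (truthy) set when either sport or event matches a LOGOS key, which triggered the continue. A standalone illustration of that idiom (values are made up):

```python
# Set intersection as a membership test over several candidates.
exist_sprts = {"Football", "Tennis"}  # illustrative values only

sport, event = "Football", "Cup Final"
if exist_sprts & {sport, event}:        # non-empty intersection is truthy
    print("sport or event matches")     # runs: "Football" is in exist_sprts

sport, event = "Chess", "Blitz"
if not (exist_sprts & {sport, event}):  # empty intersection is falsy
    print("no match")                   # runs: neither value is in exist_sprts
```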
@@ -291,9 +286,11 @@ async def main(client: httpx.AsyncClient) -> None:
     cached_urls = load_cache()
     cached_count = len(cached_urls)
 
+    log.info(f"Collected {cached_count} event(s) from cache")
+
     events = await get_events(BASE_URL, cert, set(cached_urls.keys()))
 
-    log.info(f"Processing {len(events)} URLs")
+    log.info(f"Processing {len(events)} new URLs")
 
     for i, ev in enumerate(events, start=1):
         sport = ev["sport"]
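load_cache() itself is not part of this diff. A plausible minimal sketch, assuming it is simply the read-side counterpart of the CACHE_FILE.write_text(json.dumps(...)) call in the next hunk, with an empty dict as the fallback:

```python
import json
from pathlib import Path

CACHE_FILE = Path(__file__).parent / "caches" / "livetvsx.json"


def load_cache() -> dict[str, dict[str, str | float]]:
    # Sketch only: read the JSON cache back; start fresh if it is missing or corrupt.
    try:
        return json.loads(CACHE_FILE.read_text(encoding="utf-8"))
    except (FileNotFoundError, json.JSONDecodeError):
        return {}
```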
@@ -323,4 +320,4 @@ async def main(client: httpx.AsyncClient) -> None:
 
     CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
 
-    log.info(f"Collected {len(cached_urls) - cached_count} event(s)")
+    log.info(f"Collected {len(cached_urls) - cached_count} new event(s)")
@@ -14,7 +14,7 @@ from .utils import TZ, get_base, get_logger, now, safe_process_event
 
 log = get_logger(__name__)
 
-urls: dict[str, dict[str, str]] = {}
+urls: dict[str, dict[str, str | float]] = {}
 
 API_FILE = Path(__file__).parent / "caches" / "ppv_api.json"
 
@@ -189,13 +189,15 @@ async def main(client: httpx.AsyncClient) -> None:
     cached_urls = load_cache()
     cached_count = len(cached_urls)
 
+    log.info(f"Collected {cached_count} event(s) from cache")
+
     events = await get_events(
         client,
         urljoin(base_url, "/api/streams"),
         set(cached_urls.keys()),
     )
 
-    log.info(f"Processing {len(events)} URLs")
+    log.info(f"Processing {len(events)} new URLs")
 
     for i, ev in enumerate(events, start=1):
         url = await safe_process_event(
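Worth noting in the unchanged context: because the second argument starts with a slash, urljoin(base_url, "/api/streams") keeps only the scheme and host of base_url and replaces its path. A quick standalone example (the base URL is made up):

```python
from urllib.parse import urljoin

base_url = "https://example.com/some/page"  # illustrative only
print(urljoin(base_url, "/api/streams"))    # -> https://example.com/api/streams
```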
@@ -217,7 +219,7 @@ async def main(client: httpx.AsyncClient) -> None:
 
     CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
 
-    log.info(f"Collected {len(cached_urls) - cached_count} event(s)")
+    log.info(f"Collected {len(cached_urls) - cached_count} new event(s)")
 
 
 # works if no cloudflare bot detection