doms9 2025-09-05 15:56:07 -04:00
parent 795d9a1c17
commit 00000d93ef
3 changed files with 15 additions and 7 deletions

View file

@@ -269,12 +269,13 @@ async def main(client: httpx.AsyncClient) -> None:
     cached_urls = load_cache(CACHE_FILE, exp=14400)
     cached_count = len(cached_urls)
+    urls.update(cached_urls)
     log.info(f"Collected {cached_count} event(s) from cache")
     events = await get_events(BASE_URL, cert, set(cached_urls.keys()))
-    log.info(f"Processing {len(events)} new URLs")
+    log.info(f"Processing {len(events)} new URL(s)")
     for i, ev in enumerate(events, start=1):
         sport = ev["sport"]
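
Note: the third argument, set(cached_urls.keys()), suggests get_events filters out events whose keys are already cached. A minimal sketch of that idea, assuming an /events JSON endpoint and a "key" field on each event (neither is shown in this commit):

import httpx

async def get_events(base_url: str, cert: str, known: set[str]) -> list[dict]:
    # Fetch the current listing, then drop anything already in the cache.
    async with httpx.AsyncClient(verify=cert) as client:
        resp = await client.get(f"{base_url}/events")  # assumed endpoint
        resp.raise_for_status()
    return [ev for ev in resp.json() if ev["key"] not in known]
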
@@ -302,6 +303,9 @@ async def main(client: httpx.AsyncClient) -> None:
         urls[key] = cached_urls[key] = entry
-    CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
-    log.info(f"Collected {len(cached_urls) - cached_count} new event(s)")
+    if (new_count := len(cached_urls) - cached_count) > 0:
+        CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
+        log.info(f"Collected and cached {new_count} new event(s)")
+    else:
+        log.info("No new events found")

View file

@@ -181,6 +181,7 @@ async def main(client: httpx.AsyncClient) -> None:
     cached_urls = load_cache(CACHE_FILE, exp=14400)
     cached_count = len(cached_urls)
+    urls.update(cached_urls)
     log.info(f"Collected {cached_count} event(s) from cache")
@@ -190,7 +191,7 @@ async def main(client: httpx.AsyncClient) -> None:
         set(cached_urls.keys()),
     )
-    log.info(f"Processing {len(events)} new URLs")
+    log.info(f"Processing {len(events)} new URL(s)")
     for i, ev in enumerate(events, start=1):
         url = await safe_process_event(
@@ -210,9 +211,12 @@ async def main(client: httpx.AsyncClient) -> None:
         urls[key] = cached_urls[key] = entry
-    CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
-    log.info(f"Collected {len(cached_urls) - cached_count} new event(s)")
+    if (new_count := len(cached_urls) - cached_count) > 0:
+        CACHE_FILE.write_text(json.dumps(cached_urls, indent=2), encoding="utf-8")
+        log.info(f"Collected and cached {new_count} new event(s)")
+    else:
+        log.info("No new events found")
     # works if no cloudflare bot detection
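
Both files read the cache via load_cache(CACHE_FILE, exp=14400); its implementation is not in the diff. A plausible sketch, assuming each cached entry stores a "ts" timestamp and exp is a maximum age in seconds (14400 s = 4 h):

import json
import time
from pathlib import Path

def load_cache(path: Path, exp: int) -> dict[str, dict]:
    # Return entries younger than `exp` seconds; empty dict on a missing
    # or unreadable cache file. The "ts" field is an assumption.
    try:
        data = json.loads(path.read_text(encoding="utf-8"))
    except (FileNotFoundError, json.JSONDecodeError):
        return {}
    now = time.time()
    return {k: v for k, v in data.items() if now - v.get("ts", 0) < exp}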

View file

@@ -88,7 +88,7 @@ async def main(client: httpx.AsyncClient) -> None:
     events = await get_events(client)
-    log.info(f"Processing {len(events)} new URLs")
+    log.info(f"Processing {len(events)} new URL(s)")
     for i, ev in enumerate(events, start=1):
         url = await safe_process_event(
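
The "new URL(s)" wording lands in all three scripts; the "(s)" suffix sidesteps pluralizing inside the f-string. If real pluralization were wanted instead (not what the commit does), a tiny helper would do:

def plural(n: int, noun: str) -> str:
    # "1 new URL" vs. "3 new URLs"
    return f"{n} {noun}{'' if n == 1 else 's'}"

# usage: log.info(f"Processing {plural(len(events), 'new URL')}")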