bugfix
This commit is contained in:
doms9 2026-04-17 11:57:54 -04:00
parent 535ff99142
commit 00000d95a2
8 changed files with 8 additions and 8 deletions

View file

@@ -89,7 +89,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")

View file

@@ -87,7 +87,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")

View file

@@ -115,7 +115,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape(browser: Browser) -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")

View file

@@ -90,7 +90,7 @@ async def get_events() -> dict[str, dict[str, str | float]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")

View file

@@ -96,7 +96,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")

View file

@@ -83,7 +83,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")

View file

@@ -75,7 +75,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")

View file

@@ -133,7 +133,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
         log.info(f"Loaded {len(urls)} event(s) from cache")