Mirror of https://github.com/doms9/iptv.git, synced 2026-04-22 19:57:00 +02:00

commit 00000d95a2
parent 535ff99142

    bugfix

8 changed files with 8 additions and 8 deletions
@@ -89,7 +89,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
 
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
@@ -87,7 +87,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
 
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
@@ -115,7 +115,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape(browser: Browser) -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
 
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
@@ -90,7 +90,7 @@ async def get_events() -> dict[str, dict[str, str | float]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
 
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
@@ -96,7 +96,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
 
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
@@ -83,7 +83,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
 
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
@@ -75,7 +75,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
 
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
@@ -133,7 +133,7 @@ async def get_events() -> list[dict[str, str]]:
 async def scrape() -> None:
     if cached_urls := CACHE_FILE.load():
-        urls.update(cached_urls)
+        urls.update({k: v for k, v in cached_urls.items() if v["url"]})
 
         log.info(f"Loaded {len(urls)} event(s) from cache")
 
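All eight hunks apply the same one-line fix: instead of merging every cached entry into `urls`, only entries whose `"url"` value is non-empty survive the merge. A minimal sketch of the behavior change, with hypothetical event keys and stream URLs (only the dict-of-dicts shape returned by `CACHE_FILE.load()` is taken from the diff):

```python
# Hypothetical sketch of the bugfix: a cache entry whose earlier scrape
# failed is stored with an empty "url", and the old blanket update()
# would load it back as if it were a usable event.

urls: dict[str, dict[str, str]] = {}

# Assumed cache shape: event key -> {"url": ...} mapping.
cached_urls = {
    "nba-lakers-celtics": {"url": "https://example.com/stream/1.m3u8"},
    "nhl-bruins-rangers": {"url": ""},  # failed scrape, no usable URL
}

# Old: urls.update(cached_urls) would also pull in the dead entry.
# New: keep only cache entries that actually carry a URL.
urls.update({k: v for k, v in cached_urls.items() if v["url"]})

assert "nhl-bruins-rangers" not in urls
print(f"Loaded {len(urls)} event(s) from cache")  # Loaded 1 event(s) from cache
```

Filtering at load time also keeps the `Loaded {len(urls)} event(s) from cache` log line accurate, since dead cache entries no longer inflate the count.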