Mirror of https://github.com/doms9/iptv.git
Synced 2026-01-21 03:59:03 +01:00
Parent: 8b2d8cc1fc
Commit: 00000d939c
20 changed files with 119 additions and 12 deletions

@@ -41,9 +41,11 @@ async def process_event(url: str, url_num: int) -> str | None:

    if not (match := valid_m3u8.search(html_data.text)):
        log.info(f"URL {url_num}) No M3U8 found")

        return

    log.info(f"URL {url_num}) Captured M3U8")

    return match[1]
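
The hunk above only shows the tail of process_event. Below is a minimal, self-contained sketch of how such a capture step can work; it assumes httpx for the request, and the valid_m3u8 pattern and log logger are illustrative stand-ins for the module-level objects the diff references, not the repository's actual definitions.

# Hedged sketch, not the repo's code: the regex pattern and the use of httpx
# are assumptions; only the capture/return logic mirrors the hunk above.
import logging
import re

import httpx

log = logging.getLogger(__name__)

# Assumption: a single capture group holding the playlist URL.
valid_m3u8 = re.compile(r"(https?://[^\s\"']+\.m3u8[^\s\"']*)")


async def process_event(url: str, url_num: int) -> str | None:
    async with httpx.AsyncClient(timeout=10) as client:
        html_data = await client.get(url)

    if not (match := valid_m3u8.search(html_data.text)):
        log.info(f"URL {url_num}) No M3U8 found")
        return

    log.info(f"URL {url_num}) Captured M3U8")
    return match[1]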

@@ -133,7 +135,9 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:

async def scrape() -> None:
    cached_urls = CACHE_FILE.load()

    cached_count = len(cached_urls)

    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")
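
CACHE_FILE and urls are module-level objects not shown in this diff. One plausible shape for the cache helper, sketched here purely as an assumption (the repo may store its cache differently), is a thin JSON wrapper:

# Assumed helper; CacheFile, cache.json, and the dict[str, str] shape are
# illustrative guesses, not taken from the repository.
import json
from pathlib import Path


class CacheFile:
    """Tiny JSON-backed store mapping event keys to captured M3U8 URLs."""

    def __init__(self, path: Path) -> None:
        self.path = path

    def load(self) -> dict[str, str]:
        # An absent or empty file simply yields no cached events.
        if not self.path.exists():
            return {}
        text = self.path.read_text()
        return json.loads(text) if text.strip() else {}

    def save(self, data: dict[str, str]) -> None:
        self.path.write_text(json.dumps(data, indent=2))


CACHE_FILE = CacheFile(Path("cache.json"))
urls: dict[str, str] = {}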

@@ -183,6 +187,7 @@ async def scrape() -> None:

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")

    else:
        log.info("No new events found")
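
The closing hunk relies on the walrus operator: the difference between the cache size after scraping and the cached_count snapshot taken when the cache was loaded is bound and truth-tested in one expression. A standalone illustration of that pattern, with invented values:

# Invented values, only to demonstrate the counting pattern from the hunk.
cached_count = 3  # snapshot taken right after the cache was loaded
cached_urls = {
    "event-a": "a.m3u8",
    "event-b": "b.m3u8",
    "event-c": "c.m3u8",
    "event-d": "d.m3u8",  # one new event collected during the run
}

if new_count := len(cached_urls) - cached_count:
    print(f"Collected and cached {new_count} new event(s)")
else:
    print("No new events found")

Binding and testing in a single step avoids recomputing the length and keeps the zero case (no new events) on the else branch.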