Mirror of https://github.com/doms9/iptv.git, synced 2026-04-21 19:46:59 +02:00
- change caching process for select sites
parent 6b209de1d8
commit 00000d9ef7
9 changed files with 138 additions and 212 deletions
@@ -91,7 +91,7 @@ async def process_event(
     return data.get("url")
 
 
-async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
+async def get_events() -> list[dict[str, str]]:
     tasks = [network.request(url, log=log) for url in BASE_URLS.values()]
 
     results = await asyncio.gather(*tasks)
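With the dedup check relocated (see the next hunk), get_events no longer needs the cached keys: under the new flow it only runs on a cache miss, so every scraped event is new by construction. The fan-out over BASE_URLS is a standard asyncio.gather pattern; a minimal self-contained sketch, assuming plain aiohttp in place of the repo's network.request helper and placeholder URLs:

import asyncio

import aiohttp

# Placeholder sources; the real BASE_URLS mapping lives in the repo's config.
BASE_URLS = {"siteA": "https://example.com/a", "siteB": "https://example.com/b"}


async def fetch(session: aiohttp.ClientSession, url: str) -> str:
    # One GET per source page; a failure here propagates out of gather().
    async with session.get(url) as resp:
        resp.raise_for_status()
        return await resp.text()


async def get_pages() -> list[str]:
    async with aiohttp.ClientSession() as session:
        tasks = [fetch(session, url) for url in BASE_URLS.values()]
        # gather() runs the requests concurrently and returns the bodies
        # in the same order as BASE_URLS.values().
        return await asyncio.gather(*tasks)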
@@ -120,15 +120,10 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
         if not (href := vs_node.attributes.get("href")):
             continue
 
-        event = fix_event(event_name)
-
-        if f"[{sport}] {event} ({TAG})" in cached_keys:
-            continue
-
         events.append(
             {
                 "sport": sport,
-                "event": event,
+                "event": fix_event(event_name),
                 "link": href,
             }
         )
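Since scraping now only happens when the cache is empty, the per-event membership test against cached keys is dead weight, and the intermediate event binding goes with it; fix_event is called inline at the single place its result is used. fix_event itself isn't shown in this commit, so the sketch below stands in a hypothetical whitespace-collapsing normalizer just to make the record-building shape runnable:

import re


def fix_event(name: str) -> str:
    # Hypothetical stand-in for the repo's fix_event normalizer:
    # collapse runs of whitespace and trim the ends.
    return re.sub(r"\s+", " ", name).strip()


def build_event(sport: str, event_name: str, href: str) -> dict[str, str]:
    # Mirrors the record shape appended inside get_events().
    return {"sport": sport, "event": fix_event(event_name), "link": href}


assert build_event("nba", "  Lakers   vs  Celtics ", "/watch/1") == {
    "sport": "nba",
    "event": "Lakers vs Celtics",
    "link": "/watch/1",
}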
@@ -137,20 +132,17 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
 
 
 async def scrape() -> None:
-    cached_urls = CACHE_FILE.load()
+    if cached_urls := CACHE_FILE.load():
+        urls.update(cached_urls)
 
-    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
+        log.info(f"Loaded {len(urls)} event(s) from cache")
 
-    valid_count = cached_count = len(valid_urls)
-
-    urls.update(valid_urls)
-
-    log.info(f"Loaded {cached_count} event(s) from cache")
+        return
 
     log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
 
-    if events := await get_events(cached_urls.keys()):
-        log.info(f"Processing {len(events)} new URL(s)")
+    if events := await get_events():
+        log.info(f"Processing {len(events)} URL(s)")
 
         now = Time.clean(Time.now())
 
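This hunk is the heart of the change. The old path merged only cache entries with a non-empty "url", kept running counters, and still scraped for anything new; the new path treats any non-empty cache as complete, adopts it wholesale, and returns before touching the network. A condensed sketch of the new control flow, assuming CACHE_FILE is a thin JSON wrapper that yields an empty dict on a miss (the repo's actual wrapper isn't shown in this hunk):

import json
from pathlib import Path

CACHE_PATH = Path("cache.json")  # stand-in for the repo's CACHE_FILE wrapper
urls: dict[str, dict[str, str]] = {}


def load_cache() -> dict[str, dict[str, str]]:
    # An empty dict on a missing or unreadable file keeps the
    # walrus truthiness test below meaningful.
    try:
        return json.loads(CACHE_PATH.read_text())
    except (FileNotFoundError, json.JSONDecodeError):
        return {}


async def scrape() -> None:
    if cached_urls := load_cache():
        # Cache hit: adopt every entry as-is and skip scraping entirely.
        urls.update(cached_urls)
        return

    ...  # cache miss: fall through to the full scrape-and-cache pass

One consequence worth noting: because the v["url"] filter was dropped along with the counters, entries cached with an empty "url" (failed lookups) are now loaded into urls too.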
@@ -187,13 +179,11 @@ async def scrape() -> None:
                 cached_urls[key] = entry
 
                 if url:
-                    valid_count += 1
-
                     urls[key] = entry
 
-        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
+        log.info(f"Collected and cached {len(urls)} event(s)")
 
     else:
-        log.info("No new events found")
+        log.info("No events found")
 
     CACHE_FILE.write(cached_urls)
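The counter arithmetic is redundant once scrape() can only reach this point on a cache miss: urls starts empty, so len(urls) is exactly the number of events collected this run. The surviving if url: guard keeps the asymmetry between the two stores, and the final CACHE_FILE.write(cached_urls) persists every attempt, failures (empty "url") included. A minimal sketch of that write pattern, with names matching the hunk:

def record(
    key: str,
    entry: dict[str, str],
    cached_urls: dict[str, dict[str, str]],
    urls: dict[str, dict[str, str]],
) -> None:
    # Every processed event lands in the cache, but only entries that
    # resolved to a playable URL are published to the live mapping.
    cached_urls[key] = entry
    if entry["url"]:
        urls[key] = entry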