mirror of https://github.com/doms9/iptv.git
synced 2026-03-07 11:18:25 +01:00
This commit is contained in:
parent 00000d99d6
commit 00000d9a1a
23 changed files with 44 additions and 108 deletions
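
Every changed module applies the same two-part simplification to its scrape() coroutine: the separate get_events() call plus truthiness check collapses into a single if statement with an assignment expression, and the walrus-guarded new_count summary log is replaced by logging the computed count directly. Below is a minimal, self-contained sketch of the before/after shape; get_events, cached_urls, and log here are stand-ins for each scraper's own module-level names, not the repository's actual definitions.

import asyncio
import logging

log = logging.getLogger(__name__)

# Stand-in cache; each real scraper keeps its own cached_urls mapping.
cached_urls: dict[str, dict] = {"https://old.example/event": {"name": "cached"}}


async def get_events(known_urls) -> dict[str, dict]:
    # Stand-in for the per-site get_events(); returns only events not in known_urls.
    return {"https://new.example/event": {"name": "fresh"}}


async def scrape_before() -> None:
    cached_count = len(cached_urls)

    events = await get_events(cached_urls.keys())

    if events:
        log.info(f"Processing {len(events)} new URL(s)")
        cached_urls.update(events)  # stands in for the per-event processing loop

        if new_count := len(cached_urls) - cached_count:
            log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")


async def scrape_after() -> None:
    cached_count = len(cached_urls)

    if events := await get_events(cached_urls.keys()):
        log.info(f"Processing {len(events)} new URL(s)")
        cached_urls.update(events)  # stands in for the per-event processing loop

        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
    else:
        log.info("No new events found")


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    asyncio.run(scrape_after())

The visible behavioural difference is the summary line: the old guard skipped it when every fetched event was filtered out, while the new form always logs the count, even when it is zero.
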
@@ -97,9 +97,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{API_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser) as context:

@@ -142,8 +140,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = cached_urls[key] = entry
 
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -87,9 +87,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser) as context:

@@ -132,8 +130,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = cached_urls[key] = entry
 
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -99,9 +99,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_hrefs)
-
-    if events:
+    if events := await get_events(cached_hrefs):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -143,8 +141,7 @@ async def scrape() -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -112,9 +112,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -155,8 +153,7 @@ async def scrape() -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -204,9 +204,7 @@ async def scrape(browser: Browser) -> None:
     log.info('Scraping from "https://livetv.sx/enx/"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, ignore_https=True) as context:

@@ -254,8 +252,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -96,9 +96,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -139,8 +137,7 @@ async def scrape() -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -99,9 +99,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -142,8 +140,7 @@ async def scrape() -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -97,6 +97,6 @@ async def scrape(browser: Browser) -> None:
     urls.update(events or {})
 
-    CACHE_FILE.write(urls)
-
     log.info(f"Collected and cached {len(urls)} new event(s)")
+
+    CACHE_FILE.write(urls)

@@ -106,9 +106,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{api_url}"')
 
-    events = await get_events(api_url, cached_urls.keys())
-
-    if events:
+    if events := await get_events(api_url, cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:

@@ -159,8 +157,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -199,9 +199,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser) as context:

@@ -247,8 +245,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -124,9 +124,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         for i, ev in enumerate(events, start=1):

@@ -165,8 +163,7 @@ async def scrape() -> None:
             urls[key] = cached_urls[key] = entry
 
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -99,9 +99,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -142,8 +140,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = cached_urls[key] = entry
 
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -103,9 +103,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events()
-
-    if events:
+    if events := await get_events():
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -102,9 +102,7 @@ async def scrape(browser: Browser) -> None:
     log.info('Scraping from "https://streamcenter.xyz"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser) as context:

@@ -153,8 +151,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -143,9 +143,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:

@@ -194,8 +192,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -133,9 +133,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:

@@ -184,8 +182,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -117,9 +117,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:

@@ -168,8 +166,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -145,9 +145,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -188,8 +186,7 @@ async def scrape() -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -85,9 +85,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events()
-
-    if events:
+    if events := await get_events():
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -124,9 +124,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -167,8 +165,7 @@ async def scrape() -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

@@ -250,9 +250,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{base_url}"')
 
-    events = await get_events(base_url, cached_urls.keys())
-
-    if events:
+    if events := await get_events(base_url, cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:

@@ -302,8 +300,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
    else:
         log.info("No new events found")

@@ -124,9 +124,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -167,8 +165,7 @@ async def scrape() -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")

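The unchanged log line in the hunk above joins several base URLs inside one f-string and reuses the outer quote character within the replacement field, which only parses on Python 3.12+ (PEP 701). A small sketch of that detail, with BASE_URLS as a placeholder mapping rather than the module's real one, alongside an equivalent spelling that also works on older interpreters:

# BASE_URLS is a placeholder; the real module defines its own mapping of site names to URLs.
BASE_URLS = {"one": "https://example.invalid/a", "two": "https://example.invalid/b"}

# Python 3.12+ (PEP 701): the inner single quotes may reuse the outer quote character.
msg = f'Scraping from "{' & '.join(BASE_URLS.values())}"'

# Equivalent on earlier versions: build the joined string outside the literal.
joined = " & ".join(BASE_URLS.values())
msg_compat = f'Scraping from "{joined}"'

assert msg == msg_compat
print(msg)
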
@@ -133,9 +133,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())

@@ -176,8 +174,7 @@ async def scrape() -> None:
             urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
 
     else:
         log.info("No new events found")