doms9 2026-03-02 00:50:28 -05:00
parent 00000d99d6
commit 00000d9a1a
23 changed files with 44 additions and 108 deletions

View file

@@ -97,9 +97,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{API_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser) as context:
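Every scraper in this commit gets the same refactor: the two-step "assign, then test" is folded into a single if using the walrus operator, binding events and checking its truthiness at once. A minimal runnable sketch of the pattern, with a hypothetical get_events standing in for each scraper's real one:

    import asyncio

    async def get_events(cached: set[str]) -> dict[str, str]:
        # Hypothetical stand-in: return only events not already cached.
        found = {"u1": "Event 1", "u2": "Event 2"}
        return {k: v for k, v in found.items() if k not in cached}

    async def main() -> None:
        cached = {"u1"}
        # One statement instead of two: `events` is bound and tested together.
        if events := await get_events(cached):
            print(f"Processing {len(events)} new URL(s)")
        else:
            print("No new events found")

    asyncio.run(main())

An empty dict is falsy, so the else branch covers both "nothing returned" and "everything was already cached".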
@@ -142,8 +140,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = cached_urls[key] = entry
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -87,9 +87,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser) as context:
@@ -132,8 +130,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = cached_urls[key] = entry
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -99,9 +99,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_hrefs)
-    if events:
+    if events := await get_events(cached_hrefs):
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())
@@ -143,8 +141,7 @@ async def scrape() -> None:
             urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -112,9 +112,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())
@@ -155,8 +153,7 @@ async def scrape() -> None:
             urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -204,9 +204,7 @@ async def scrape(browser: Browser) -> None:
     log.info('Scraping from "https://livetv.sx/enx/"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser, ignore_https=True) as context:
@@ -254,8 +252,7 @@ async def scrape(browser: Browser) -> None:
             urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -96,9 +96,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())
@@ -139,8 +137,7 @@ async def scrape() -> None:
             urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -99,9 +99,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())
@@ -142,8 +140,7 @@ async def scrape() -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -97,6 +97,6 @@ async def scrape(browser: Browser) -> None:
     urls.update(events or {})
-    CACHE_FILE.write(urls)
     log.info(f"Collected and cached {len(urls)} new event(s)")
+    CACHE_FILE.write(urls)
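This file is the odd one out: instead of the walrus refactor, the one-line change moves CACHE_FILE.write(urls) from before the summary log to after it, making the disk write the function's final step. A sketch under assumptions (the Cache class here is hypothetical; the real CACHE_FILE object is not shown in the diff):

    import json
    from pathlib import Path

    class Cache:
        # Hypothetical stand-in for the project's CACHE_FILE.
        def __init__(self, path: str) -> None:
            self.path = Path(path)

        def write(self, data: dict) -> None:
            self.path.write_text(json.dumps(data))

    CACHE_FILE = Cache("urls.json")
    urls = {"key": "entry"}

    # New order: log first, persist last, so the summary still appears
    # even if serialization fails afterwards.
    print(f"Collected and cached {len(urls)} new event(s)")
    CACHE_FILE.write(urls)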

View file

@@ -106,9 +106,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{api_url}"')
-    events = await get_events(api_url, cached_urls.keys())
-    if events:
+    if events := await get_events(api_url, cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser, stealth=False) as context:
@@ -159,8 +157,7 @@ async def scrape(browser: Browser) -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -199,9 +199,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser) as context:
@@ -247,8 +245,7 @@ async def scrape(browser: Browser) -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -124,9 +124,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         for i, ev in enumerate(events, start=1):
@@ -165,8 +163,7 @@ async def scrape() -> None:
            urls[key] = cached_urls[key] = entry
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -99,9 +99,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())
@@ -142,8 +140,7 @@ async def scrape(browser: Browser) -> None:
            urls[key] = cached_urls[key] = entry
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -103,9 +103,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events()
-    if events:
+    if events := await get_events():
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())

View file

@@ -102,9 +102,7 @@ async def scrape(browser: Browser) -> None:
     log.info('Scraping from "https://streamcenter.xyz"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser) as context:
@@ -153,8 +151,7 @@ async def scrape(browser: Browser) -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -143,9 +143,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser, stealth=False) as context:
@@ -194,8 +192,7 @@ async def scrape(browser: Browser) -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -133,9 +133,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser, stealth=False) as context:
@@ -184,8 +182,7 @@ async def scrape(browser: Browser) -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -117,9 +117,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser, stealth=False) as context:
@@ -168,8 +166,7 @@ async def scrape(browser: Browser) -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -145,9 +145,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())
@@ -188,8 +186,7 @@ async def scrape() -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -85,9 +85,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events()
-    if events:
+    if events := await get_events():
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())

View file

@@ -124,9 +124,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())
@@ -167,8 +165,7 @@ async def scrape() -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -250,9 +250,7 @@ async def scrape(browser: Browser) -> None:
     log.info(f'Scraping from "{base_url}"')
-    events = await get_events(base_url, cached_urls.keys())
-    if events:
+    if events := await get_events(base_url, cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         async with network.event_context(browser, stealth=False) as context:
@@ -302,8 +300,7 @@ async def scrape(browser: Browser) -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -124,9 +124,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())
@@ -167,8 +165,7 @@ async def scrape() -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")

View file

@@ -133,9 +133,7 @@ async def scrape() -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(cached_urls.keys())
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
         now = Time.clean(Time.now())
@@ -176,8 +174,7 @@ async def scrape() -> None:
            urls[key] = entry
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")