diff --git a/M3U8/scrapers/cdnlivetv.py b/M3U8/scrapers/cdnlivetv.py
index 1d72e844..01d70241 100644
--- a/M3U8/scrapers/cdnlivetv.py
+++ b/M3U8/scrapers/cdnlivetv.py
@@ -97,9 +97,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info(f'Scraping from "{API_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser) as context:
@@ -142,8 +140,7 @@ async def scrape(browser: Browser) -> None:
 
                     urls[key] = cached_urls[key] = entry
 
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/embedhd.py b/M3U8/scrapers/embedhd.py
index f10a94bf..f38f46a2 100644
--- a/M3U8/scrapers/embedhd.py
+++ b/M3U8/scrapers/embedhd.py
@@ -87,9 +87,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser) as context:
@@ -132,8 +130,7 @@ async def scrape(browser: Browser) -> None:
 
                     urls[key] = cached_urls[key] = entry
 
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/fawa.py b/M3U8/scrapers/fawa.py
index 096f63e7..43512a4a 100644
--- a/M3U8/scrapers/fawa.py
+++ b/M3U8/scrapers/fawa.py
@@ -99,9 +99,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_hrefs)
-
-    if events:
+    if events := await get_events(cached_hrefs):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
@@ -143,8 +141,7 @@ async def scrape() -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/istreameast.py b/M3U8/scrapers/istreameast.py
index 1bc4e2fe..89d9049c 100644
--- a/M3U8/scrapers/istreameast.py
+++ b/M3U8/scrapers/istreameast.py
@@ -112,9 +112,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
@@ -155,8 +153,7 @@ async def scrape() -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/livetvsx.py b/M3U8/scrapers/livetvsx.py
index e6ed5fc1..96d1d372 100644
--- a/M3U8/scrapers/livetvsx.py
+++ b/M3U8/scrapers/livetvsx.py
@@ -204,9 +204,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info('Scraping from "https://livetv.sx/enx/"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, ignore_https=True) as context:
@@ -254,8 +252,7 @@ async def scrape(browser: Browser) -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/ovogoal.py b/M3U8/scrapers/ovogoal.py
index fae771b1..02c7f14d 100644
--- a/M3U8/scrapers/ovogoal.py
+++ b/M3U8/scrapers/ovogoal.py
@@ -96,9 +96,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
@@ -139,8 +137,7 @@ async def scrape() -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/pawa.py b/M3U8/scrapers/pawa.py
index 47fbb601..5436e9e3 100644
--- a/M3U8/scrapers/pawa.py
+++ b/M3U8/scrapers/pawa.py
@@ -99,9 +99,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
@@ -142,8 +140,7 @@ async def scrape() -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/pixel.py b/M3U8/scrapers/pixel.py
index a11dc80e..560cad64 100644
--- a/M3U8/scrapers/pixel.py
+++ b/M3U8/scrapers/pixel.py
@@ -97,6 +97,6 @@ async def scrape(browser: Browser) -> None:
 
     urls.update(events or {})
 
-    CACHE_FILE.write(urls)
-    log.info(f"Collected and cached {len(urls)} new event(s)")
+
+    CACHE_FILE.write(urls)
 
diff --git a/M3U8/scrapers/ppv.py b/M3U8/scrapers/ppv.py
index 17822a3d..bd159c6c 100644
--- a/M3U8/scrapers/ppv.py
+++ b/M3U8/scrapers/ppv.py
@@ -106,9 +106,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info(f'Scraping from "{api_url}"')
 
-    events = await get_events(api_url, cached_urls.keys())
-
-    if events:
+    if events := await get_events(api_url, cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:
@@ -159,8 +157,7 @@ async def scrape(browser: Browser) -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/roxie.py b/M3U8/scrapers/roxie.py
index 3e00cd24..f1af53b6 100644
--- a/M3U8/scrapers/roxie.py
+++ b/M3U8/scrapers/roxie.py
@@ -199,9 +199,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser) as context:
@@ -247,8 +245,7 @@ async def scrape(browser: Browser) -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/shark.py b/M3U8/scrapers/shark.py
index 55214eb6..3c147baa 100644
--- a/M3U8/scrapers/shark.py
+++ b/M3U8/scrapers/shark.py
@@ -124,9 +124,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         for i, ev in enumerate(events, start=1):
@@ -165,8 +163,7 @@ async def scrape() -> None:
 
                 urls[key] = cached_urls[key] = entry
 
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/sport9.py b/M3U8/scrapers/sport9.py
index 2098e3ed..c48579b6 100644
--- a/M3U8/scrapers/sport9.py
+++ b/M3U8/scrapers/sport9.py
@@ -99,9 +99,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
@@ -142,8 +140,7 @@ async def scrape(browser: Browser) -> None:
 
                 urls[key] = cached_urls[key] = entry
 
-        if new_count := len(cached_urls) - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/streambtw.py b/M3U8/scrapers/streambtw.py
index 3d63cb66..75a9a219 100644
--- a/M3U8/scrapers/streambtw.py
+++ b/M3U8/scrapers/streambtw.py
@@ -103,9 +103,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events()
-
-    if events:
+    if events := await get_events():
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
diff --git a/M3U8/scrapers/streamcenter.py b/M3U8/scrapers/streamcenter.py
index 9b2f94d6..0d635a8b 100644
--- a/M3U8/scrapers/streamcenter.py
+++ b/M3U8/scrapers/streamcenter.py
@@ -102,9 +102,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info('Scraping from "https://streamcenter.xyz"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser) as context:
@@ -153,8 +151,7 @@ async def scrape(browser: Browser) -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/streamhub.py b/M3U8/scrapers/streamhub.py
index eb285465..61e5f704 100644
--- a/M3U8/scrapers/streamhub.py
+++ b/M3U8/scrapers/streamhub.py
@@ -143,9 +143,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:
@@ -194,8 +192,7 @@ async def scrape(browser: Browser) -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/streamsgate.py b/M3U8/scrapers/streamsgate.py
index c6dc2c5e..d74fd3ec 100644
--- a/M3U8/scrapers/streamsgate.py
+++ b/M3U8/scrapers/streamsgate.py
@@ -133,9 +133,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:
@@ -184,8 +182,7 @@ async def scrape(browser: Browser) -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/timstreams.py b/M3U8/scrapers/timstreams.py
index 2c918661..5197c8cc 100644
--- a/M3U8/scrapers/timstreams.py
+++ b/M3U8/scrapers/timstreams.py
@@ -117,9 +117,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:
@@ -168,8 +166,7 @@ async def scrape(browser: Browser) -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/totalsportek.py b/M3U8/scrapers/totalsportek.py
index fa2d92cf..fdbb5bac 100644
--- a/M3U8/scrapers/totalsportek.py
+++ b/M3U8/scrapers/totalsportek.py
@@ -145,9 +145,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
@@ -188,8 +186,7 @@ async def scrape() -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/tvapp.py b/M3U8/scrapers/tvapp.py
index 469d18f5..ec753335 100644
--- a/M3U8/scrapers/tvapp.py
+++ b/M3U8/scrapers/tvapp.py
@@ -85,9 +85,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events()
-
-    if events:
+    if events := await get_events():
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
diff --git a/M3U8/scrapers/volokit.py b/M3U8/scrapers/volokit.py
index 11cbfe8e..9eb1f810 100644
--- a/M3U8/scrapers/volokit.py
+++ b/M3U8/scrapers/volokit.py
@@ -124,9 +124,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
@@ -167,8 +165,7 @@ async def scrape() -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/watchfooty.py b/M3U8/scrapers/watchfooty.py
index c48f3e5e..0a93b3ce 100644
--- a/M3U8/scrapers/watchfooty.py
+++ b/M3U8/scrapers/watchfooty.py
@@ -250,9 +250,7 @@ async def scrape(browser: Browser) -> None:
 
     log.info(f'Scraping from "{base_url}"')
 
-    events = await get_events(base_url, cached_urls.keys())
-
-    if events:
+    if events := await get_events(base_url, cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         async with network.event_context(browser, stealth=False) as context:
@@ -302,8 +300,7 @@ async def scrape(browser: Browser) -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/webcast.py b/M3U8/scrapers/webcast.py
index b4fdfe8f..5bf24beb 100644
--- a/M3U8/scrapers/webcast.py
+++ b/M3U8/scrapers/webcast.py
@@ -124,9 +124,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
@@ -167,8 +165,7 @@ async def scrape() -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")
 
diff --git a/M3U8/scrapers/xstreameast.py b/M3U8/scrapers/xstreameast.py
index c18381dd..9bb420f9 100644
--- a/M3U8/scrapers/xstreameast.py
+++ b/M3U8/scrapers/xstreameast.py
@@ -133,9 +133,7 @@ async def scrape() -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(cached_urls.keys())
-
-    if events:
+    if events := await get_events(cached_urls.keys()):
         log.info(f"Processing {len(events)} new URL(s)")
 
         now = Time.clean(Time.now())
@@ -176,8 +174,7 @@ async def scrape() -> None:
 
                 urls[key] = entry
 
-        if new_count := valid_count - cached_count:
-            log.info(f"Collected and cached {new_count} new event(s)")
+        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
     else:
         log.info("No new events found")