diff --git a/M3U8/scrapers/fawa.py b/M3U8/scrapers/fawa.py
index 43512a4a..c6062dae 100644
--- a/M3U8/scrapers/fawa.py
+++ b/M3U8/scrapers/fawa.py
@@ -19,7 +19,7 @@ BASE_URL = "http://www.fawanews.sc/"
 async def process_event(url: str, url_num: int) -> str | None:
 
     if not (html_data := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return
 
diff --git a/M3U8/scrapers/istreameast.py b/M3U8/scrapers/istreameast.py
index 89d9049c..a90aefa8 100644
--- a/M3U8/scrapers/istreameast.py
+++ b/M3U8/scrapers/istreameast.py
@@ -19,7 +19,7 @@ BASE_URL = "https://istreameast.app"
 async def process_event(url: str, url_num: int) -> str | None:
 
     if not (event_data := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return
 
@@ -36,7 +36,7 @@ async def process_event(url: str, url_num: int) -> str | None:
         return
 
     if not (iframe_src_data := await network.request(iframe_src, log=log)):
-        log.info(f"URL {url_num}) Failed to load iframe source.")
+        log.warning(f"URL {url_num}) Failed to load iframe source.")
 
         return
 
diff --git a/M3U8/scrapers/livetvsx.py b/M3U8/scrapers/livetvsx.py
index 5b257a8c..30301db0 100644
--- a/M3U8/scrapers/livetvsx.py
+++ b/M3U8/scrapers/livetvsx.py
@@ -47,8 +47,6 @@ async def process_event(
     page: Page,
 ) -> str | None:
 
-    event_id_pattern = re.compile(r"&c=(\d*)", re.I)
-
     captured: list[str] = []
     got_one = asyncio.Event()
 
@@ -82,14 +80,7 @@ async def process_event(
         log.warning(f"URL {url_num}) No valid sources found.")
 
         return
-    if (match := event_id_pattern.search(href)) and (
-        event_id := match[1]
-    ).isalnum():
-
-        event_url = f"https://aliez.tv/player/live.php?id={event_id}"
-
-    else:
-        event_url = href if href.startswith("http") else f"https:{href}"
+    event_url = href if href.startswith("http") else f"https:{href}"
 
     await page.goto(
         event_url,
diff --git a/M3U8/scrapers/ovogoal.py b/M3U8/scrapers/ovogoal.py
index 02c7f14d..02b65e36 100644
--- a/M3U8/scrapers/ovogoal.py
+++ b/M3U8/scrapers/ovogoal.py
@@ -20,7 +20,7 @@ async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]
     nones = None, None
 
     if not (html_data := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return nones
     soup = HTMLParser(html_data.content)
diff --git a/M3U8/scrapers/pawa.py b/M3U8/scrapers/pawa.py
index 1a90b5d3..55400e63 100644
--- a/M3U8/scrapers/pawa.py
+++ b/M3U8/scrapers/pawa.py
@@ -20,7 +20,7 @@ BASE_URL = "https://pawastreams.net/feed/"
 async def process_event(url: str, url_num: int) -> str | None:
 
     if not (event_data := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return
 
@@ -37,7 +37,7 @@ async def process_event(url: str, url_num: int) -> str | None:
         return
 
     if not (iframe_src_data := await network.request(iframe_src, log=log)):
-        log.info(f"URL {url_num}) Failed to load iframe source.")
+        log.warning(f"URL {url_num}) Failed to load iframe source.")
 
         return
 
diff --git a/M3U8/scrapers/shark.py b/M3U8/scrapers/shark.py
index 3c147baa..3ebac049 100644
--- a/M3U8/scrapers/shark.py
+++ b/M3U8/scrapers/shark.py
@@ -20,14 +20,14 @@ BASE_URL = "https://sharkstreams.net"
 async def process_event(url: str, url_num: int) -> str | None:
 
     if not (r := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return
 
     data: dict[str, list[str]] = r.json()
 
     if not (urls := data.get("urls")):
-        log.info(f"URL {url_num}) No M3U8 found")
+        log.warning(f"URL {url_num}) No M3U8 found")
 
         return
 
@@ -39,8 +39,6 @@ async def process_event(url: str, url_num: int) -> str | None:
 
 async def refresh_html_cache(now_ts: float) -> dict[str, dict[str, str | float]]:
 
-    log.info("Refreshing HTML cache")
-
     events = {}
 
     if not (html_data := await network.request(BASE_URL, log=log)):
@@ -92,6 +90,8 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     now = Time.clean(Time.now())
 
     if not (events := HTML_CACHE.load()):
+        log.info("Refreshing HTML cache")
+
         events = await refresh_html_cache(now.timestamp())
         HTML_CACHE.write(events)
 
diff --git a/M3U8/scrapers/streambtw.py b/M3U8/scrapers/streambtw.py
index 75a9a219..6768d131 100644
--- a/M3U8/scrapers/streambtw.py
+++ b/M3U8/scrapers/streambtw.py
@@ -31,7 +31,7 @@ async def process_event(url: str, url_num: int) -> str | None:
     valid_m3u8 = re.compile(r'var\s+(\w+)\s*=\s*"([^"]*)"', re.I)
 
     if not (match := valid_m3u8.search(html_data.text)):
-        log.info(f"URL {url_num}) No M3U8 found")
+        log.warning(f"URL {url_num}) No M3U8 found")
 
         return
     stream_link: str = match[2]
diff --git a/M3U8/scrapers/streamsgate.py b/M3U8/scrapers/streamsgate.py
index d74fd3ec..a94c0b1e 100644
--- a/M3U8/scrapers/streamsgate.py
+++ b/M3U8/scrapers/streamsgate.py
@@ -46,8 +46,6 @@ def get_event(t1: str, t2: str) -> str:
 
 async def refresh_api_cache(now_ts: float) -> list[dict[str, Any]]:
 
-    log.info("Refreshing API cache")
-
     tasks = [
         network.request(
             urljoin(BASE_URL, f"data/{sport}.json"),
@@ -73,6 +71,8 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     now = Time.clean(Time.now())
 
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
+        log.info("Refreshing API cache")
+
         api_data = await refresh_api_cache(now.timestamp())
         API_FILE.write(api_data)
 
diff --git a/M3U8/scrapers/totalsportek.py b/M3U8/scrapers/totalsportek.py
index fdbb5bac..af6f7169 100644
--- a/M3U8/scrapers/totalsportek.py
+++ b/M3U8/scrapers/totalsportek.py
@@ -26,7 +26,7 @@ def fix_txt(s: str) -> str:
 async def process_event(url: str, url_num: int) -> str | None:
 
     if not (event_data := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return
 
@@ -43,7 +43,7 @@ async def process_event(url: str, url_num: int) -> str | None:
         return
 
     if not (iframe_1_src_data := await network.request(iframe_1_src, log=log)):
-        log.info(f"URL {url_num}) Failed to load iframe source.")
+        log.warning(f"URL {url_num}) Failed to load iframe source.")
 
         return
 
@@ -66,7 +66,7 @@ async def process_event(url: str, url_num: int) -> str | None:
             headers={"Referer": iframe_1_src},
         )
     ):
-        log.info(f"URL {url_num}) Failed to load iframe source.")
+        log.warning(f"URL {url_num}) Failed to load iframe source.")
 
         return
 
diff --git a/M3U8/scrapers/tvapp.py b/M3U8/scrapers/tvapp.py
index ec753335..ded51787 100644
--- a/M3U8/scrapers/tvapp.py
+++ b/M3U8/scrapers/tvapp.py
@@ -18,7 +18,7 @@ BASE_URL = "https://thetvapp.to"
 async def process_event(url: str, url_num: int) -> str | None:
 
     if not (html_data := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return
 
diff --git a/M3U8/scrapers/volokit.py b/M3U8/scrapers/volokit.py
index 9eb1f810..ff5546ae 100644
--- a/M3U8/scrapers/volokit.py
+++ b/M3U8/scrapers/volokit.py
@@ -30,7 +30,7 @@ def fix_event(s: str) -> str:
 async def process_event(url: str, url_num: int) -> str | None:
 
     if not (event_data := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return
 
@@ -53,7 +53,7 @@ async def process_event(url: str, url_num: int) -> str | None:
             log=log,
         )
     ):
-        log.info(f"URL {url_num}) Failed to load iframe source.")
+        log.warning(f"URL {url_num}) Failed to load iframe source.")
 
         return
 
diff --git a/M3U8/scrapers/watchfooty.py b/M3U8/scrapers/watchfooty.py
index ff5283b1..eea7fbcc 100644
--- a/M3U8/scrapers/watchfooty.py
+++ b/M3U8/scrapers/watchfooty.py
@@ -42,8 +42,6 @@ VALID_SPORTS = [
 
 async def refresh_api_cache(now: Time) -> list[dict[str, Any]]:
 
-    log.info("Refreshing API cache")
-
     tasks = [
         network.request(
             urljoin(API_URL, "api/v1/matches/all"),
@@ -175,6 +173,8 @@ async def get_events(base_url: str, cached_keys: list[str]) -> list[dict[str, st
     now = Time.clean(Time.now())
 
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
+        log.info("Refreshing API cache")
+
         api_data = await refresh_api_cache(now)
         API_FILE.write(api_data)
 
diff --git a/M3U8/scrapers/webcast.py b/M3U8/scrapers/webcast.py
index 5bf24beb..eb30e8af 100644
--- a/M3U8/scrapers/webcast.py
+++ b/M3U8/scrapers/webcast.py
@@ -27,7 +27,7 @@ def fix_event(s: str) -> str:
 async def process_event(url: str, url_num: int) -> str | None:
 
     if not (event_data := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return
 
@@ -50,7 +50,7 @@ async def process_event(url: str, url_num: int) -> str | None:
             log=log,
         )
     ):
-        log.info(f"URL {url_num}) Failed to load iframe source.")
+        log.warning(f"URL {url_num}) Failed to load iframe source.")
 
         return
 
diff --git a/M3U8/scrapers/xstreameast.py b/M3U8/scrapers/xstreameast.py
index 9bb420f9..593d6555 100644
--- a/M3U8/scrapers/xstreameast.py
+++ b/M3U8/scrapers/xstreameast.py
@@ -32,7 +32,7 @@ async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]
     nones = None, None
 
     if not (html_data := await network.request(url, log=log)):
-        log.info(f"URL {url_num}) Failed to load url.")
+        log.warning(f"URL {url_num}) Failed to load url.")
 
         return nones
     soup = HTMLParser(html_data.content)