diff --git a/M3U8/fetch.py b/M3U8/fetch.py
index 1683eb5..407da34 100644
--- a/M3U8/fetch.py
+++ b/M3U8/fetch.py
@@ -53,18 +53,18 @@ async def main() -> None:
         asyncio.create_task(embedhd.scrape()),
         asyncio.create_task(fawa.scrape()),
         asyncio.create_task(istreameast.scrape()),
-        # asyncio.create_task(pixel.scrape()),
+        asyncio.create_task(pixel.scrape()),
         asyncio.create_task(ppv.scrape()),
         asyncio.create_task(roxie.scrape()),
         asyncio.create_task(shark.scrape()),
         asyncio.create_task(sport9.scrape()),
-        # asyncio.create_task(streambtw.scrape()),
+        asyncio.create_task(streambtw.scrape()),
         asyncio.create_task(streamcenter.scrape()),
         asyncio.create_task(streamfree.scrape()),
         asyncio.create_task(streamhub.scrape()),
         asyncio.create_task(streamsgate.scrape()),
         asyncio.create_task(strmd.scrape()),
-        # asyncio.create_task(totalsportek.scrape()),
+        asyncio.create_task(totalsportek.scrape()),
         asyncio.create_task(tvpass.scrape()),
         asyncio.create_task(webcast.scrape()),
     ]
diff --git a/M3U8/scrapers/ppv.py b/M3U8/scrapers/ppv.py
index 91c1659..284171a 100644
--- a/M3U8/scrapers/ppv.py
+++ b/M3U8/scrapers/ppv.py
@@ -97,7 +97,9 @@ async def scrape() -> None:
 
     if not (base_url and api_url):
         log.warning("No working PPV mirrors")
+        CACHE_FILE.write(cached_urls)
+
         return
 
     log.info(f'Scraping from "{base_url}"')
 
diff --git a/M3U8/scrapers/streambtw.py b/M3U8/scrapers/streambtw.py
index 78cc3e5..8bf8b37 100644
--- a/M3U8/scrapers/streambtw.py
+++ b/M3U8/scrapers/streambtw.py
@@ -15,7 +15,7 @@ TAG = "STRMBTW"
 
 CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=3_600)
 
-BASE_URL = "https://streambtw.com"
+MIRRORS = ["https://hiteasport.info/", "https://streambtw.com/"]
 
 
 def fix_league(s: str) -> str:
@@ -45,10 +45,10 @@ async def process_event(url: str, url_num: int) -> str | None:
     return stream_link
 
 
-async def get_events() -> list[dict[str, str]]:
+async def get_events(url: str) -> list[dict[str, str]]:
     events = []
 
-    if not (html_data := await network.request(BASE_URL, log=log)):
+    if not (html_data := await network.request(url, log=log)):
         return events
 
     soup = HTMLParser(html_data.content)
@@ -72,7 +72,7 @@ async def get_events() -> list[dict[str, str]]:
             {
                 "sport": fix_league(league),
                 "event": name,
-                "link": urljoin(BASE_URL, href),
+                "link": urljoin(url, href),
             }
         )
 
@@ -87,9 +87,14 @@ async def scrape() -> None:
 
         return
 
-    log.info(f'Scraping from "{BASE_URL}"')
+    if not (base_url := await network.get_base(MIRRORS)):
+        log.warning("No working StreamBTW mirrors")
 
-    events = await get_events()
+        return
+
+    log.info(f'Scraping from "{base_url}"')
+
+    events = await get_events(base_url)
 
     log.info(f"Processing {len(events)} new URL(s)")
 
diff --git a/M3U8/scrapers/strmd.py b/M3U8/scrapers/strmd.py
index 7c57415..de9055f 100644
--- a/M3U8/scrapers/strmd.py
+++ b/M3U8/scrapers/strmd.py
@@ -126,7 +126,9 @@ async def scrape() -> None:
 
     if not (base_url := await network.get_base(MIRRORS)):
         log.warning("No working STRMD mirrors")
+        CACHE_FILE.write(cached_urls)
+
         return
 
     log.info(f'Scraping from "{base_url}"')
 