diff --git a/M3U8/fetch.py b/M3U8/fetch.py
index 75de2ff..010880a 100644
--- a/M3U8/fetch.py
+++ b/M3U8/fetch.py
@@ -64,12 +64,13 @@ async def main() -> None:
         asyncio.create_task(streamsgate.scrape()),
         asyncio.create_task(strmd.scrape()),
         asyncio.create_task(tvpass.scrape()),
-        # asyncio.create_task(watchfooty.scrape()),
         asyncio.create_task(webcast.scrape()),
     ]

     await asyncio.gather(*tasks)

+    await watchfooty.scrape()
+
     additions = (
         embedhd.urls
         | fawa.urls
diff --git a/M3U8/scrapers/embedhd.py b/M3U8/scrapers/embedhd.py
index 30c7622..7a3b284 100644
--- a/M3U8/scrapers/embedhd.py
+++ b/M3U8/scrapers/embedhd.py
@@ -89,46 +89,48 @@ async def scrape() -> None:
     async with async_playwright() as p:
         browser, context = await network.browser(p)

-        for i, ev in enumerate(events, start=1):
-            handler = partial(
-                network.process_event,
-                url=ev["link"],
-                url_num=i,
-                context=context,
-                log=log,
-            )
-
-            url = await network.safe_process(
-                handler,
-                url_num=i,
-                semaphore=network.PW_S,
-                log=log,
-            )
-
-            if url:
-                sport, event, link, ts = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["link"],
-                    ev["timestamp"],
+        try:
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    network.process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                    log=log,
                 )

-                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    semaphore=network.PW_S,
+                    log=log,
+                )

-                key = f"[{sport}] {event} ({TAG})"
+                if url:
+                    sport, event, link, ts = (
+                        ev["sport"],
+                        ev["event"],
+                        ev["link"],
+                        ev["timestamp"],
+                    )

-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": "https://vividmosaica.com/",
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
+                    tvg_id, logo = leagues.get_tvg_info(sport, event)

-                urls[key] = cached_urls[key] = entry
+                    key = f"[{sport}] {event} ({TAG})"

-        await browser.close()
+                    entry = {
+                        "url": url,
+                        "logo": logo,
+                        "base": "https://vividmosaica.com/",
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                        "link": link,
+                    }
+
+                    urls[key] = cached_urls[key] = entry
+
+        finally:
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
diff --git a/M3U8/scrapers/pixel.py b/M3U8/scrapers/pixel.py
index 14a2b73..7a8fa87 100644
--- a/M3U8/scrapers/pixel.py
+++ b/M3U8/scrapers/pixel.py
@@ -88,16 +88,18 @@ async def scrape() -> None:
     async with async_playwright() as p:
         browser, context = await network.browser(p)

-        handler = partial(get_events, context=context)
+        try:
+            handler = partial(get_events, context=context)

-        events = await network.safe_process(
-            handler,
-            url_num=1,
-            semaphore=network.PW_S,
-            log=log,
-        )
+            events = await network.safe_process(
+                handler,
+                url_num=1,
+                semaphore=network.PW_S,
+                log=log,
+            )

-        await browser.close()
+        finally:
+            await browser.close()

     urls.update(events or {})

diff --git a/M3U8/scrapers/ppv.py b/M3U8/scrapers/ppv.py
index 5bc31b4..91c1659 100644
--- a/M3U8/scrapers/ppv.py
+++ b/M3U8/scrapers/ppv.py
@@ -110,48 +110,50 @@ async def scrape() -> None:
     async with async_playwright() as p:
         browser, context = await network.browser(p, browser="external")

-        for i, ev in enumerate(events, start=1):
-            handler = partial(
-                network.process_event,
-                url=ev["link"],
-                url_num=i,
-                context=context,
-                timeout=6,
-                log=log,
-            )
-
-            url = await network.safe_process(
-                handler,
-                url_num=i,
-                semaphore=network.PW_S,
-                log=log,
-            )
-
-            if url:
-                sport, event, logo, ts, link = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["logo"],
-                    ev["timestamp"],
-                    ev["link"],
+        try:
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    network.process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                    timeout=6,
+                    log=log,
                 )

-                key = f"[{sport}] {event} ({TAG})"
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    semaphore=network.PW_S,
+                    log=log,
+                )

-                tvg_id, pic = leagues.get_tvg_info(sport, event)
+                if url:
+                    sport, event, logo, ts, link = (
+                        ev["sport"],
+                        ev["event"],
+                        ev["logo"],
+                        ev["timestamp"],
+                        ev["link"],
+                    )

-                entry = {
-                    "url": url,
-                    "logo": logo or pic,
-                    "base": base_url,
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
+                    key = f"[{sport}] {event} ({TAG})"

-                urls[key] = cached_urls[key] = entry
+                    tvg_id, pic = leagues.get_tvg_info(sport, event)

-        await browser.close()
+                    entry = {
+                        "url": url,
+                        "logo": logo or pic,
+                        "base": base_url,
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                        "link": link,
+                    }
+
+                    urls[key] = cached_urls[key] = entry
+
+        finally:
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
diff --git a/M3U8/scrapers/sport9.py b/M3U8/scrapers/sport9.py
index f795d45..9842af8 100644
--- a/M3U8/scrapers/sport9.py
+++ b/M3U8/scrapers/sport9.py
@@ -109,45 +109,47 @@ async def scrape() -> None:
     async with async_playwright() as p:
         browser, context = await network.browser(p, browser="external")

-        for i, ev in enumerate(events, start=1):
-            handler = partial(
-                network.process_event,
-                url=ev["link"],
-                url_num=i,
-                context=context,
-                log=log,
-            )
-
-            url = await network.safe_process(
-                handler,
-                url_num=i,
-                semaphore=network.PW_S,
-                log=log,
-            )
-
-            if url:
-                sport, event, link = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["link"],
+        try:
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    network.process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                    log=log,
                 )

-                key = f"[{sport}] {event} ({TAG})"
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    semaphore=network.PW_S,
+                    log=log,
+                )

-                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                if url:
+                    sport, event, link = (
+                        ev["sport"],
+                        ev["event"],
+                        ev["link"],
+                    )

-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": "https://vividmosaica.com/",
-                    "timestamp": now,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
+                    key = f"[{sport}] {event} ({TAG})"

-                urls[key] = cached_urls[key] = entry
+                    tvg_id, logo = leagues.get_tvg_info(sport, event)

-        await browser.close()
+                    entry = {
+                        "url": url,
+                        "logo": logo,
+                        "base": "https://vividmosaica.com/",
+                        "timestamp": now,
+                        "id": tvg_id or "Live.Event.us",
+                        "link": link,
+                    }
+
+                    urls[key] = cached_urls[key] = entry
+
+        finally:
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
diff --git a/M3U8/scrapers/streamcenter.py b/M3U8/scrapers/streamcenter.py
index acf3622..d041bf1 100644
--- a/M3U8/scrapers/streamcenter.py
+++ b/M3U8/scrapers/streamcenter.py
@@ -107,46 +107,48 @@ async def scrape() -> None:
     async with async_playwright() as p:
         browser, context = await network.browser(p, browser="external")

-        for i, ev in enumerate(events, start=1):
-            handler = partial(
-                network.process_event,
-                url=ev["link"],
-                url_num=i,
-                context=context,
-                log=log,
-            )
-
-            url = await network.safe_process(
-                handler,
-                url_num=i,
-                semaphore=network.PW_S,
-                log=log,
-            )
-
-            if url:
-                sport, event, ts, link = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["timestamp"],
-                    ev["link"],
+        try:
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    network.process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                    log=log,
                 )

-                key = f"[{sport}] {event} ({TAG})"
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    semaphore=network.PW_S,
+                    log=log,
+                )

-                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                if url:
+                    sport, event, ts, link = (
+                        ev["sport"],
+                        ev["event"],
+                        ev["timestamp"],
+                        ev["link"],
+                    )

-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": "https://streamcenter.xyz",
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
+                    key = f"[{sport}] {event} ({TAG})"

-                urls[key] = cached_urls[key] = entry
+                    tvg_id, logo = leagues.get_tvg_info(sport, event)

-        await browser.close()
+                    entry = {
+                        "url": url,
+                        "logo": logo,
+                        "base": "https://streamcenter.xyz",
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                        "link": link,
+                    }
+
+                    urls[key] = cached_urls[key] = entry
+
+        finally:
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
diff --git a/M3U8/scrapers/streamhub.py b/M3U8/scrapers/streamhub.py
index 9eda928..5f952a0 100644
--- a/M3U8/scrapers/streamhub.py
+++ b/M3U8/scrapers/streamhub.py
@@ -151,48 +151,50 @@ async def scrape() -> None:
     async with async_playwright() as p:
         browser, context = await network.browser(p, browser="external")

-        for i, ev in enumerate(events, start=1):
-            handler = partial(
-                network.process_event,
-                url=ev["link"],
-                url_num=i,
-                context=context,
-                timeout=5,
-                log=log,
-            )
-
-            url = await network.safe_process(
-                handler,
-                url_num=i,
-                semaphore=network.PW_S,
-                log=log,
-            )
-
-            if url:
-                sport, event, logo, link, ts = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["logo"],
-                    ev["link"],
-                    ev["event_ts"],
+        try:
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    network.process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                    timeout=5,
+                    log=log,
                 )

-                key = f"[{sport}] {event} ({TAG})"
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    semaphore=network.PW_S,
+                    log=log,
+                )

-                tvg_id, pic = leagues.get_tvg_info(sport, event)
+                if url:
+                    sport, event, logo, link, ts = (
+                        ev["sport"],
+                        ev["event"],
+                        ev["logo"],
+                        ev["link"],
+                        ev["event_ts"],
+                    )

-                entry = {
-                    "url": url,
-                    "logo": logo or pic,
-                    "base": "https://storytrench.net/",
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
+                    key = f"[{sport}] {event} ({TAG})"

-                urls[key] = cached_urls[key] = entry
+                    tvg_id, pic = leagues.get_tvg_info(sport, event)

-        await browser.close()
+                    entry = {
+                        "url": url,
+                        "logo": logo or pic,
+                        "base": "https://storytrench.net/",
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                        "link": link,
+                    }
+
+                    urls[key] = cached_urls[key] = entry
+
+        finally:
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
diff --git a/M3U8/scrapers/streamsgate.py b/M3U8/scrapers/streamsgate.py
index 1a9ee6e..a056d4c 100644
--- a/M3U8/scrapers/streamsgate.py
+++ b/M3U8/scrapers/streamsgate.py
@@ -139,46 +139,48 @@ async def scrape() -> None:
     async with async_playwright() as p:
         browser, context = await network.browser(p, browser="external")

-        for i, ev in enumerate(events, start=1):
-            handler = partial(
-                network.process_event,
-                url=ev["link"],
-                url_num=i,
-                context=context,
-                log=log,
-            )
-
-            url = await network.safe_process(
-                handler,
-                url_num=i,
-                semaphore=network.PW_S,
-                log=log,
-            )
-
-            if url:
-                sport, event, ts, link = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["timestamp"],
-                    ev["link"],
+        try:
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    network.process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                    log=log,
                 )

-                key = f"[{sport}] {event} ({TAG})"
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    semaphore=network.PW_S,
+                    log=log,
+                )

-                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                if url:
+                    sport, event, ts, link = (
+                        ev["sport"],
+                        ev["event"],
+                        ev["timestamp"],
+                        ev["link"],
+                    )

-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": BASE_URL,
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
+                    key = f"[{sport}] {event} ({TAG})"

-                urls[key] = cached_urls[key] = entry
+                    tvg_id, logo = leagues.get_tvg_info(sport, event)

-        await browser.close()
+                    entry = {
+                        "url": url,
+                        "logo": logo,
+                        "base": BASE_URL,
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                        "link": link,
+                    }
+
+                    urls[key] = cached_urls[key] = entry
+
+        finally:
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
diff --git a/M3U8/scrapers/strmd.py b/M3U8/scrapers/strmd.py
index 21b5b44..7c57415 100644
--- a/M3U8/scrapers/strmd.py
+++ b/M3U8/scrapers/strmd.py
@@ -139,47 +139,49 @@ async def scrape() -> None:
     async with async_playwright() as p:
         browser, context = await network.browser(p, browser="external")

-        for i, ev in enumerate(events, start=1):
-            handler = partial(
-                network.process_event,
-                url=ev["link"],
-                url_num=i,
-                context=context,
-                log=log,
-            )
-
-            url = await network.safe_process(
-                handler,
-                url_num=i,
-                semaphore=network.PW_S,
-                log=log,
-            )
-
-            if url:
-                sport, event, logo, ts, link = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["logo"],
-                    ev["timestamp"],
-                    ev["link"],
+        try:
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    network.process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                    log=log,
                 )

-                key = f"[{sport}] {event} ({TAG})"
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    semaphore=network.PW_S,
+                    log=log,
+                )

-                tvg_id, pic = leagues.get_tvg_info(sport, event)
+                if url:
+                    sport, event, logo, ts, link = (
+                        ev["sport"],
+                        ev["event"],
+                        ev["logo"],
+                        ev["timestamp"],
+                        ev["link"],
+                    )

-                entry = {
-                    "url": url,
-                    "logo": logo or pic,
-                    "base": "https://embedsports.top/",
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
+                    key = f"[{sport}] {event} ({TAG})"

-                urls[key] = cached_urls[key] = entry
+                    tvg_id, pic = leagues.get_tvg_info(sport, event)

-        await browser.close()
+                    entry = {
+                        "url": url,
+                        "logo": logo or pic,
+                        "base": "https://embedsports.top/",
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                        "link": link,
+                    }
+
+                    urls[key] = cached_urls[key] = entry
+
+        finally:
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
diff --git a/M3U8/scrapers/watchfooty.py b/M3U8/scrapers/watchfooty.py
index 4367ac8..f749688 100644
--- a/M3U8/scrapers/watchfooty.py
+++ b/M3U8/scrapers/watchfooty.py
@@ -261,51 +261,53 @@ async def scrape() -> None:

     if events:
         async with async_playwright() as p:
-            browser, context = await network.browser(p)
+            browser, context = await network.browser(p, browser="external")

-            for i, ev in enumerate(events, start=1):
-                handler = partial(
-                    process_event,
-                    url=ev["link"],
-                    url_num=i,
-                    context=context,
-                )
+            try:
+                for i, ev in enumerate(events, start=1):
+                    handler = partial(
+                        process_event,
+                        url=ev["link"],
+                        url_num=i,
+                        context=context,
+                    )

-                url, iframe = await network.safe_process(
-                    handler,
-                    url_num=i,
-                    semaphore=network.PW_S,
-                    log=log,
-                )
+                    url, iframe = await network.safe_process(
+                        handler,
+                        url_num=i,
+                        semaphore=network.PW_S,
+                        log=log,
+                    )

-                sport, event, logo, ts, link = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["logo"],
-                    ev["timestamp"],
-                    ev["link"],
-                )
+                    sport, event, logo, ts, link = (
+                        ev["sport"],
+                        ev["event"],
+                        ev["logo"],
+                        ev["timestamp"],
+                        ev["link"],
+                    )

-                key = f"[{sport}] {event} ({TAG})"
+                    key = f"[{sport}] {event} ({TAG})"

-                tvg_id, pic = leagues.get_tvg_info(sport, event)
+                    tvg_id, pic = leagues.get_tvg_info(sport, event)

-                entry = {
-                    "url": url,
-                    "logo": logo or pic,
-                    "base": iframe,
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
+                    entry = {
+                        "url": url,
+                        "logo": logo or pic,
+                        "base": iframe,
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                        "link": link,
+                    }

-                cached_urls[key] = entry
+                    cached_urls[key] = entry

-                if url:
-                    valid_count += 1
-                    urls[key] = entry
+                    if url:
+                        valid_count += 1
+                        urls[key] = entry

-            await browser.close()
+            finally:
+                await browser.close()

     if new_count := valid_count - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
diff --git a/M3U8/scrapers/webcast.py b/M3U8/scrapers/webcast.py
index 8a5623b..cc3f7f5 100644
--- a/M3U8/scrapers/webcast.py
+++ b/M3U8/scrapers/webcast.py
@@ -129,46 +129,48 @@ async def scrape() -> None:
     async with async_playwright() as p:
         browser, context = await network.browser(p)

-        for i, ev in enumerate(events, start=1):
-            handler = partial(
-                network.process_event,
-                url=ev["link"],
-                url_num=i,
-                context=context,
-                log=log,
-            )
-
-            url = await network.safe_process(
-                handler,
-                url_num=i,
-                semaphore=network.PW_S,
-                log=log,
-            )
-
-            if url:
-                sport, event, ts, link = (
-                    ev["sport"],
-                    ev["event"],
-                    ev["event_ts"],
-                    ev["link"],
+        try:
+            for i, ev in enumerate(events, start=1):
+                handler = partial(
+                    network.process_event,
+                    url=ev["link"],
+                    url_num=i,
+                    context=context,
+                    log=log,
                )

-                key = f"[{sport}] {event} ({TAG})"
+                url = await network.safe_process(
+                    handler,
+                    url_num=i,
+                    semaphore=network.PW_S,
+                    log=log,
+                )

-                tvg_id, logo = leagues.get_tvg_info(sport, event)
+                if url:
+                    sport, event, ts, link = (
+                        ev["sport"],
+                        ev["event"],
+                        ev["event_ts"],
+                        ev["link"],
+                    )

-                entry = {
-                    "url": url,
-                    "logo": logo,
-                    "base": BASE_URLS[sport],
-                    "timestamp": ts,
-                    "id": tvg_id or "Live.Event.us",
-                    "link": link,
-                }
+                    key = f"[{sport}] {event} ({TAG})"

-                urls[key] = cached_urls[key] = entry
+                    tvg_id, logo = leagues.get_tvg_info(sport, event)

-        await browser.close()
+                    entry = {
+                        "url": url,
+                        "logo": logo,
+                        "base": BASE_URLS[sport],
+                        "timestamp": ts,
+                        "id": tvg_id or "Live.Event.us",
+                        "link": link,
+                    }
+
+                    urls[key] = cached_urls[key] = entry
+
+        finally:
+            await browser.close()

     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
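Every scraper hunk above applies the same guard: the per-event loop moves inside a `try:` block and `browser.close()` moves into the matching `finally:`, so an exception while processing one event no longer skips the browser shutdown. In `fetch.py`, `watchfooty.scrape()` is awaited on its own after the `asyncio.gather` rather than as one of the gathered tasks. A minimal sketch of that shape, using a plain Playwright launch and placeholder event handling in place of the project's `network.browser` / `network.safe_process` helpers:

```python
# Minimal sketch (assumed, simplified): the real scrapers build handlers with
# functools.partial and route them through network.safe_process.
import asyncio

from playwright.async_api import async_playwright


async def scrape(events: list[dict]) -> dict[str, str]:
    urls: dict[str, str] = {}

    async with async_playwright() as p:
        browser = await p.chromium.launch()
        context = await browser.new_context()

        try:
            for ev in events:
                # An exception on any single event still reaches the finally
                # block, so the browser process is never leaked.
                page = await context.new_page()
                await page.goto(ev["link"])
                urls[ev["event"]] = page.url
                await page.close()
        finally:
            await browser.close()

    return urls


if __name__ == "__main__":
    asyncio.run(scrape([{"event": "Demo", "link": "https://example.com"}]))
```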