doms9 2025-10-19 11:38:06 -04:00
parent 8adc4f7036
commit 00000d9134
7 changed files with 33 additions and 59 deletions

File 1 of 7

@@ -2,7 +2,16 @@
 import asyncio
 from pathlib import Path

-from scrapers import fstv, streambtw, streameast, streamed, strmd, tvpass, watchfooty
+from scrapers import (
+    fstv,
+    ppv,
+    streambtw,
+    streameast,
+    streamed,
+    strmd,
+    tvpass,
+    watchfooty,
+)
 from scrapers.utils import get_logger, network

 log = get_logger(__name__)

@@ -29,6 +38,7 @@ async def main() -> None:
     tasks = [
         asyncio.create_task(fstv.scrape(network.client)),
+        asyncio.create_task(ppv.scrape(network.client)),
         asyncio.create_task(streambtw.scrape(network.client)),
         asyncio.create_task(streameast.scrape(network.client)),
         asyncio.create_task(streamed.scrape(network.client)),

@@ -41,6 +51,7 @@ async def main() -> None:
     additions = (
         fstv.urls
+        | ppv.urls
         | streambtw.urls
         | streameast.urls
         | streamed.urls
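
Context for the merge step: each scraper module exposes a scrape() coroutine and fills a module-level urls mapping, and main() unions those mappings with the dict `|` operator. A minimal runnable sketch of that fan-out/merge shape, with made-up scraper names and payloads rather than the project's real data:

    import asyncio

    async def fake_scraper(name: str, urls: dict[str, str]) -> None:
        await asyncio.sleep(0)  # stand-in for network I/O
        urls[f"[{name}] Example Event"] = f"https://example.com/{name}.m3u8"

    async def demo() -> None:
        fstv_urls: dict[str, str] = {}
        ppv_urls: dict[str, str] = {}

        # run every scraper concurrently, like the tasks list above
        await asyncio.gather(
            fake_scraper("fstv", fstv_urls),
            fake_scraper("ppv", ppv_urls),
        )

        # `|` is the dict-union operator (Python 3.9+); later operands win
        # on key collisions, mirroring the `additions = (...)` expression
        additions = fstv_urls | ppv_urls
        print(additions)

    asyncio.run(demo())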

File 2 of 7

@@ -182,7 +182,6 @@ async def process_event(
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
         return captured[-1]
-
     log.warning(f"URL {url_num}) No M3U8 captured")

File 3 of 7

@@ -25,36 +25,9 @@ MIRRORS = [
 ]

-def get_tvg(sport: str, event: str) -> str:
-    match sport:
-        case "American Football":
-            if leagues.is_valid(event, "NFL"):
-                return "NFL.Dummy.us"
-            else:
-                return "NCAA.Sports.Dummy.us"
-
-        case "Basketball":
-            if leagues.is_valid(event, "NBA"):
-                return "NBA.Basketball.Dummy.us"
-            elif leagues.is_valid(event, "WNBA"):
-                return "WNBA.dummy.us"
-            # NCAA
-            else:
-                return "Basketball.Dummy.us"
-
-        case "Ice Hockey":
-            return "NHL.Hockey.Dummy.us"
-
-        case _:
-            return leagues.info(sport)[0]
-
-
 async def refresh_api_cache(
-    client: httpx.AsyncClient, url: str
+    client: httpx.AsyncClient,
+    url: str,
 ) -> dict[str, dict[str, str]]:
     log.info("Refreshing API cache")
@@ -73,6 +46,7 @@ async def process_event(
     url_num: int,
     context: BrowserContext,
 ) -> str | None:
+
     page = await context.new_page()
     captured: list[str] = []
@@ -93,7 +67,7 @@ async def process_event(
     wait_task = asyncio.create_task(got_one.wait())

     try:
-        await asyncio.wait_for(wait_task, timeout=10)
+        await asyncio.wait_for(wait_task, timeout=6)
     except asyncio.TimeoutError:
         log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
         return
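
The timeout tightening (10s to 6s) sits inside a capture pattern the hunks only show pieces of: a response listener appends matching URLs to `captured` and sets the `got_one` event, and the coroutine waits on that event with a deadline. A self-contained sketch of the same shape, with the browser plumbing replaced by a timer; only the names got_one, captured, and wait_task come from the diff, the rest is an assumption:

    import asyncio

    async def capture_m3u8(timeout: float = 6) -> str | None:
        got_one = asyncio.Event()
        captured: list[str] = []

        def on_response(url: str) -> None:
            # in the real scraper this is attached to the page's response events
            if ".m3u8" in url:
                captured.append(url)
                got_one.set()

        # simulate a stream response arriving shortly after navigation
        asyncio.get_running_loop().call_later(0.1, on_response, "https://example.com/live.m3u8")

        wait_task = asyncio.create_task(got_one.wait())
        try:
            await asyncio.wait_for(wait_task, timeout=timeout)
        except asyncio.TimeoutError:
            return None  # same bail-out path as the hunk's log.warning + return

        return captured[-1] if captured else None

    print(asyncio.run(capture_m3u8()))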
@@ -109,7 +83,6 @@ async def process_event(
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
         return captured[-1]
-
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
@@ -129,6 +102,7 @@ async def get_events(
     base_url: str,
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
+
     if not (api_data := API_FILE.load(per_entry=False)):
         api_data = await refresh_api_cache(
             client,
@@ -153,12 +127,13 @@ async def get_events(
                 continue

             for event in stream_group["streams"]:
-                name, start_ts, logo, iframe = (
-                    event["name"],
-                    event["starts_at"],
-                    event["poster"],
-                    event["iframe"],
-                )
+                name = event.get("name")
+                start_ts = event.get("starts_at")
+                logo = event.get("poster")
+                iframe = event.get("iframe")
+
+                if not (name and start_ts and iframe):
+                    continue

                 key = f"[{sport}] {name} (PPV)"
@@ -209,15 +184,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
             browser, context = await network.browser(p)

             for i, ev in enumerate(events, start=1):
-                url = await network.safe_process(
-                    lambda: process_event(
-                        ev["link"],
-                        url_num=i,
-                        context=context,
-                    ),
-                    url_num=i,
-                    log=log,
-                )
+                handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+
+                url = await network.safe_process(handler, url_num=i, log=log)

                 if url:
                     sport, event, logo, ts = (
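
The lambda-to-partial swap (assuming `from functools import partial` sits among the module's imports, which this diff doesn't show) binds ev["link"], i, and context at creation time. The lambda evaluated them only when safe_process finally called it; harmless if the call happens within the same iteration (e.g. for retries), but partial removes the question entirely. The classic late-binding behavior it guards against:

    from functools import partial

    # lambdas close over the loop variable itself...
    lambdas = [lambda: i for i in range(3)]
    # ...while partial freezes the current value as an argument
    partials = [partial(lambda x: x, i) for i in range(3)]

    print([f() for f in lambdas])   # [2, 2, 2]
    print([f() for f in partials])  # [0, 1, 2]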
@@ -229,12 +198,14 @@ async def scrape(client: httpx.AsyncClient) -> None:
                     key = f"[{sport}] {event} (PPV)"

+                    tvg_id, pic = leagues.get_tvg_info(sport, event)
+
                     entry = {
                         "url": url,
-                        "logo": logo,
+                        "logo": logo or pic,
                         "base": base_url,
                         "timestamp": ts,
-                        "id": get_tvg(sport, event) or "Live.Event.us",
+                        "id": tvg_id or "Live.Event.us",
                     }

                     urls[key] = cached_urls[key] = entry
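
Design note on the new entry fields: `logo or pic` falls back to the league artwork from get_tvg_info() whenever the API's poster is empty or None (which the earlier guard deliberately lets through), and `tvg_id or "Live.Event.us"` preserves the old default for sports the mapping doesn't know.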
@@ -247,6 +218,3 @@ async def scrape(client: httpx.AsyncClient) -> None:
         log.info("No new events found")

     CACHE_FILE.write(cached_urls)
-
-
-# same provider as strmd.py

File 4 of 7

@@ -71,7 +71,6 @@ async def process_event(
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
         return captured[-1]
-
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")

File 5 of 7

@@ -58,7 +58,6 @@ async def process_event(
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
         return captured[-1]
-
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")

File 6 of 7

@@ -92,7 +92,6 @@ async def process_event(
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
         return captured[-1]
-
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
@@ -164,8 +163,8 @@ async def get_events(
             if not sources:
                 continue

-            source = sources[0]
-            # source = sources[1] if len(sources) > 1 else sources[0]
+            # source = sources[0]
+            source = sources[1] if len(sources) > 1 else sources[0]

             source_type = source.get("source")
             stream_id = source.get("id")
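
The swap promotes the previously commented alternative into the live path: prefer the second source when the API returns more than one, falling back to the first otherwise. The conditional expression degrades gracefully on single-source events (payloads below are made up):

    for sources in (
        [{"source": "alpha", "id": 1}],
        [{"source": "alpha", "id": 1}, {"source": "bravo", "id": 2}],
    ):
        source = sources[1] if len(sources) > 1 else sources[0]
        print(source["source"])  # "alpha", then "bravo"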

File 7 of 7

@@ -131,7 +131,6 @@ async def process_event(
     if captured:
         log.info(f"URL {url_num}) Captured M3U8")
         return captured[-1]
-
     log.warning(f"URL {url_num}) No M3U8 captured after waiting.")