parent 8adc4f7036
commit 00000d9134
7 changed files with 33 additions and 59 deletions
@@ -2,7 +2,16 @@
 import asyncio
 from pathlib import Path
 
-from scrapers import fstv, streambtw, streameast, streamed, strmd, tvpass, watchfooty
+from scrapers import (
+    fstv,
+    ppv,
+    streambtw,
+    streameast,
+    streamed,
+    strmd,
+    tvpass,
+    watchfooty,
+)
 from scrapers.utils import get_logger, network
 
 log = get_logger(__name__)
@@ -29,6 +38,7 @@ async def main() -> None:
 
    tasks = [
        asyncio.create_task(fstv.scrape(network.client)),
+       asyncio.create_task(ppv.scrape(network.client)),
        asyncio.create_task(streambtw.scrape(network.client)),
        asyncio.create_task(streameast.scrape(network.client)),
        asyncio.create_task(streamed.scrape(network.client)),
@@ -41,6 +51,7 @@ async def main() -> None:
 
    additions = (
        fstv.urls
+       | ppv.urls
        | streambtw.urls
        | streameast.urls
        | streamed.urls
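A note on the pattern: main() fans the scrapers out as concurrent tasks, then merges each module's urls set with set union. A minimal runnable sketch of that shape, with hypothetical stand-in scrapers (FakeScraper and the URLs are invented for the demo):

import asyncio


class FakeScraper:
    """Stand-in for a scraper module: exposes `urls` and an async scrape()."""

    def __init__(self, found: set[str]) -> None:
        self.urls: set[str] = set()
        self._found = found

    async def scrape(self, client: object) -> None:
        await asyncio.sleep(0)  # stand-in for network I/O
        self.urls |= self._found


async def main() -> None:
    a = FakeScraper({"http://a/1.m3u8"})
    b = FakeScraper({"http://b/2.m3u8"})

    # fan out, then wait for every scraper to finish
    tasks = [asyncio.create_task(s.scrape(None)) for s in (a, b)]
    await asyncio.gather(*tasks)

    additions = a.urls | b.urls  # set union, as in the hunk above
    print(additions)


asyncio.run(main())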
@@ -182,7 +182,6 @@ async def process_event(
 
    if captured:
        log.info(f"URL {url_num}) Captured M3U8")
-
        return captured[-1]
 
    log.warning(f"URL {url_num}) No M3U8 captured")
@@ -25,36 +25,9 @@ MIRRORS = [
 ]
 
 
-def get_tvg(sport: str, event: str) -> str:
-    match sport:
-        case "American Football":
-            if leagues.is_valid(event, "NFL"):
-                return "NFL.Dummy.us"
-
-            else:
-                return "NCAA.Sports.Dummy.us"
-
-        case "Basketball":
-            if leagues.is_valid(event, "NBA"):
-                return "NBA.Basketball.Dummy.us"
-
-            elif leagues.is_valid(event, "WNBA"):
-                return "WNBA.dummy.us"
-
-            # NCAA
-
-            else:
-                return "Basketball.Dummy.us"
-
-        case "Ice Hockey":
-            return "NHL.Hockey.Dummy.us"
-
-        case _:
-            return leagues.info(sport)[0]
-
-
 async def refresh_api_cache(
-    client: httpx.AsyncClient, url: str
+    client: httpx.AsyncClient,
+    url: str,
 ) -> dict[str, dict[str, str]]:
    log.info("Refreshing API cache")
 
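The deleted get_tvg() mapping is replaced by leagues.get_tvg_info(sport, event), which the scrape() hunk further down unpacks as a (tvg_id, pic) pair. A hedged sketch of what such a shared helper could look like, reusing the removed match block; the body and the is_valid stub are assumptions, only the tuple shape comes from this diff:

def is_valid(event: str, league: str) -> bool:
    # stand-in for leagues.is_valid(); the real lookup lives in the leagues module
    return league.lower() in event.lower()


def get_tvg_info(sport: str, event: str) -> tuple[str | None, str | None]:
    # hypothetical consolidation of the removed get_tvg() match block
    match sport:
        case "American Football":
            tvg_id = "NFL.Dummy.us" if is_valid(event, "NFL") else "NCAA.Sports.Dummy.us"
        case "Basketball":
            if is_valid(event, "NBA"):
                tvg_id = "NBA.Basketball.Dummy.us"
            elif is_valid(event, "WNBA"):
                tvg_id = "WNBA.dummy.us"
            else:
                tvg_id = "Basketball.Dummy.us"
        case "Ice Hockey":
            tvg_id = "NHL.Hockey.Dummy.us"
        case _:
            tvg_id = None
    return tvg_id, None  # (tvg_id, fallback logo); pic is None when unknown


print(get_tvg_info("Basketball", "NBA: Lakers vs Celtics"))  # ('NBA.Basketball.Dummy.us', None)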
@@ -73,6 +46,7 @@ async def process_event(
    url_num: int,
    context: BrowserContext,
 ) -> str | None:
+
    page = await context.new_page()
 
    captured: list[str] = []
@@ -93,7 +67,7 @@ async def process_event(
    wait_task = asyncio.create_task(got_one.wait())
 
    try:
-       await asyncio.wait_for(wait_task, timeout=10)
+       await asyncio.wait_for(wait_task, timeout=6)
    except asyncio.TimeoutError:
        log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
        return
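The timeout tightens from 10s to 6s around the same capture pattern: a response hook appends to captured and sets an asyncio.Event, and the caller waits on that event with asyncio.wait_for. A self-contained sketch of the pattern; the simulated response callback is invented for the demo:

import asyncio


async def demo() -> None:
    captured: list[str] = []
    got_one = asyncio.Event()

    def on_response(url: str) -> None:
        # stand-in for a Playwright response handler
        if url.endswith(".m3u8"):
            captured.append(url)
            got_one.set()

    # simulate a matching response arriving after 1 second
    asyncio.get_running_loop().call_later(1, on_response, "http://x/stream.m3u8")

    wait_task = asyncio.create_task(got_one.wait())
    try:
        await asyncio.wait_for(wait_task, timeout=6)
    except asyncio.TimeoutError:
        print("Timed out waiting for M3U8.")
        return

    print("captured:", captured[-1])


asyncio.run(demo())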
@@ -109,7 +83,6 @@ async def process_event(
 
    if captured:
        log.info(f"URL {url_num}) Captured M3U8")
-
        return captured[-1]
 
    log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
@@ -129,6 +102,7 @@ async def get_events(
    base_url: str,
    cached_keys: set[str],
 ) -> list[dict[str, str]]:
+
    if not (api_data := API_FILE.load(per_entry=False)):
        api_data = await refresh_api_cache(
            client,
@@ -153,12 +127,13 @@ async def get_events(
            continue
 
        for event in stream_group["streams"]:
-           name, start_ts, logo, iframe = (
-               event["name"],
-               event["starts_at"],
-               event["poster"],
-               event["iframe"],
-           )
+           name = event.get("name")
+           start_ts = event.get("starts_at")
+           logo = event.get("poster")
+           iframe = event.get("iframe")
+
+           if not (name and start_ts and iframe):
+               continue
 
            key = f"[{sport}] {name} (PPV)"
 
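The rewrite above trades tuple unpacking with event["name"] indexing, which raises KeyError on the first malformed entry and aborts the whole loop, for .get() plus one explicit completeness check that just skips bad entries. A small runnable illustration with made-up data:

events = [
    {"name": "Game A", "starts_at": 1700000000, "poster": None, "iframe": "http://x/a"},
    {"starts_at": 1700000001, "iframe": "http://x/b"},  # missing "name": skipped, not fatal
]

for event in events:
    name = event.get("name")
    start_ts = event.get("starts_at")
    logo = event.get("poster")      # optional; a fallback logo is applied later
    iframe = event.get("iframe")

    if not (name and start_ts and iframe):
        continue

    print(f"[Sport] {name} (PPV)")  # only "Game A" survives the check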
@@ -209,15 +184,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
        browser, context = await network.browser(p)
 
        for i, ev in enumerate(events, start=1):
-           url = await network.safe_process(
-               lambda: process_event(
-                   ev["link"],
-                   url_num=i,
-                   context=context,
-               ),
-               url_num=i,
-               log=log,
-           )
+           handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+
+           url = await network.safe_process(handler, url_num=i, log=log)
 
            if url:
                sport, event, logo, ts = (
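Swapping the loop's lambda for functools.partial is the classic fix for late binding: a lambda captures i and ev by reference, so any call deferred past the current iteration can observe the last loop values, while partial freezes the current values when the handler is built. A minimal demonstration with a synchronous stand-in for process_event:

from functools import partial


def process_event(url: str, url_num: int) -> str:
    # stand-in for the real async process_event
    return f"URL {url_num}) {url}"


handlers_lambda = []
handlers_partial = []
for i, ev in enumerate([{"link": "http://x/a"}, {"link": "http://x/b"}], start=1):
    handlers_lambda.append(lambda: process_event(ev["link"], url_num=i))
    handlers_partial.append(partial(process_event, url=ev["link"], url_num=i))

# called after the loop, the lambdas all see the final i and ev
print([h() for h in handlers_lambda])   # ['URL 2) http://x/b', 'URL 2) http://x/b']
print([h() for h in handlers_partial])  # ['URL 1) http://x/a', 'URL 2) http://x/b']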
@@ -229,12 +198,14 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
            key = f"[{sport}] {event} (PPV)"
 
+           tvg_id, pic = leagues.get_tvg_info(sport, event)
+
            entry = {
                "url": url,
-               "logo": logo,
+               "logo": logo or pic,
                "base": base_url,
                "timestamp": ts,
-               "id": get_tvg(sport, event) or "Live.Event.us",
+               "id": tvg_id or "Live.Event.us",
            }
 
            urls[key] = cached_urls[key] = entry
@@ -247,6 +218,3 @@ async def scrape(client: httpx.AsyncClient) -> None:
        log.info("No new events found")
 
    CACHE_FILE.write(cached_urls)
-
-
-# same provider as strmd.py
@@ -71,7 +71,6 @@ async def process_event(
 
    if captured:
        log.info(f"URL {url_num}) Captured M3U8")
-
        return captured[-1]
 
    log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
@@ -58,7 +58,6 @@ async def process_event(
 
    if captured:
        log.info(f"URL {url_num}) Captured M3U8")
-
        return captured[-1]
 
    log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
@@ -92,7 +92,6 @@ async def process_event(
 
    if captured:
        log.info(f"URL {url_num}) Captured M3U8")
-
        return captured[-1]
 
    log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
@@ -164,8 +163,8 @@ async def get_events(
            if not sources:
                continue
 
-           source = sources[0]
-           # source = sources[1] if len(sources) > 1 else sources[0]
+           # source = sources[0]
+           source = sources[1] if len(sources) > 1 else sources[0]
            source_type = source.get("source")
            stream_id = source.get("id")
@@ -131,7 +131,6 @@ async def process_event(
 
    if captured:
        log.info(f"URL {url_num}) Captured M3U8")
-
        return captured[-1]
 
    log.warning(f"URL {url_num}) No M3U8 captured after waiting.")