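"""Event scraper for streamfree.to.

Pulls the site's stream listing, wraps each stream behind the proxy helper
(`network.build_proxy_url`), and caches the collected events on disk.
"""
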
from urllib.parse import urljoin

import httpx

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

# Module-level registry of scraped events, populated by scrape().
urls: dict[str, dict[str, str | float]] = {}

TAG = "STRMFREE"

# On-disk cache of scraped events; `exp` is assumed to be the expiry window in
# seconds (19,800 s = 5.5 hours).
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=19_800)

BASE_URL = "https://streamfree.to/"

async def refresh_api_cache(client: httpx.AsyncClient) -> dict[str, dict[str, list]]:
    """Fetch the raw stream listing from the site API.

    Returns an empty dict if the request fails or the server responds with an
    error status.
    """
    url = urljoin(BASE_URL, "streams")

    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        # The request may fail before a response object exists, so log the
        # target URL rather than a response attribute.
        log.error(f'Failed to fetch "{url}": {e}')
        return {}

    return r.json()

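# Assumed shape of the payload consumed by get_events(), inferred only from the
# keys this module reads; field names and nesting beyond these are unknown:
#
#   {
#       "streams": {
#           "<category>": [
#               {"league": ..., "name": ..., "stream_key": ..., "thumbnail_url": ...},
#               ...
#           ]
#       }
#   }
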
async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
    """Build proxied event entries from the API payload."""
    api_data = await refresh_api_cache(client)

    events = {}

    now = Time.now().timestamp()

    for streams in api_data.get("streams", {}).values():
        if not streams:
            continue

        for stream in streams:
            sport, name, stream_key = (
                stream.get("league"),
                stream.get("name"),
                stream.get("stream_key"),
            )

            # Skip entries missing any field required to build the proxy URL.
            if not (sport and name and stream_key):
                continue

            key = f"[{sport}] {name} ({TAG})"

            # Resolve (possibly relative) thumbnail paths against the site root.
            logo = (
                urljoin(BASE_URL, thumbnail)
                if (thumbnail := stream.get("thumbnail_url"))
                else None
            )

            tvg_id, pic = leagues.get_tvg_info(sport, name)

            events[key] = {
                "url": network.build_proxy_url(
                    tag=TAG,
                    path=f"{stream_key}/index.m3u8",
                    query={"stream_name": name},
                ),
                "logo": logo or pic,
                "base": BASE_URL,
                "timestamp": now,
                "id": tvg_id or "Live.Event.us",
            }

    return events

async def scrape(client: httpx.AsyncClient) -> None:
    """Fill the module-level `urls` dict from cache, or scrape fresh events."""
    if cached := CACHE_FILE.load():
        urls.update(cached)
        log.info(f"Loaded {len(urls)} event(s) from cache")
        return

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client)

    urls.update(events)

    CACHE_FILE.write(urls)

    log.info(f"Collected and cached {len(urls)} new event(s)")
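

# Illustrative manual-test entry point, not part of the scraper itself. It
# assumes this module lives inside a package (it uses a relative import), so it
# would be run as `python -m <package>.<module>`; the bare AsyncClient with no
# extra settings is a placeholder.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        async with httpx.AsyncClient() as client:
            await scrape(client)
        log.info(f"Demo finished with {len(urls)} event(s)")

    asyncio.run(_demo())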