from urllib.parse import urljoin

import httpx

from .utils import Cache, Time, get_logger, leagues

log = get_logger(__name__)

# In-memory registry of scraped events, keyed by event display name.
urls: dict[str, dict[str, str | float]] = {}

# Cached results expire after 19,800 seconds (5.5 hours).
CACHE_FILE = Cache("streamfree.json", exp=19_800)

BASE_URL = "https://streamfree.to"

# Tag appended to each event name to mark this scraper as the source.
TAG = "STRMFR"


async def refresh_api_cache(
    client: httpx.AsyncClient,
    url: str,
) -> dict[str, dict[str, list]]:
    """Fetch the stream listing as JSON, returning an empty dict on failure."""
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')
        return {}

    return r.json()
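
# The payload shape assumed above, inferred from how get_events() consumes
# it below; field names beyond these three are unknown:
#
#   {
#       "streams": {
#           "<category>": [
#               {"league": "...", "name": "...", "stream_key": "..."},
#               ...
#           ]
#       }
#   }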


async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
    """Build the event table from the site's stream listing."""
    api_data = await refresh_api_cache(client, urljoin(BASE_URL, "streams"))

    events = {}

    for _, streams in api_data.get("streams", {}).items():
        if not streams:
            continue

        for stream in streams:
            sport, name = stream["league"], stream["name"]

            key = f"[{sport}] {name} ({TAG})"

            tvg_id, logo = leagues.get_tvg_info(sport, name)

            events[key] = {
                "url": urljoin(BASE_URL, f"live/{stream['stream_key']}720p/index.m3u8"),
                "logo": logo,
                "base": BASE_URL,
                "timestamp": Time.now().timestamp(),
                "id": tvg_id or "Live.Event.us",
            }

    return events
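
# A produced entry looks roughly like this (league, name, and values are
# illustrative only):
#
#   "[NBA] Lakers vs Celtics (STRMFR)": {
#       "url": "https://streamfree.to/live/<stream_key>720p/index.m3u8",
#       "logo": <logo from leagues.get_tvg_info>,
#       "base": "https://streamfree.to",
#       "timestamp": <epoch seconds from Time.now()>,
#       "id": "Live.Event.us",
#   }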


async def scrape(client: httpx.AsyncClient) -> None:
    """Populate the module-level event table, preferring the on-disk cache."""
    if cached := CACHE_FILE.load():
        urls.update(cached)
        log.info(f"Loaded {len(urls)} event(s) from cache")
        return

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client)

    urls.update(events)

    CACHE_FILE.write(urls)

    log.info(f"Collected and cached {len(urls)} new event(s)")
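
# A minimal sketch of driving the scraper standalone. The real entry point
# presumably lives elsewhere in the package; the timeout value is an
# assumption, and because of the relative import above this only runs as
# `python -m <package>.<module>`, whatever those names actually are.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        async with httpx.AsyncClient(timeout=30.0) as client:
            await scrape(client)
            for name, event in urls.items():
                log.info(f"{name} -> {event['url']}")

    asyncio.run(_demo())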