iptv/M3U8/scrapers/streamfree.py
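
"""Scraper for live events on streamfree.to.

Pulls the site's stream listing, builds a proxied M3U8 playlist URL for
each event, and caches the results so later runs can skip the network.
The Cache, Time, network, and leagues helpers come from this package's
shared utils module.
"""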

from urllib.parse import urljoin

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)
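# Event display name -> playlist metadata; filled in by scrape().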
urls: dict[str, dict[str, str | float]] = {}

TAG = "STRMFREE"

# exp is presumably a TTL in seconds; 19_800 s is 5.5 hours.
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=19_800)

BASE_URL = "https://streamfree.to/"

async def get_events() -> dict[str, dict[str, str | float]]:
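    """Fetch the stream listing and return playlist metadata keyed by display name."""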
    events = {}

    if not (
        r := await network.request(
            urljoin(BASE_URL, "streams"),
            log=log,
        )
    ):
        return events

    api_data: dict = r.json()
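    # Assumed response shape, inferred only from the lookups below:
    #   {"streams": {"<group>": [{"league": ..., "name": ..., "stream_key": ...}, ...]}}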

    now = Time.clean(Time.now())

    for streams in api_data.get("streams", {}).values():
        if not streams:
            continue
        for stream in streams:
            sport, name, stream_key = (
                stream.get("league"),
                stream.get("name"),
                stream.get("stream_key"),
            )
            # Skip entries missing any field needed to build the playlist entry.
            if not (sport and name and stream_key):
                continue
            key = f"[{sport}] {name} ({TAG})"
            tvg_id, logo = leagues.get_tvg_info(sport, name)

            events[key] = {
                "url": network.build_proxy_url(
                    tag=TAG,
                    path=f"{stream_key}/index.m3u8",
                    query={"stream_name": name},
                ),
                "logo": logo,
                "base": BASE_URL,
                "timestamp": now.timestamp(),
                "id": tvg_id or "Live.Event.us",
            }
    return events


async def scrape() -> None:
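    """Populate the module-level `urls` mapping, from cache when possible."""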
    if cached := CACHE_FILE.load():
        urls.update(cached)
        log.info(f"Loaded {len(urls)} event(s) from cache")
        return

    log.info(f'Scraping from "{BASE_URL}"')
    events = await network.safe_process(
        get_events,
        url_num=1,
        semaphore=network.HTTP_S,
        log=log,
    )
    urls.update(events or {})

    CACHE_FILE.write(urls)
    log.info(f"Collected and cached {len(urls)} new event(s)")