Mirror of https://github.com/doms9/iptv.git (synced 2026-03-07 11:18:25 +01:00)
commit 39fe38b259 (parent d6c432c42c)

    update M3U8

4 changed files with 1692 additions and 1288 deletions
M3U8/TV.m3u8: 1446 changes
File diff suppressed because it is too large.
M3U8/events.m3u8: 1446 changes
File diff suppressed because it is too large.
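Both playlist diffs are suppressed for size. For orientation only, an entry in these files plausibly looks like the hypothetical sample below (not a line taken from the commit): the tvg-id and tvg-logo attributes and the proxied stream URL would correspond to the "id", "logo", and "url" keys built by the scrapers (see the deleted module further down), and every host here is a placeholder.

#EXTM3U
#EXTINF:-1 tvg-id="Live.Event.us" tvg-logo="https://example.invalid/logo.png",[NBA] Example Game (STRMFREE)
https://proxy.example.invalid/STRMFREE/abc123/index.m3u8?stream_name=Example+Game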
@@ -18,7 +18,6 @@ from scrapers import (
     sport9,
     streambtw,
     streamcenter,
-    streamfree,
     streamhub,
     streamsgate,
     tvpass,
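Each module imported here appears to follow the same contract: a module-level urls mapping plus an async scrape() coroutine that fills it (the deleted streamfree module at the bottom of this diff is a complete example). A minimal sketch of that inferred contract, with placeholder data; the key format mirrors the "[sport] name (TAG)" pattern from the deleted module:

# sketch of the scraper-module contract inferred from this repo;
# all values are placeholders, not real streams
urls: dict[str, dict[str, str | float]] = {}

TAG = "EXAMPLE"


async def scrape() -> None:
    # a real scraper fetches live events here, then records one entry per stream
    urls[f"[NBA] Example Game ({TAG})"] = {
        "url": "https://example.invalid/abc123/index.m3u8",
        "logo": "https://example.invalid/logo.png",
        "id": "Live.Event.us",
        "timestamp": 0.0,
    }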
@@ -79,7 +78,6 @@ async def main() -> None:
         asyncio.create_task(pawa.scrape()),
         asyncio.create_task(shark.scrape()),
         asyncio.create_task(streambtw.scrape()),
-        asyncio.create_task(streamfree.scrape()),
         asyncio.create_task(tvpass.scrape()),
         # asyncio.create_task(xstreameast.scrape()),
     ]
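The task list above is only partially visible, and the surrounding code presumably awaits these tasks together. A self-contained sketch of the schedule-then-wait pattern, with stand-in coroutines (the real scraper names and the rest of main() are not shown in this hunk):

import asyncio


async def scrape_a() -> None:
    # stand-in for a real scraper coroutine
    await asyncio.sleep(0.1)


async def scrape_b() -> None:
    await asyncio.sleep(0.1)


async def main() -> None:
    # create_task schedules each scraper immediately, so they run concurrently
    tasks = [
        asyncio.create_task(scrape_a()),
        asyncio.create_task(scrape_b()),
    ]
    # wait for every scraper to finish before merging its urls
    await asyncio.gather(*tasks)


asyncio.run(main())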
@@ -111,7 +109,6 @@ async def main() -> None:
         | sport9.urls
         | streambtw.urls
         | streamcenter.urls
-        | streamfree.urls
         | streamhub.urls
         | streamsgate.urls
         | tvpass.urls
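This hunk removes streamfree.urls from a chain of PEP 584 dict unions that merges every scraper's results into one mapping. With the | operator the right-hand operand wins on duplicate keys, though the "(TAG)" suffix in each key makes collisions unlikely. A quick demonstration:

# dict union (Python 3.9+): later operands override earlier ones on key clashes
a = {"[NBA] Example Game (S9)": {"url": "https://a.invalid"}}
b = {"[NFL] Other Game (TVP)": {"url": "https://b.invalid"}}

merged = a | b

print(len(merged))  # 2: disjoint keys are simply combined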
@@ -1,85 +0,0 @@
-from urllib.parse import urljoin
-
-from .utils import Cache, Time, get_logger, leagues, network
-
-log = get_logger(__name__)
-
-urls: dict[str, dict[str, str | float]] = {}
-
-TAG = "STRMFREE"
-
-CACHE_FILE = Cache(TAG, exp=19_800)
-
-BASE_URL = "https://streamfree.to/"
-
-
-async def get_events() -> dict[str, dict[str, str | float]]:
-    events = {}
-
-    if not (
-        r := await network.request(
-            urljoin(BASE_URL, "streams"),
-            log=log,
-        )
-    ):
-        return events
-
-    api_data: dict = r.json()
-
-    now = Time.clean(Time.now())
-
-    for streams in api_data.get("streams", {}).values():
-        if not streams:
-            continue
-
-        for stream in streams:
-            sport, name, stream_key = (
-                stream.get("league"),
-                stream.get("name"),
-                stream.get("stream_key"),
-            )
-
-            if not (sport and name and stream_key):
-                continue
-
-            key = f"[{sport}] {name} ({TAG})"
-
-            tvg_id, logo = leagues.get_tvg_info(sport, name)
-
-            events[key] = {
-                "url": network.build_proxy_url(
-                    tag=TAG,
-                    path=f"{stream_key}/index.m3u8",
-                    query={"stream_name": name},
-                ),
-                "logo": logo,
-                "base": BASE_URL,
-                "timestamp": now.timestamp(),
-                "id": tvg_id or "Live.Event.us",
-            }
-
-    return events
-
-
-async def scrape() -> None:
-    if cached := CACHE_FILE.load():
-        urls.update(cached)
-
-        log.info(f"Loaded {len(urls)} event(s) from cache")
-
-        return
-
-    log.info(f'Scraping from "{BASE_URL}"')
-
-    events = await network.safe_process(
-        get_events,
-        url_num=1,
-        semaphore=network.HTTP_S,
-        log=log,
-    )
-
-    urls.update(events or {})
-
-    CACHE_FILE.write(urls)

-    log.info(f"Collected and cached {len(urls)} new event(s)")
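scrape() in the deleted module is cache-first: Cache(TAG, exp=19_800) gives roughly a 5.5-hour TTL, load() short-circuits the network while the cache is fresh, and write() persists newly scraped events. The repo's Cache helper is not part of this diff, so the sketch below is a hypothetical file-backed stand-in with the same load/write surface, not the project's actual implementation:

import json
import time
from pathlib import Path


class FileCache:
    """Hypothetical stand-in for the repo's Cache helper (same load/write shape)."""

    def __init__(self, tag: str, exp: int) -> None:
        self.path = Path(f"{tag}.json")
        self.exp = exp  # time-to-live in seconds; 19_800 s is 5.5 hours

    def load(self) -> dict | None:
        # treat a missing or stale file as a cache miss, forcing a fresh scrape
        if not self.path.exists():
            return None
        if time.time() - self.path.stat().st_mtime > self.exp:
            return None
        return json.loads(self.path.read_text())

    def write(self, data: dict) -> None:
        self.path.write_text(json.dumps(data))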
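The deleted get_events() also folds its network call and the failure check into a single walrus-operator guard, returning the still-empty events dict when the fetch fails rather than raising. A runnable sketch of that shape; request() below is a hypothetical stand-in, since nothing in this diff shows network.request beyond it being awaitable and falsy on failure:

import asyncio
from typing import Any


async def request(url: str) -> dict[str, Any] | None:
    # hypothetical stand-in: returns None to simulate a failed fetch
    return None


async def get_events() -> dict[str, Any]:
    events: dict[str, Any] = {}

    # := binds the response and tests its truthiness in one expression
    if not (r := await request("https://example.invalid/streams")):
        return events

    events.update(r)
    return events


print(asyncio.run(get_events()))  # {} when the fetch fails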