mirror of https://github.com/doms9/iptv.git
synced 2026-01-21 03:59:03 +01:00

e

This commit is contained in:
parent 73f28024ac
commit 00000d9822

6 changed files with 125 additions and 180 deletions
@@ -1,4 +1,5 @@
 #!/usr/bin/env python3
+import asyncio
 from pathlib import Path
 
 import httpx
@@ -8,7 +9,7 @@ m3u8_file = Path(__file__).parent / "TV.m3u8"
 
 base_url = "https://s.id/ePwXT"
 
-client = httpx.Client(
+client = httpx.AsyncClient(
     timeout=5,
     follow_redirects=True,
     headers={
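Note on the hunk above: httpx.AsyncClient accepts the same configuration as httpx.Client (timeout, follow_redirects, headers), but every request must be awaited from inside a coroutine and the client has an async close. A minimal sketch of the pattern this commit adopts, assuming a placeholder URL that is not from the repo:

import asyncio
import httpx

async def demo() -> None:
    client = httpx.AsyncClient(timeout=5, follow_redirects=True)
    try:
        r = await client.get("https://example.com")  # awaited, unlike httpx.Client
        r.raise_for_status()
        print(r.status_code)
    finally:
        await client.aclose()  # AsyncClient must be closed asynchronously

asyncio.run(demo())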
@@ -17,49 +18,44 @@ client = httpx.Client(
 )
 
 
-def vanilla_fetch() -> tuple[list[str], int]:
+async def vanilla_fetch() -> tuple[list[str], int]:
     print("Fetching base M3U8")
 
     try:
-        r = client.get(base_url)
+        r = await client.get(base_url)
         r.raise_for_status()
     except Exception as e:
         raise SystemExit(f'Failed to fetch "{base_url}"\n{e}') from e
 
-    d = r.text.splitlines()
-
-    d.pop(0)
+    d = r.text.splitlines()[1:]
 
     last_chnl_number = int(r.text.split("tvg-chno=")[-1].split('"')[1])
 
     return d, last_chnl_number
 
 
-def main() -> None:
-    tvpass.main(client)
+async def main() -> None:
+    await tvpass.main(client)
 
-    fstv.main(client)
+    await fstv.main(client)
 
-    base_m3u8, chnl_number = vanilla_fetch()
+    base_m3u8, chnl_number = await vanilla_fetch()
 
     additions = tvpass.urls | fstv.urls
 
-    lines = []
-
-    for event, url in sorted(additions.items()):
-        chnl_number += 1
-        lines.append(
-            f'#EXTINF:-1 tvg-chno="{chnl_number}"'
-            f' tvg-id="(N/A)" tvg-name="{event}"'
-            ' tvg-logo="https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png"'
-            f' group-title="Live Events",{event}\n{url}\n'
-        )
+    lines = [
+        f'#EXTINF:-1 tvg-chno="{chnl_number}" tvg-id="(N/A)" tvg-name="{event}" tvg-logo="https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png" group-title="Live Events",{event}\n{url}'
+        for chnl_number, (event, url) in enumerate(
+            sorted(additions.items()),
+            start=chnl_number + 1,
+        )
+    ]
 
     m3u8_file.write_text(
         '#EXTM3U url-tvg="https://raw.githubusercontent.com/doms9/iptv/refs/heads/default/EPG/TV.xml"\n'
         + "\n".join(base_m3u8)
         + "\n"
-        + "".join(lines)
+        + "\n".join(lines)
         + "\n",
         encoding="utf-8",
     )
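The rewritten comprehension replaces manual counter bookkeeping: enumerate(..., start=chnl_number + 1) numbers the appended events immediately after the last channel parsed from the base playlist, and rebinding chnl_number as the loop variable keeps the name in scope. A self-contained sketch of the numbering (sample data is made up):

additions = {"Event B": "http://b/s.m3u8", "Event A": "http://a/s.m3u8"}
chnl_number = 100  # last tvg-chno parsed from the base playlist

lines = [
    f"#{n}: {event} -> {url}"
    for n, (event, url) in enumerate(sorted(additions.items()), start=chnl_number + 1)
]
print(lines)  # ['#101: Event A -> http://a/s.m3u8', '#102: Event B -> http://b/s.m3u8']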
@@ -68,4 +64,4 @@ def main() -> None:
 
 
 if __name__ == "__main__":
-    main()
+    asyncio.run(main())
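asyncio.run() creates the event loop, drives main() to completion, and tears the loop down. The module-level AsyncClient in this commit is never explicitly closed; a hedged alternative (not what the commit does) is to scope the client to main():

import asyncio
import httpx

async def main() -> None:
    # assumption: an async-with scope, unlike the module-level client in this commit
    async with httpx.AsyncClient(timeout=5, follow_redirects=True) as client:
        r = await client.get("https://example.com")  # placeholder URL
        print(r.status_code)

if __name__ == "__main__":
    asyncio.run(main())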
@@ -1,21 +1,22 @@
+import asyncio
 from urllib.parse import urljoin
 
 import httpx
-from bs4 import BeautifulSoup
+from selectolax.parser import HTMLParser
 
 urls: dict[str, str] = {}
 
-mirrors = {
+mirrors = [
     "https://fstv.online",
     "https://fstv.space",
     "https://fstv.zip",
     "https://fstv.us",
-}
+]
 
 
-def check_status(client: httpx.Client, url: str) -> bool:
+async def check_status(client: httpx.AsyncClient, url: str) -> bool:
     try:
-        r = client.get(url)
+        r = await client.get(url)
         r.raise_for_status()
     except Exception:
         return False
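Changing mirrors from a set to a list gives it a stable order, which matters once get_base (next hunk) zips the mirror list against asyncio.gather() results: gather returns results in task-submission order, not completion order. A minimal sketch of that property, with toy coroutines standing in for the HTTP health checks:

import asyncio

async def probe(delay: float) -> bool:
    await asyncio.sleep(delay)  # stand-in for an HTTP health check
    return delay < 0.25

async def main() -> None:
    names = ["a", "b", "c"]
    delays = [0.3, 0.1, 0.2]
    results = await asyncio.gather(*(probe(d) for d in delays))
    # results line up with names by submission order, not completion order
    print(list(zip(names, results)))  # [('a', False), ('b', True), ('c', True)]

asyncio.run(main())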
@@ -23,81 +24,90 @@ def check_status(client: httpx.Client, url: str) -> bool:
     return r.status_code == 200
 
 
-def get_base(client: httpx.Client) -> str:
-    for url in filter(lambda x: check_status(client, x), mirrors):
-        return url
+async def get_base(client: httpx.AsyncClient) -> str:
+    tasks = [check_status(client, link) for link in mirrors]
+    results = await asyncio.gather(*tasks)
+
+    return [url for url, ok in zip(mirrors, results) if ok][0]
 
 
-def get_hrefs(client: httpx.Client, base_url: str) -> list[tuple[str, str]]:
+async def get_hrefs(client: httpx.AsyncClient, base_url: str) -> list[tuple[str, str]]:
     print(f'Scraping from "{base_url}"')
 
     try:
-        r = client.get(base_url)
+        r = await client.get(base_url)
         r.raise_for_status()
     except Exception as e:
         print(f'Failed to fetch "{base_url}"\n{e}')
 
         return []
 
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = HTMLParser(r.text)
 
     events = {}
 
-    for wrpr in soup.find_all("div", class_="fixtures-live-wrapper"):
-        for games in wrpr.select(".match-table-item"):
+    for wrpr in soup.css("div.fixtures-live-wrapper"):
+        for games in wrpr.css(".match-table-item"):
 
-            league_name = games.select_one(".league-info a.league-name")
+            league_name = games.css_first(".league-info a.league-name")
 
-            league_match = games.select_one(".common-table-row a[href*='/match/']")
+            league_match = games.css_first(".common-table-row a[href*='/match/']")
 
             if league_name and league_match:
-                full_text = league_name.get_text(strip=True)
+                full_text = league_name.text(strip=True)
 
                 if "]" in full_text:
                     event_name = full_text.split("]", 1)[1].strip()
                 else:
                     event_name = full_text
 
-                events[event_name] = urljoin(base_url, league_match["href"])
+                events[event_name] = urljoin(
+                    base_url, league_match.attributes.get("href")
+                )
 
     return events.items()
 
 
-def fetch_m3u8(client: httpx.Client, url: str) -> tuple[str, list[str]]:
+async def fetch_m3u8(client: httpx.AsyncClient, url: str) -> tuple[str, list[str]]:
     try:
-        r = client.get(url)
+        r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
         print(f'Failed to fetch "{url}"\n{e}')
 
         return []
 
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = HTMLParser(r.text)
 
-    if category_links := soup.select(".common-list-category .category-item a"):
-        match_name = category_links[-1].get_text(strip=True)
+    if category_links := soup.css(".common-list-category .category-item a"):
+        match_name = category_links[-1].text(strip=True)
     else:
         match_name = None
 
     if not match_name or match_name.lower() == "vs":
-        if og_title := soup.find("meta", property="og:title"):
-            match_name = og_title["content"].split(" start on")[0].strip()
+        if og_title := soup.css_first("meta[property='og:title']"):
+            match_name = (
+                og_title.attributes.get("content", "").split(" start on")[0].strip()
+            )
 
-    btns = soup.select("button.btn-server")
+    btns = soup.css("button.btn-server")
 
-    return match_name, [btn["data-link"] for btn in btns if btn.has_attr("data-link")]
+    return match_name, [
+        btn.attributes.get("data-link") for btn in btns if "data-link" in btn.attributes
+    ]
 
 
-def main(client: httpx.Client) -> None:
-    base_url = get_base(client)
+async def main(client: httpx.AsyncClient) -> None:
+    if not (base_url := await get_base(client)):
+        print("No working FSTV mirrors")
+        return
 
-    for event, href in get_hrefs(client, base_url):
+    events = await get_hrefs(client, base_url)
 
-        if not href:
-            continue
-
-        match_name, m3u8_urls = fetch_m3u8(client, href)
+    tasks = [fetch_m3u8(client, href) for _, href in events if href]
+    results = await asyncio.gather(*tasks)
 
+    for (event, _), (match_name, m3u8_urls) in zip(events, results):
         if not m3u8_urls:
             continue
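The BeautifulSoup → selectolax port in this hunk is mostly a one-to-one method mapping: find_all/select → css, select_one → css_first, get_text(strip=True) → text(strip=True), tag["attr"] → node.attributes.get("attr"), and tag.has_attr("attr") → "attr" in node.attributes. A hedged side-by-side sketch on a made-up document:

from selectolax.parser import HTMLParser

html = '<div class="a"><a href="/match/1">[EPL] Final</a></div>'  # toy sample
tree = HTMLParser(html)                            # bs4: BeautifulSoup(html, "lxml")

node = tree.css_first("div.a a[href*='/match/']")  # bs4: soup.select_one(...)
print(node.text(strip=True))                       # bs4: node.get_text(strip=True) -> [EPL] Final
print(node.attributes.get("href"))                 # bs4: node["href"] -> /match/1

Two behaviors worth flagging in the new async main(): get_base() raises IndexError rather than returning a falsy value when every mirror fails, so the `if not (base_url := ...)` guard only covers a falsy return; and zip(events, results) assumes every scraped href is truthy, since tasks is built with an `if href` filter that events is not.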
@@ -43,9 +43,9 @@ def save_cache(urls: dict[str, str]) -> None:
     base_file.write_text(json.dumps(payload, indent=2), encoding="utf-8")
 
 
-def fetch_m3u8(client: httpx.Client) -> list[str] | None:
+async def fetch_m3u8(client: httpx.AsyncClient) -> list[str] | None:
     try:
-        r = client.get(base_url)
+        r = await client.get(base_url)
         r.raise_for_status()
     except Exception as e:
         print(f'Failed to fetch "{base_url}"\n{e}')
@@ -53,7 +53,7 @@ def fetch_m3u8(client: httpx.Client) -> list[str] | None:
     return r.text.splitlines()
 
 
-def main(client: httpx.Client) -> None:
+async def main(client: httpx.AsyncClient) -> None:
     if cached := load_cache():
         urls.update(cached)
         print(f"TVPass: Collected {len(urls)} live events from cache")
@@ -61,24 +61,24 @@ def main(client: httpx.Client) -> None:
 
     print(f'Scraping from "{base_url}"')
 
-    if not (data := fetch_m3u8(client)):
+    if not (data := await fetch_m3u8(client)):
         return
 
-    for i in range(len(data) - 1):
-        if data[i].startswith("#EXTINF"):
-            tvg_id_match = re.search(r'tvg-id="([^"]*)"', data[i])
-            tvg_name_match = re.search(r'tvg-name="([^"]*)"', data[i])
+    for i, line in enumerate(data[:-1]):
+        if line.startswith("#EXTINF"):
+            tvg_id_match = re.search(r'tvg-id="([^"]*)"', line)
+            tvg_name_match = re.search(r'tvg-name="([^"]*)"', line)
 
             tvg_id = tvg_id_match[1] if tvg_id_match else None
-            tvg_name = tvg_name_match[1]
+            tvg_name = tvg_name_match[1] if tvg_name_match else None
 
             if tvg_id == "":
                 url = data[i + 1]
 
-                tvg_name = tvg_name.split("(")[0].strip()
+                if tvg_name:
+                    tvg_name = tvg_name.split("(")[0].strip()
 
-                if url.endswith("/sd"):
+                path_parts = urlparse(url).path.strip("/").split("/")
+
+                if len(path_parts) >= 2 and path_parts[-1] == "sd":
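Replacing url.endswith("/sd") with a parsed-path check means query strings and trailing slashes no longer defeat the stream-quality detection. A small sketch of the difference, with made-up URLs:

from urllib.parse import urlparse

for url in ("https://host/ch/espn/sd", "https://host/ch/espn/sd?token=x"):
    path_parts = urlparse(url).path.strip("/").split("/")
    print(url.endswith("/sd"), len(path_parts) >= 2 and path_parts[-1] == "sd")
# endswith misses the tokenized URL (False); the parsed-path check matches both (True)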