commit 00000d98d2 (parent 87aeb434ff)

7 changed files with 130 additions and 84 deletions
@@ -13,17 +13,20 @@ base = "http://m3u4u.com/m3u/d5k2nvp8w2t3w2k1n984"
 current_hour = datetime.now(pytz.timezone("America/New_York")).hour
 
+client = httpx.Client(
+    timeout=5,
+    follow_redirects=True,
+    headers={
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
+    },
+)
 
 
 def vanilla_fetch() -> tuple[list[str], int]:
     print("Fetching base M3U8")
 
     try:
-        r = httpx.get(
-            base,
-            follow_redirects=True,
-            timeout=5,
-        )
+        r = client.get(base)
         r.raise_for_status()
     except Exception as e:
         raise SystemExit(f'Failed to fetch "{base}"\n{e}') from e
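The heart of the change: per-request httpx.get() calls are replaced by one module-level httpx.Client, so every request in a run shares a connection pool (HTTP keep-alive) and one set of defaults (5 s timeout, redirect following, a browser User-Agent). A minimal sketch of the pattern, with the fetch_text helper invented here for illustration:

    import httpx

    # One client per process: connections are pooled and reused,
    # and the defaults below apply to every request made through it.
    client = httpx.Client(timeout=5, follow_redirects=True)

    def fetch_text(url: str) -> str:
        r = client.get(url)  # no per-call timeout/redirect kwargs needed
        r.raise_for_status()
        return r.text

The commit appears to leave the client open for the life of the process; closing it explicitly (client.close(), or a with-block) would be equally valid for a one-shot script.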
@@ -39,38 +42,38 @@ def vanilla_fetch() -> tuple[list[str], int]:
 def main() -> None:
     if current_hour <= 11:
-        tvpass.main()
+        tvpass.main(client)
     else:
         try:
             tvpass.urls = json.loads(tvpass.base_file.read_text(encoding="utf-8"))
         except (FileNotFoundError, json.JSONDecodeError):
             pass
 
-    fstv.main()
+    fstv.main(client)
 
     base_m3u8, chnl_number = vanilla_fetch()
 
-    additions = {**tvpass.urls, **fstv.urls}
-
-    lines = []
-
-    for event, url in additions.items():
-        chnl_number += 1
-        lines.append(
-            f'#EXTINF:-1 tvg-chno="{chnl_number}"'
-            f' tvg-id="(N/A)" tvg-name="{event}"'
-            ' tvg-logo="https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png"'
-            f' group-title="Live Events",{event}\n{url}\n'
-        )
-
-    m3u8_file.write_text(
-        '#EXTM3U url-tvg="https://raw.githubusercontent.com/doms9/iptv/refs/heads/default/EPG/TV.xml"\n'
-        + "\n".join(base_m3u8)
-        + "\n".join(lines)
-        + "\n",
-        encoding="utf-8",
-    )
+    additions = tvpass.urls | fstv.urls
+
+    for event, url in additions.items():
+        chnl_number += 1
+
+        with m3u8_file.open("a", encoding="utf-8") as f:
+            f.write(f'#EXTINF:-1 tvg-chno="{chnl_number}"')
+            f.write(f' tvg-id="(N/A)" tvg-name="{event}"')
+            f.write(
+                ' tvg-logo="https://i.gyazo.com/ec27417a9644ae517196494afa72d2b9.png"'
+            )
+            f.write(' group-title="Live Events"')
+            f.write(f",{event}\n{url}\n")
 
     print(f"M3U8 saved to {m3u8_file.name}")
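The rewritten merge, tvpass.urls | fstv.urls, is the Python 3.9+ dict-union operator and behaves exactly like the {**tvpass.urls, **fstv.urls} unpacking it replaces: on a duplicate event name, the right-hand side (fstv) wins. A quick illustration with made-up entries:

    a = {"Event A": "http://x/1.m3u8", "Event B": "http://x/2.m3u8"}
    b = {"Event B": "http://y/2.m3u8"}

    assert (a | b) == {**a, **b}                     # same result
    assert (a | b)["Event B"] == "http://y/2.m3u8"   # right operand wins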

fstv.py

@@ -8,9 +8,9 @@ urls: dict[str, str] = {}
 mirrors = {"https://fstv.online", "https://fstv.space", "https://fstv.zip"}
 
 
-def check_status(l: str) -> bool:
+def check_status(client: httpx.Client, url: str) -> bool:
     try:
-        r = httpx.get(l)
+        r = client.get(url)
         r.raise_for_status()
     except Exception:
         return False
@@ -18,25 +18,21 @@ def check_status(l: str) -> bool:
     return r.status_code == 200
 
 
-def get_base() -> str:
-    for url in filter(check_status, mirrors):
+def get_base(client: httpx.Client) -> str:
+    for url in filter(lambda x: check_status(client, x), mirrors):
         return url
 
 
-def get_hrefs(base_url: str) -> list[tuple[str, str]] | tuple[None, None]:
+def get_hrefs(client: httpx.Client, base_url: str) -> list[tuple[str, str]]:
     print(f'Scraping from "{base_url}"')
 
     try:
-        r = httpx.get(
-            base_url,
-            timeout=5,
-        )
+        r = client.get(base_url)
         r.raise_for_status()
     except Exception as e:
         print(f'Failed to fetch "{base_url}"\n{e}')
 
-        return None, None
+        return []
 
     soup = BeautifulSoup(r.text, "lxml")
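The new get_base threads the client through filter() with a lambda. functools.partial expresses the same binding without a wrapper function; a sketch assuming the check_status signature above:

    from functools import partial

    def get_base(client: httpx.Client) -> str:
        # partial(check_status, client) fixes the first argument,
        # equivalent to: lambda x: check_status(client, x)
        for url in filter(partial(check_status, client), mirrors):
            return url

Either way, note that get_base still falls off the end and returns None when no mirror passes check_status, despite the -> str annotation.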
@@ -62,18 +58,14 @@ def get_hrefs(base_url: str) -> list[tuple[str, str]] | tuple[None, None]:
     return events.items()
 
 
-def fetch_m3u8(url: str) -> tuple[str, list[str]] | tuple[None, None]:
+def fetch_m3u8(client: httpx.Client, url: str) -> tuple[str, list[str]]:
     try:
-        r = httpx.get(
-            url,
-            timeout=5,
-        )
+        r = client.get(url)
         r.raise_for_status()
     except Exception as e:
         print(f'Failed to fetch "{url}"\n{e}')
 
-        return None, None
+        return []
 
     soup = BeautifulSoup(r.text, "lxml")
@@ -91,22 +83,22 @@ def fetch_m3u8(url: str) -> tuple[str, list[str]] | tuple[None, None]:
     return match_name, [btn["data-link"] for btn in btns if btn.has_attr("data-link")]
 
 
-def main() -> None:
-    for event, href in get_hrefs(get_base()):
+def main(client: httpx.Client) -> None:
+    base_url = get_base(client)
+
+    for event, href in get_hrefs(client, base_url):
 
         if not href:
-            return
+            continue
 
-        match_name, m3u8_urls = fetch_m3u8(href)
+        match_name, m3u8_urls = fetch_m3u8(client, href)
 
         if not m3u8_urls:
-            return
+            continue
 
         for i, link in enumerate(m3u8_urls, start=1):
             key = (
-                f"[{event}] (S{i})"
-                if not match_name
-                else f"[{event}] {match_name} (S{i})"
+                f"[{event}] {match_name} (S{i})" if match_name else f"[{event}] (S{i})"
             )
 
             urls[key] = link
@@ -114,5 +106,6 @@ def main() -> None:
     print(f"Collected {len(urls)} live events")
 
 
-if __name__ == "__main__":
-    main()
+# if __name__ == "__main__":
+#     # create client beforehand
+#     main()
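Since main() now takes a client, the old module-level entry point is commented out rather than updated; the "create client beforehand" note says what a caller must do. A hypothetical standalone runner, not part of the commit (standalone_client is a name invented here):

    if __name__ == "__main__":
        # Build a throwaway client just for this run; the with-block
        # closes the connection pool when the scrape finishes.
        with httpx.Client(timeout=5, follow_redirects=True) as standalone_client:
            main(standalone_client)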

tvpass.py

@@ -11,28 +11,20 @@ base_file = Path(__file__).parent / "tvpass.json"
 urls: dict[str, str] = {}
 
 
-def fetch_m3u8() -> list[str] | None:
+def fetch_m3u8(client: httpx.Client) -> list[str] | None:
     try:
-        r = httpx.get(
-            base_url,
-            follow_redirects=True,
-            timeout=5,
-        )
+        r = client.get(base_url)
         r.raise_for_status()
     except Exception as e:
         print(f'Failed to fetch "{base_url}"\n{e}')
 
         return
 
     return r.text.splitlines()
 
 
-def main() -> None:
+def main(client: httpx.Client) -> None:
     print(f'Scraping from "{base_url}"')
 
-    if not (data := fetch_m3u8()):
+    if not (data := fetch_m3u8(client)):
         return
 
     for i in range(len(data) - 1):
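fetch_m3u8 signals failure with a bare return (None), and main() folds the call and the guard into one walrus expression. Spelled out, the condition covers both failure modes:

    # Equivalent long form of the guard in main():
    data = fetch_m3u8(client)
    if not data:   # None on a failed fetch, or an empty playlist
        return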
@@ -65,5 +57,6 @@ def main() -> None:
     base_file.write_text(json.dumps(urls, indent=2), encoding="utf-8")
 
 
-if __name__ == "__main__":
-    main()
+# if __name__ == "__main__":
+#     # create client beforehand
+#     main()