#!/usr/bin/env python3
import asyncio
import gzip
import re
from pathlib import Path
from xml.etree import ElementTree as ET
import httpx
# The merged guide is written next to this script.
epg_file = Path(__file__).parent / "TV.xml"

# Gzipped XMLTV sources to merge, in fetch order.
_EPGSHARE = "https://epgshare01.online/epgshare01/epg_ripper_{}.xml.gz"
epg_urls = [
    _EPGSHARE.format(ripper)
    for ripper in (
        "CA2",
        "DUMMY_CHANNELS",
        "FANDUEL1",
        "MY1",
        "PLEX1",
        "UK1",
        "US2",
        "US_LOCALS1",
    )
] + ["https://i.mjh.nz/Roku/all.xml.gz"]
# Browser-like UA: some EPG hosts reject default client identification.
_UA = (
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
    "(KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
)

# One shared async HTTP client, reused for every EPG download.
client = httpx.AsyncClient(
    timeout=5,
    follow_redirects=True,
    http2=True,
    headers={"User-Agent": _UA},
)
# Placeholder artwork for dummy/live-event channels.
live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"

# Dummy channel ids -> logo override (None keeps whatever logo the feed provides).
_DUMMIES_WITH_IMG = (
    "Basketball.Dummy.us",
    "Golf.Dummy.us",
    "Live.Event.us",
    "PPV.EVENTS.Dummy.us",
    "Racing.Dummy.us",
    "Soccer.Dummy.us",
    "Tennis.Dummy.us",
)
_DUMMIES_NO_IMG = (
    "MLB.Baseball.Dummy.us",
    "NBA.Basketball.Dummy.us",
    "NFL.Dummy.us",
    "NHL.Hockey.Dummy.us",
    "WNBA.dummy.us",
)
dummies = {cid: live_img for cid in _DUMMIES_WITH_IMG} | {
    cid: None for cid in _DUMMIES_NO_IMG
}

# Guide entries hijacked from another id: the "old" id's channel/programme
# elements are re-keyed to "new", with the mapping key used as display text.
replace_ids = {
    "NCAA Sports": {"old": "Sports.Dummy.us", "new": "NCAA.Sports.Dummy.us"},
    "UFC": {"old": "UFC.247.Dummy.us", "new": "UFC.Dummy.us"},
}
def get_tvg_ids() -> dict[str, str]:
base_m3u8 = (
(Path(__file__).parent.parent / "M3U8" / "base.m3u8")
.read_text(encoding="utf-8")
.splitlines()
)
tvg = {}
for line in base_m3u8:
if line.startswith("#EXTINF"):
tvg_id = re.search(r'tvg-id="([^"]*)"', line)[1]
tvg_logo = re.search(r'tvg-logo="([^"]*)"', line)[1]
tvg[tvg_id] = tvg_logo
return tvg
2025-09-24 01:00:30 -04:00
async def fetch_xml(url: str) -> ET.Element | None:
    """Download one gzipped XMLTV file and return its parsed root element.

    Returns None (after logging) when the download, the decompression, or
    the XML parse fails, so a single bad source never aborts the run.
    """
    try:
        response = await client.get(url)
        response.raise_for_status()
    except Exception as e:
        print(f'Failed to fetch "{url}": {e}')
        return None
    try:
        return ET.fromstring(gzip.decompress(response.content))
    except Exception as e:
        print(f'Failed to decompress and parse XML from "{url}": {e}')
        return None
def hijack_id(
    old: str,
    new: str,
    text: str,
    root: ET.Element,
) -> None:
    """Re-key the <channel> and <programme> elements of id `old` to id `new`.

    The channel's display-name and each programme's title/desc/sub-title are
    overwritten with `text`. Rebuilt elements are appended at the end of
    `root` and the originals removed (same move-to-end behavior as before).

    Fix: the previous version built copies of the channel's non-display-name
    children (e.g. <icon>) but never attached them to the new channel, so
    they were silently dropped; ET.SubElement now appends them.
    """
    og_channel = root.find(f"./channel[@id='{old}']")
    if og_channel is not None:
        new_channel = ET.Element(og_channel.tag, {**og_channel.attrib, "id": new})
        display_name = og_channel.find("display-name")
        if display_name is not None:
            # Keep the original attributes but replace the visible name.
            ET.SubElement(new_channel, "display-name", display_name.attrib).text = text
        for child in og_channel:
            if child.tag == "display-name":
                continue
            # BUG FIX: SubElement attaches the copy (old code leaked it).
            ET.SubElement(new_channel, child.tag, child.attrib).text = child.text
        root.remove(og_channel)
        root.append(new_channel)
    for program in root.findall(f"./programme[@channel='{old}']"):
        new_program = ET.Element(program.tag, {**program.attrib, "channel": new})
        for child in program:
            ET.SubElement(new_program, child.tag, child.attrib).text = child.text
        for tag_name in ["title", "desc", "sub-title"]:
            tag = new_program.find(tag_name)
            if tag is not None:
                tag.text = text
        root.remove(program)
        root.append(new_program)
async def main() -> None:
    """Build TV.xml: fetch all EPG sources, keep playlist channels, merge."""
    # Channel ids to keep, mapped to a logo override ("" / None = keep feed logo).
    tvg_ids = get_tvg_ids()
    # Dummy channels plus the soon-to-be-hijacked "old" ids use the live image.
    tvg_ids |= dummies | {v["old"]: live_img for v in replace_ids.values()}

    root = ET.Element("tv")

    # Download every source concurrently; failed sources come back as None.
    results = await asyncio.gather(*(fetch_xml(url) for url in epg_urls))
    for epg_data in results:
        if epg_data is None:
            continue
        for channel in epg_data.findall("channel"):
            if (channel_id := channel.get("id")) in tvg_ids:
                # Force our playlist logo when one is configured (truthy);
                # hoisted out of the icon loop — it doesn't vary per icon.
                if logo := tvg_ids.get(channel_id):
                    for icon_tag in channel.findall("icon"):
                        icon_tag.set("src", logo)
                # Drop <url> tags; they only bloat the output.
                if (url_tag := channel.find("url")) is not None:
                    channel.remove(url_tag)
                root.append(channel)
        for program in epg_data.findall("programme"):
            if program.get("channel") in tvg_ids:
                title = program.find("title")
                # Guard: XMLTV programmes normally carry a <title>, but a
                # feed that omits it must not crash the whole build.
                if title is not None:
                    subtitle = program.find("sub-title")
                    # Generic titles get the sub-title appended so events
                    # are distinguishable in the guide.
                    if (
                        title.text in ["NHL Hockey", "Live: NFL Football"]
                        and subtitle is not None
                    ):
                        title.text = f"{title.text} {subtitle.text}"
                root.append(program)

    # Re-key hijacked ids after the merge so their elements exist in root.
    for k, v in replace_ids.items():
        hijack_id(**v, text=k, root=root)

    tree = ET.ElementTree(root)
    tree.write(epg_file, encoding="utf-8", xml_declaration=True)
    print(f"EPG saved to {epg_file.resolve()}")
if __name__ == "__main__":

    async def _run() -> None:
        """Run the build, then close the shared client in the SAME loop.

        The previous version closed the client via a second asyncio.run(),
        i.e. a fresh event loop created after the loop that owned the
        client's connections had already been torn down (hence the
        try/except pass band-aid). Closing inside the running loop is the
        supported shutdown path.
        """
        try:
            await main()
        finally:
            await client.aclose()

    asyncio.run(_run())