#!/usr/bin/env python3
"""Build a merged EPG (TV.xml) from several epgshare01 feeds.

Downloads each gzip-compressed XMLTV feed concurrently, keeps only the
channels/programmes whose tvg-id appears in TVG-IDs.json (plus the dummy
sports channels declared below), and writes the combined guide to TV.xml
next to this script.
"""
import asyncio
import gzip
import json
from pathlib import Path
from xml.etree import ElementTree as ET

import httpx

tvg_ids_file = Path(__file__).parent / "TVG-IDs.json"
epg_file = Path(__file__).parent / "TV.xml"

# Source feeds; each URL serves a gzip-compressed XMLTV document.
epg_urls = [
    "https://epgshare01.online/epgshare01/epg_ripper_CA1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_DUMMY_CHANNELS.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_ES1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_FANDUEL1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_MY1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_PLEX1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_PT1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_UK1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_US1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_US2.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_US_LOCALS2.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_US_SPORTS1.xml.gz",
]

client = httpx.AsyncClient(
    timeout=5,
    follow_redirects=True,
    headers={
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
    },
)

# Logo override for "live event"-style dummy channels; None keeps the
# icon the feed itself supplies.
live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"
dummies = {
    "Basketball.Dummy.us": live_img,
    "Live.Event.us": live_img,
    "MLB.Baseball.Dummy.us": None,
    "MLS.Soccer.Dummy.us": None,
    "NBA.Basketball.Dummy.us": None,
    "NFL.Dummy.us": None,
    "NHL.Hockey.Dummy.us": None,
    "PPV.EVENTS.Dummy.us": live_img,
    "Premier.League.Dummy.us": None,
    "Racing.Dummy.us": None,
    "Soccer.Dummy.us": live_img,
    "Sports.Dummy.us": live_img,
    "Tennis.Dummy.us": None,
    "UEFA.Champions.League.Dummy.us": None,
    "UFC.Fight.Pass.Dummy.us": live_img,
    "WNBA.dummy.us": None,
}


async def fetch_xml(url: str) -> ET.Element:
    """Download, decompress, and parse one gzipped XMLTV feed.

    Aborts the whole program (SystemExit) if the feed cannot be fetched
    or parsed — a partial guide is treated as worse than no guide.
    """
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        raise SystemExit(f'Failed to fetch "{url}"\n{e}') from e
    try:
        decompressed_data = gzip.decompress(r.content)
        return ET.fromstring(decompressed_data)
    except Exception as e:
        raise SystemExit(f'Failed to decompress and parse XML from "{url}"\n{e}') from e


async def main() -> None:
    """Fetch all feeds, filter to wanted tvg-ids, and write TV.xml."""
    # Wanted tvg-ids -> optional logo override (None = keep the feed's logo).
    # Fix: was annotated dict[str, str], but `dummies` contributes None values.
    tvg_ids: dict[str, str | None] = json.loads(tvg_ids_file.read_text(encoding="utf-8"))
    tvg_ids |= dummies

    root = ET.Element("tv")
    try:
        results = await asyncio.gather(*(fetch_xml(url) for url in epg_urls))
    finally:
        # Fix: the module-level client was never closed; release the
        # connection pool even when fetch_xml aborts via SystemExit.
        await client.aclose()

    # NOTE: the original also skipped `None` results here, but fetch_xml
    # always raises on failure, so that branch was dead code.
    for epg_data in results:
        for channel in epg_data.findall("channel"):
            if (channel_id := channel.get("id")) not in tvg_ids:
                continue
            # Hoisted: the logo override does not vary per <icon> tag.
            if logo := tvg_ids.get(channel_id):
                for icon_tag in channel.findall("icon"):
                    icon_tag.set("src", logo)
            if (url_tag := channel.find("url")) is not None:
                channel.remove(url_tag)
            root.append(channel)
        for program in epg_data.findall("programme"):
            if program.get("channel") not in tvg_ids:
                continue
            # Fix: find("title") was called repeatedly and .text was read
            # without a None guard, crashing on programmes with no <title>.
            title_tag = program.find("title")
            if (
                title_tag is not None
                and title_tag.text in ["NHL Hockey", "Live: NFL Football"]
                and (subtitle := program.find("sub-title")) is not None
            ):
                # Generic sports titles get the matchup appended for clarity.
                title_tag.text = f"{title_tag.text} {subtitle.text}"
            root.append(program)

    tree = ET.ElementTree(root)
    tree.write(epg_file, encoding="utf-8", xml_declaration=True)
    print(f"EPG saved to {epg_file.name}")


if __name__ == "__main__":
    asyncio.run(main())