Compare commits

..

36 commits

| Author | SHA1 | Message | Date |
| ------ | ---- | ------- | ---- |
| GitHub Actions Bot | 3710836588 | update M3U8 | 2025-12-29 18:01:31 -05:00 |
| GitHub Actions Bot | a6e59b0d06 | update M3U8 | 2025-12-29 17:30:49 -05:00 |
| GitHub Actions Bot | 6d70587e60 | update M3U8 | 2025-12-29 17:00:59 -05:00 |
| GitHub Actions Bot | 39cfc0ab03 | update M3U8 | 2025-12-29 16:31:16 -05:00 |
| GitHub Actions Bot | bf2b985799 | update M3U8 | 2025-12-29 16:01:20 -05:00 |
| GitHub Actions Bot | 8c80abd80e | health log | 2025-12-29 20:43:40 +00:00 |
| GitHub Actions Bot | 21792b96bc | update M3U8 | 2025-12-29 15:31:09 -05:00 |
| GitHub Actions Bot | a72c3bb73f | update M3U8 | 2025-12-29 15:03:11 -05:00 |
| GitHub Actions Bot | f555f21255 | update M3U8 | 2025-12-29 14:31:58 -05:00 |
| GitHub Actions Bot | e135816817 | update M3U8 | 2025-12-29 14:01:16 -05:00 |
| GitHub Actions Bot | 69dd938fa1 | update EPG | 2025-12-29 18:56:39 +00:00 |
| GitHub Actions Bot | 78dbbd03ad | update M3U8 | 2025-12-29 13:31:05 -05:00 |
| GitHub Actions Bot | 81bf895911 | update M3U8 | 2025-12-29 13:02:23 -05:00 |
| GitHub Actions Bot | 17acdf2fc5 | update M3U8 | 2025-12-29 12:01:29 -05:00 |
| GitHub Actions Bot | 6dc96217d3 | update M3U8 | 2025-12-29 11:01:12 -05:00 |
| GitHub Actions Bot | 0287630b5f | update M3U8 | 2025-12-29 10:01:03 -05:00 |
| doms9 | 00000d903e | e (fix scraper crashing if api url(s) do not work; misc. edits) | 2025-12-29 09:57:40 -05:00 |
| GitHub Actions Bot | 51598ce2a2 | health log | 2025-12-29 14:45:14 +00:00 |
| GitHub Actions Bot | f6c01461eb | update M3U8 | 2025-12-29 08:00:38 -05:00 |
| GitHub Actions Bot | 3e1cfb9d19 | update EPG | 2025-12-29 10:54:33 +00:00 |
| GitHub Actions Bot | b0f95eb96f | health log | 2025-12-29 08:52:35 +00:00 |
| doms9 | 00000d9a11 | e (use window for embedhd) | 2025-12-29 02:03:05 -05:00 |
| doms9 | 00000d9e80 | e (remove nfl webcast (cloudflare protection added)) | 2025-12-29 01:54:07 -05:00 |
| GitHub Actions Bot | 168fb9171f | update M3U8 | 2025-12-28 23:30:53 -05:00 |
| GitHub Actions Bot | d1dcde8a0e | update EPG | 2025-12-29 04:03:52 +00:00 |
| GitHub Actions Bot | 8023d7a0a6 | update M3U8 | 2025-12-28 23:00:32 -05:00 |
| GitHub Actions Bot | 9163284929 | health log | 2025-12-29 03:59:07 +00:00 |
| GitHub Actions Bot | e6e27f1541 | update M3U8 | 2025-12-28 22:31:10 -05:00 |
| GitHub Actions Bot | 0c8971a507 | update M3U8 | 2025-12-28 22:00:22 -05:00 |
| GitHub Actions Bot | a58cfbcf38 | update M3U8 | 2025-12-28 21:30:55 -05:00 |
| GitHub Actions Bot | 8a82ead052 | update M3U8 | 2025-12-28 21:01:19 -05:00 |
| GitHub Actions Bot | dbbf58c2b2 | update M3U8 | 2025-12-28 20:31:04 -05:00 |
| GitHub Actions Bot | 654013f31f | update M3U8 | 2025-12-28 20:01:20 -05:00 |
| GitHub Actions Bot | 0077357b57 | update M3U8 | 2025-12-28 19:31:23 -05:00 |
| GitHub Actions Bot | a024aa16b6 | update M3U8 | 2025-12-28 19:01:05 -05:00 |
| GitHub Actions Bot | faa459e94b | update M3U8 | 2025-12-28 18:31:11 -05:00 |
13 changed files with 90823 additions and 92188 deletions

177941  EPG/TV.xml

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@@ -18,12 +18,14 @@ API_URL = "https://api.cdn-live.tv/api/v1/events/sports"
 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
+    now = Time.clean(Time.now())
     events = []
     if not (api_data := API_FILE.load(per_entry=False)):
         log.info("Refreshing API cache")
-        api_data = {}
+        api_data = {"timestamp": now.timestamp()}
         if r := await network.request(
             API_URL,
@@ -32,13 +34,13 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
         ):
             api_data: dict = r.json()
+            api_data["timestamp"] = now.timestamp()
         API_FILE.write(api_data)
     if not (data := api_data.get("cdn-live-tv")):
         return events
-    now = Time.clean(Time.now())
     start_dt = now.delta(minutes=-30)
     end_dt = now.delta(minutes=30)
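
The change above (and the near-identical ones in the hunks that follow) seeds the API cache with a timestamp-only stub when the upstream request fails, instead of leaving an empty object behind. A minimal sketch of the pattern, assuming a `Cache`-style helper with `load()`/`write()` and a `network.request`-style coroutine as in this repo (the `fetch` callable and the `load_api_data` name below are illustrative, not part of the codebase):

```python
import time
from typing import Any


async def load_api_data(cache, fetch, url: str) -> dict[str, Any]:
    """Illustrative sketch of the cache-seeding pattern in the diff above."""
    now_ts = time.time()
    if api_data := cache.load(per_entry=False):
        return api_data

    # Seed a timestamped stub so a failed refresh still leaves a valid, dated
    # entry behind rather than an empty object; downstream .get() calls then
    # fall through cleanly instead of the scraper failing when the API URL
    # is unreachable.
    api_data = {"timestamp": now_ts}
    if r := await fetch(url):
        api_data = r.json()
        api_data["timestamp"] = now_ts
    cache.write(api_data)
    return api_data
```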

View file

@@ -27,7 +27,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (api_data := API_CACHE.load(per_entry=False)):
         log.info("Refreshing API cache")
-        api_data = {}
+        api_data = {"timestamp": now.timestamp()}
         if r := await network.request(BASE_URL, log=log):
             api_data: dict = r.json()
@@ -38,10 +38,13 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     events = []
+    start_dt = now.delta(minutes=-30)
+    end_dt = now.delta(minutes=30)
     for info in api_data.get("days", []):
         event_dt = Time.from_str(info["day_et"], timezone="ET")
-        if now.date() != event_dt.date():
+        if not start_dt <= event_dt <= end_dt:
             continue
         for event in info["items"]:
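
The second hunk also replaces the same-calendar-day check with the ±30-minute window the other scrapers use, so events that straddle midnight are no longer dropped. A stdlib-only sketch of that window test (the repo's `Time` helper is not shown here, so plain `datetime` stands in):

```python
from datetime import datetime, timedelta


def in_window(event_dt: datetime, now: datetime, minutes: int = 30) -> bool:
    """True if the event start falls within +/- `minutes` of `now`."""
    start_dt = now - timedelta(minutes=minutes)
    end_dt = now + timedelta(minutes=minutes)
    return start_dt <= event_dt <= end_dt


# An event listed for 23:50 on the 28th is still picked up at 00:10 on the 29th,
# which the old `now.date() != event_dt.date()` check would have skipped.
now = datetime(2025, 12, 29, 0, 10)
print(in_window(datetime(2025, 12, 28, 23, 50), now))  # True
```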

View file

@@ -28,19 +28,19 @@ BASE_MIRRORS = [
 async def get_events(api_url: str, cached_keys: list[str]) -> list[dict[str, str]]:
-    events = []
+    now = Time.clean(Time.now())
     if not (api_data := API_FILE.load(per_entry=False)):
         log.info("Refreshing API cache")
-        api_data = {}
+        api_data = {"timestamp": now.timestamp()}
         if r := await network.request(api_url, log=log):
             api_data: dict = r.json()
         API_FILE.write(api_data)
-    now = Time.clean(Time.now())
+    events = []
     start_dt = now.delta(minutes=-30)
     end_dt = now.delta(minutes=30)

View file

@@ -14,7 +14,7 @@ CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
-BASE_URL = "https://backendstreamcenter.youshop.pro:488/api/Parties"
+BASE_URL = "https://backend.streamcenter.live/api/Parties"
 CATEGORIES = {
     4: "Basketball",
@@ -37,7 +37,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         log.info("Refreshing API cache")
-        api_data = []
+        api_data = [{"timestamp": now.timestamp()}]
         if r := await network.request(
             BASE_URL,
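
For this scraper the cache holds a list and is read back with `index=-1`, so the failure stub is wrapped in a one-element list; presumably this is what keeps the last-entry lookup from raising on an empty cache when the API is down. A tiny illustration of why the shape matters (values are made up):

```python
# Made-up values, purely to illustrate the list-shaped stub.
empty: list[dict] = []
stub = [{"timestamp": 1767042000.0}]

print(stub[-1])  # {'timestamp': 1767042000.0} -- last-entry access stays safe

try:
    empty[-1]  # what an unseeded, empty cache would do
except IndexError:
    print("empty cache: IndexError on index=-1 access")
```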

View file

@@ -59,7 +59,7 @@ async def refresh_api_cache(now_ts: float) -> list[dict[str, Any]]:
     results = await asyncio.gather(*tasks)
     if not (data := [*chain.from_iterable(r.json() for r in results if r)]):
-        return []
+        return [{"timestamp": now_ts}]
     for ev in data:
         ev["ts"] = ev.pop("timestamp")

View file

@@ -39,7 +39,7 @@ async def get_events(url: str, cached_keys: list[str]) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         log.info("Refreshing API cache")
-        api_data = []
+        api_data = [{"timestamp": now.timestamp()}]
         if r := await network.request(
             urljoin(url, "api/matches/all-today"),

View file

@@ -82,13 +82,10 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
         if not (time_node := node.css_first(".col-3")):
             continue
-        if not time_node.text(strip=True) == "MatchStarted":
+        if time_node.text(strip=True) != "MatchStarted":
             continue
-        if not (href := node.attributes.get("href")):
-            continue
-        if href.startswith("http"):
+        if not (href := node.attributes.get("href")) or href.startswith("http"):
             continue
         sport = fix_league(sport)
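
This hunk is a pure readability change: `not x == y` becomes `x != y`, and the two consecutive `href` guards collapse into a single `or`, which short-circuits exactly as the separate `continue`s did. A quick equivalence check (simplified, without the walrus assignment used in the real code):

```python
def skip_old(href: str | None) -> bool:
    if not href:
        return True
    if href.startswith("http"):
        return True
    return False


def skip_new(href: str | None) -> bool:
    return not href or href.startswith("http")


for href in (None, "", "http://example.com/stream", "/relative/link"):
    assert skip_old(href) == skip_new(href)
print("guards agree on all samples")
```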

View file

@@ -60,7 +60,7 @@ async def refresh_api_cache(now: Time) -> list[dict[str, Any]]:
     results = await asyncio.gather(*tasks)
     if not (data := [*chain.from_iterable(r.json() for r in results if r)]):
-        return []
+        return [{"timestamp": now.timestamp()}]
     for ev in data:
         ev["ts"] = ev.pop("timestamp")

View file

@@ -1,4 +1,3 @@
-import asyncio
 from functools import partial
 from playwright.async_api import async_playwright
@@ -16,27 +15,23 @@ CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=86_400)
-BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}
+BASE_URL = "https://slapstreams.com"
 def fix_event(s: str) -> str:
     return " vs ".join(s.split("@"))
-async def refresh_html_cache(url: str) -> dict[str, dict[str, str | float]]:
+async def refresh_html_cache() -> dict[str, dict[str, str | float]]:
     events = {}
-    if not (html_data := await network.request(url, log=log)):
+    if not (html_data := await network.request(BASE_URL, log=log)):
         return events
     now = Time.clean(Time.now())
     soup = HTMLParser(html_data.content)
-    title = soup.css_first("title").text(strip=True)
-    sport = "NFL" if "NFL" in title else "NHL"
     date_text = now.strftime("%B %d, %Y")
     if date_row := soup.css_first("tr.mdatetitle"):
@@ -66,10 +61,10 @@ async def refresh_html_cache(url: str) -> dict[str, dict[str, str | float]]:
         event = fix_event(event_name)
-        key = f"[{sport}] {event} ({TAG})"
+        key = f"[NHL] {event} ({TAG})"
         events[key] = {
-            "sport": sport,
+            "sport": "NHL",
             "event": event,
             "link": href,
             "event_ts": event_dt.timestamp(),
@@ -85,11 +80,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (events := HTML_CACHE.load()):
         log.info("Refreshing HTML cache")
-        tasks = [refresh_html_cache(url) for url in BASE_URLS.values()]
-        results = await asyncio.gather(*tasks)
-        events = {k: v for data in results for k, v in data.items()}
+        events = await refresh_html_cache()
         HTML_CACHE.write(events)
@@ -119,7 +110,7 @@ async def scrape() -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")
-    log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
+    log.info(f'Scraping from "{BASE_URL}"')
     events = await get_events(cached_urls.keys())
@@ -161,7 +152,7 @@ async def scrape() -> None:
         entry = {
             "url": url,
             "logo": logo,
-            "base": BASE_URLS[sport],
+            "base": BASE_URL,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
             "link": link,

View file

@@ -1,10 +1,7 @@
-## Base Log @ 2025-12-28 20:42 UTC
+## Base Log @ 2025-12-29 20:43 UTC
-### ✅ Working Streams: 145<br>❌ Dead Streams: 1
+### ✅ Working Streams: 146<br>❌ Dead Streams: 0
-| Channel | Error (Code) | Link |
-| ------- | ------------ | ---- |
-| FX | HTTP Error (403) | `http://cord-cutter.net:8080/k4Svp2/645504/46690` |
 ---
 #### Base Channels URL
 ```