Compare commits


No commits in common. "37108365883d1fa007d7c686e30fe3ee1cee1ef1" and "8be48834b19c0ef5c39642aee55647bee2f1772e" have entirely different histories.

13 changed files with 91783 additions and 90418 deletions

EPG/TV.xml (176971 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -18,14 +18,12 @@ API_URL = "https://api.cdn-live.tv/api/v1/events/sports"
async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    now = Time.clean(Time.now())
    events = []
    if not (api_data := API_FILE.load(per_entry=False)):
        log.info("Refreshing API cache")
-        api_data = {"timestamp": now.timestamp()}
+        api_data = {}
        if r := await network.request(
            API_URL,
@@ -34,13 +32,13 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
        ):
            api_data: dict = r.json()
            api_data["timestamp"] = now.timestamp()
            API_FILE.write(api_data)
    if not (data := api_data.get("cdn-live-tv")):
        return events
    now = Time.clean(Time.now())
    start_dt = now.delta(minutes=-30)
    end_dt = now.delta(minutes=30)

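The change repeated across most of the scrapers below: the refresh path no longer pre-seeds the API cache with a `{"timestamp": ...}` placeholder but starts from an empty container, and (per these hunks) a timestamp is attached to a successful response before it is written back. A minimal runnable sketch of that flow; `Cache`, `fetch_json`, and the URL here are stand-ins for the repo's helpers, which the diff does not show in full:

```python
import asyncio
import time
from typing import Any

class Cache:
    """Hypothetical stand-in for the repo's Cache helper."""
    def __init__(self) -> None:
        self._data: dict[str, Any] = {}

    def load(self, per_entry: bool = False) -> dict[str, Any]:
        return self._data  # empty means the cache is cold or expired

    def write(self, data: dict[str, Any]) -> None:
        self._data = data

async def fetch_json(url: str) -> dict[str, Any] | None:
    """Hypothetical stand-in for network.request(url) -> r.json()."""
    return {"cdn-live-tv": []}  # pretend the request succeeded

API_FILE = Cache()

async def load_api_data(url: str) -> dict[str, Any]:
    # Pattern shown in the hunks above: start from an empty dict instead of a
    # {"timestamp": ...} placeholder, and stamp/persist only on a successful fetch.
    if not (api_data := API_FILE.load(per_entry=False)):
        api_data = {}
        if payload := await fetch_json(url):
            api_data = payload
            api_data["timestamp"] = time.time()
            API_FILE.write(api_data)
    return api_data

print(asyncio.run(load_api_data("https://api.example.invalid/events")))
```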

@@ -27,7 +27,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    if not (api_data := API_CACHE.load(per_entry=False)):
        log.info("Refreshing API cache")
-        api_data = {"timestamp": now.timestamp()}
+        api_data = {}
        if r := await network.request(BASE_URL, log=log):
            api_data: dict = r.json()
@@ -38,13 +38,10 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    events = []
    start_dt = now.delta(minutes=-30)
    end_dt = now.delta(minutes=30)
    for info in api_data.get("days", []):
        event_dt = Time.from_str(info["day_et"], timezone="ET")
-        if not start_dt <= event_dt <= end_dt:
+        if now.date() != event_dt.date():
            continue
        for event in info["items"]:

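The second hunk above swaps a ±30-minute window test for a same-calendar-day test on the event time. A self-contained comparison of the two filters using plain `datetime` (the repo's `Time` helper is not shown in the diff, so this is only an approximation of its semantics):

```python
from datetime import datetime, timedelta, timezone

def in_window(event_dt: datetime, now: datetime, minutes: int = 30) -> bool:
    """Keep events starting within +/- `minutes` of now (the window-style check)."""
    return now - timedelta(minutes=minutes) <= event_dt <= now + timedelta(minutes=minutes)

def same_day(event_dt: datetime, now: datetime) -> bool:
    """Keep anything scheduled for today's date (the date-style check)."""
    return event_dt.date() == now.date()

now = datetime(2025, 12, 28, 12, 0, tzinfo=timezone.utc)
evening_game = datetime(2025, 12, 28, 20, 0, tzinfo=timezone.utc)
print(in_window(evening_game, now))  # False: eight hours away
print(same_day(evening_game, now))   # True: same calendar date
```

The date-based check keeps an evening game visible all day, at the cost of also keeping games that finished hours earlier.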

@@ -28,19 +28,19 @@ BASE_MIRRORS = [
async def get_events(api_url: str, cached_keys: list[str]) -> list[dict[str, str]]:
    now = Time.clean(Time.now())
    events = []
    if not (api_data := API_FILE.load(per_entry=False)):
        log.info("Refreshing API cache")
-        api_data = {"timestamp": now.timestamp()}
+        api_data = {}
        if r := await network.request(api_url, log=log):
            api_data: dict = r.json()
            API_FILE.write(api_data)
    events = []
    now = Time.clean(Time.now())
    start_dt = now.delta(minutes=-30)
    end_dt = now.delta(minutes=30)


@@ -14,7 +14,7 @@ CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
-BASE_URL = "https://backend.streamcenter.live/api/Parties"
+BASE_URL = "https://backendstreamcenter.youshop.pro:488/api/Parties"
CATEGORIES = {
    4: "Basketball",
@@ -37,7 +37,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    if not (api_data := API_FILE.load(per_entry=False, index=-1)):
        log.info("Refreshing API cache")
-        api_data = [{"timestamp": now.timestamp()}]
+        api_data = []
        if r := await network.request(
            BASE_URL,


@@ -59,7 +59,7 @@ async def refresh_api_cache(now_ts: float) -> list[dict[str, Any]]:
    results = await asyncio.gather(*tasks)
    if not (data := [*chain.from_iterable(r.json() for r in results if r)]):
-        return [{"timestamp": now_ts}]
+        return []
    for ev in data:
        ev["ts"] = ev.pop("timestamp")

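In the mirror-based scrapers, refresh_api_cache gathers one request per mirror, flattens the JSON lists with chain.from_iterable, and renames each event's timestamp field to ts; per the hunk above it returns [] on an empty result instead of a timestamp-only placeholder entry. A compact sketch under those assumptions, with fetch_events standing in for network.request(...).json():

```python
import asyncio
from itertools import chain
from typing import Any

async def fetch_events(mirror: str) -> list[dict[str, Any]] | None:
    """Hypothetical stand-in for requesting one mirror and decoding its JSON."""
    await asyncio.sleep(0)
    return [{"timestamp": 1_700_000_000, "mirror": mirror}]

async def refresh_api_cache(mirrors: list[str]) -> list[dict[str, Any]]:
    tasks = [fetch_events(m) for m in mirrors]
    results = await asyncio.gather(*tasks)
    # Flatten the per-mirror lists, skipping failed (None) responses.
    if not (data := [*chain.from_iterable(r for r in results if r)]):
        return []  # no timestamp-only placeholder entry
    for ev in data:
        ev["ts"] = ev.pop("timestamp")  # normalise the field name
    return data

print(asyncio.run(refresh_api_cache(["https://mirror-a.example", "https://mirror-b.example"])))
```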

@@ -39,7 +39,7 @@ async def get_events(url: str, cached_keys: list[str]) -> list[dict[str, str]]:
    if not (api_data := API_FILE.load(per_entry=False, index=-1)):
        log.info("Refreshing API cache")
-        api_data = [{"timestamp": now.timestamp()}]
+        api_data = []
        if r := await network.request(
            urljoin(url, "api/matches/all-today"),


@@ -82,10 +82,13 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
        if not (time_node := node.css_first(".col-3")):
            continue
-        if time_node.text(strip=True) != "MatchStarted":
+        if not time_node.text(strip=True) == "MatchStarted":
            continue
-        if not (href := node.attributes.get("href")) or href.startswith("http"):
+        if not (href := node.attributes.get("href")):
            continue
+        if href.startswith("http"):
+            continue
        sport = fix_league(sport)


@@ -60,7 +60,7 @@ async def refresh_api_cache(now: Time) -> list[dict[str, Any]]:
    results = await asyncio.gather(*tasks)
    if not (data := [*chain.from_iterable(r.json() for r in results if r)]):
-        return [{"timestamp": now.timestamp()}]
+        return []
    for ev in data:
        ev["ts"] = ev.pop("timestamp")


@@ -1,3 +1,4 @@
+import asyncio
from functools import partial
from playwright.async_api import async_playwright
@@ -15,23 +16,27 @@ CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=86_400)
-BASE_URL = "https://slapstreams.com"
+BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}
def fix_event(s: str) -> str:
    return " vs ".join(s.split("@"))
-async def refresh_html_cache() -> dict[str, dict[str, str | float]]:
+async def refresh_html_cache(url: str) -> dict[str, dict[str, str | float]]:
    events = {}
-    if not (html_data := await network.request(BASE_URL, log=log)):
+    if not (html_data := await network.request(url, log=log)):
        return events
    now = Time.clean(Time.now())
    soup = HTMLParser(html_data.content)
    title = soup.css_first("title").text(strip=True)
    sport = "NFL" if "NFL" in title else "NHL"
    date_text = now.strftime("%B %d, %Y")
    if date_row := soup.css_first("tr.mdatetitle"):
@@ -61,10 +66,10 @@ async def refresh_html_cache() -> dict[str, dict[str, str | float]]:
        event = fix_event(event_name)
-        key = f"[NHL] {event} ({TAG})"
+        key = f"[{sport}] {event} ({TAG})"
        events[key] = {
-            "sport": "NHL",
+            "sport": sport,
            "event": event,
            "link": href,
            "event_ts": event_dt.timestamp(),
@@ -80,7 +85,11 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    if not (events := HTML_CACHE.load()):
        log.info("Refreshing HTML cache")
-        events = await refresh_html_cache()
+        tasks = [refresh_html_cache(url) for url in BASE_URLS.values()]
+        results = await asyncio.gather(*tasks)
+        events = {k: v for data in results for k, v in data.items()}
        HTML_CACHE.write(events)
@@ -110,7 +119,7 @@ async def scrape() -> None:
    log.info(f"Loaded {cached_count} event(s) from cache")
-    log.info(f'Scraping from "{BASE_URL}"')
+    log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
    events = await get_events(cached_urls.keys())
@@ -152,7 +161,7 @@ async def scrape() -> None:
        entry = {
            "url": url,
            "logo": logo,
-            "base": BASE_URL,
+            "base": BASE_URLS[sport],
            "timestamp": ts,
            "id": tvg_id or "Live.Event.us",
            "link": link,

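The last scraper moves from a single BASE_URL to a BASE_URLS mapping and refreshes the HTML cache for every site concurrently, merging the per-site results into one dict. A minimal sketch of that fan-out; refresh_html_cache here is a stub (the real one fetches and parses each page), and the keys and fields are illustrative only:

```python
import asyncio

# Hypothetical stand-in for refresh_html_cache(url); the real function fetches
# the page, parses its HTML, and returns a {key: event-metadata} mapping.
async def refresh_html_cache(url: str) -> dict[str, dict[str, str | float]]:
    await asyncio.sleep(0)  # pretend we fetched and parsed the page
    sport = "NFL" if "nflwebcast" in url else "NHL"
    return {f"[{sport}] Home vs Away": {"sport": sport, "link": url}}

BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}

async def main() -> None:
    # Same merge shape as the diff: one task per base URL, gathered
    # concurrently, then flattened into a single {key: event} mapping.
    tasks = [refresh_html_cache(url) for url in BASE_URLS.values()]
    results = await asyncio.gather(*tasks)
    events = {k: v for data in results for k, v in data.items()}
    print(events)

asyncio.run(main())
```

asyncio.gather preserves the order of BASE_URLS.values(), and later keys overwrite earlier ones in the merged dict, which only matters if two sites ever produce the same event key.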

@@ -1,7 +1,10 @@
-## Base Log @ 2025-12-29 20:43 UTC
+## Base Log @ 2025-12-28 20:42 UTC
-### ✅ Working Streams: 146<br>❌ Dead Streams: 0
+### ✅ Working Streams: 145<br>❌ Dead Streams: 1
| Channel | Error (Code) | Link |
| ------- | ------------ | ---- |
| FX | HTTP Error (403) | `http://cord-cutter.net:8080/k4Svp2/645504/46690` |
---
#### Base Channels URL
```