Mirror of https://github.com/doms9/iptv.git, synced 2026-01-21 03:59:03 +01:00

Compare commits: 36 commits, 8be48834b1 ... 3710836588
| Author | SHA1 | Date |
| --- | --- | --- |
| | 3710836588 | |
| | a6e59b0d06 | |
| | 6d70587e60 | |
| | 39cfc0ab03 | |
| | bf2b985799 | |
| | 8c80abd80e | |
| | 21792b96bc | |
| | a72c3bb73f | |
| | f555f21255 | |
| | e135816817 | |
| | 69dd938fa1 | |
| | 78dbbd03ad | |
| | 81bf895911 | |
| | 17acdf2fc5 | |
| | 6dc96217d3 | |
| | 0287630b5f | |
| | 00000d903e | |
| | 51598ce2a2 | |
| | f6c01461eb | |
| | 3e1cfb9d19 | |
| | b0f95eb96f | |
| | 00000d9a11 | |
| | 00000d9e80 | |
| | 168fb9171f | |
| | d1dcde8a0e | |
| | 8023d7a0a6 | |
| | 9163284929 | |
| | e6e27f1541 | |
| | 0c8971a507 | |
| | a58cfbcf38 | |
| | 8a82ead052 | |
| | dbbf58c2b2 | |
| | 654013f31f | |
| | 0077357b57 | |
| | a024aa16b6 | |
| | faa459e94b | |
13 changed files with 90823 additions and 92188 deletions
EPG/TV.xml: 177941 changed lines (file diff suppressed because one or more lines are too long)

M3U8/TV.m3u8: 2500 changed lines (file diff suppressed because it is too large)

M3U8/events.m3u8: 2500 changed lines (file diff suppressed because it is too large)
```diff
@@ -18,12 +18,14 @@ API_URL = "https://api.cdn-live.tv/api/v1/events/sports"
 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
+    now = Time.clean(Time.now())
+
     events = []

     if not (api_data := API_FILE.load(per_entry=False)):
         log.info("Refreshing API cache")

-        api_data = {}
+        api_data = {"timestamp": now.timestamp()}

         if r := await network.request(
             API_URL,
@@ -32,13 +34,13 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
         ):
             api_data: dict = r.json()

+            api_data["timestamp"] = now.timestamp()
+
         API_FILE.write(api_data)

     if not (data := api_data.get("cdn-live-tv")):
         return events

-    now = Time.clean(Time.now())
-
     start_dt = now.delta(minutes=-30)
     end_dt = now.delta(minutes=30)
```
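The recurring change across these hunks is seeding the API-cache payload with a top-level refresh timestamp before the network call, so a failed refresh still records when it was attempted. Below is a minimal sketch of that shape using only the standard library; `CACHE_PATH`, `load_cache`, `write_cache`, and `fetch` are hypothetical stand-ins for the repo's `Cache` and `network.request` helpers, not their real interfaces.

```python
import json
import time
from pathlib import Path

# Hypothetical stand-in for the repo's Cache file; not the project's actual API.
CACHE_PATH = Path("cdn-live-api.json")


def load_cache() -> dict:
    """Return the cached payload, or an empty dict if missing or unreadable."""
    try:
        return json.loads(CACHE_PATH.read_text())
    except (FileNotFoundError, json.JSONDecodeError):
        return {}


def write_cache(payload: dict) -> None:
    CACHE_PATH.write_text(json.dumps(payload))


async def refresh(fetch) -> dict:
    """Load the cache, or rebuild it with a refresh timestamp baked in."""
    now_ts = time.time()

    if api_data := load_cache():
        return api_data

    # Seed the payload with the refresh time first, so even a failed request
    # leaves a record of when the refresh was attempted.
    api_data = {"timestamp": now_ts}

    if data := await fetch():  # fetch() is a stand-in for network.request(...)
        api_data = data
        api_data["timestamp"] = now_ts

    write_cache(api_data)
    return api_data
```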
```diff
@@ -27,7 +27,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (api_data := API_CACHE.load(per_entry=False)):
         log.info("Refreshing API cache")

-        api_data = {}
+        api_data = {"timestamp": now.timestamp()}

         if r := await network.request(BASE_URL, log=log):
             api_data: dict = r.json()
@@ -38,10 +38,13 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     events = []

+    start_dt = now.delta(minutes=-30)
+    end_dt = now.delta(minutes=30)
+
     for info in api_data.get("days", []):
         event_dt = Time.from_str(info["day_et"], timezone="ET")

-        if now.date() != event_dt.date():
+        if not start_dt <= event_dt <= end_dt:
             continue

         for event in info["items"]:
```
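This hunk swaps an exact calendar-date match for a ±30-minute window around the current time, so only events that are live or about to start survive the filter. Here is a sketch of the same check with plain `datetime` arithmetic; the repo's `Time.delta`/`Time.from_str` wrappers are replaced with stdlib equivalents, and the sample events are made up.

```python
from datetime import datetime, timedelta, timezone

now = datetime.now(timezone.utc)

start_dt = now - timedelta(minutes=30)
end_dt = now + timedelta(minutes=30)

events = [
    {"name": "early", "dt": now - timedelta(hours=2)},
    {"name": "live", "dt": now - timedelta(minutes=10)},
    {"name": "upcoming", "dt": now + timedelta(minutes=20)},
]

live_or_imminent = []
for ev in events:
    # Keep only events starting within 30 minutes either side of "now",
    # instead of merely matching on the calendar date.
    if not start_dt <= ev["dt"] <= end_dt:
        continue
    live_or_imminent.append(ev["name"])

print(live_or_imminent)  # ['live', 'upcoming']
```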
```diff
@@ -28,19 +28,19 @@ BASE_MIRRORS = [
 async def get_events(api_url: str, cached_keys: list[str]) -> list[dict[str, str]]:
+    events = []
+    now = Time.clean(Time.now())

     if not (api_data := API_FILE.load(per_entry=False)):
         log.info("Refreshing API cache")

-        api_data = {}
+        api_data = {"timestamp": now.timestamp()}

         if r := await network.request(api_url, log=log):
             api_data: dict = r.json()

         API_FILE.write(api_data)

-    now = Time.clean(Time.now())
-    events = []

     start_dt = now.delta(minutes=-30)
     end_dt = now.delta(minutes=30)
```
```diff
@@ -14,7 +14,7 @@ CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)

-BASE_URL = "https://backendstreamcenter.youshop.pro:488/api/Parties"
+BASE_URL = "https://backend.streamcenter.live/api/Parties"

 CATEGORIES = {
     4: "Basketball",
@@ -37,7 +37,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         log.info("Refreshing API cache")

-        api_data = []
+        api_data = [{"timestamp": now.timestamp()}]

         if r := await network.request(
             BASE_URL,
```
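The cache declarations in this hunk carry `exp` values (10_800 s for the scrape cache, 28_800 s for the API cache), and the new list seed stores the refresh time alongside the entries. Below is a sketch of how a stored timestamp could be checked against such an expiry window; treating `exp` as an age limit in seconds is an inference here, not something these hunks confirm.

```python
import time

API_EXP = 28_800  # seconds; the API cache in this hunk is declared with exp=28_800


def is_fresh(payload: dict, exp: int = API_EXP, now_ts: float | None = None) -> bool:
    """Return True if the cached payload is younger than its expiry window."""
    now_ts = now_ts or time.time()
    ts = payload.get("timestamp", 0)
    return (now_ts - ts) < exp


stale = {"timestamp": time.time() - 40_000}
fresh = {"timestamp": time.time() - 1_000}

print(is_fresh(stale), is_fresh(fresh))  # False True
```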
```diff
@@ -59,7 +59,7 @@ async def refresh_api_cache(now_ts: float) -> list[dict[str, Any]]:
     results = await asyncio.gather(*tasks)

     if not (data := [*chain.from_iterable(r.json() for r in results if r)]):
-        return []
+        return [{"timestamp": now_ts}]

     for ev in data:
         ev["ts"] = ev.pop("timestamp")
```
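`refresh_api_cache` gathers several responses, flattens them with `chain.from_iterable`, and now falls back to a bare timestamp entry instead of an empty list when nothing comes back; each event's own `timestamp` field is renamed to `ts` so it cannot collide with the cache-level refresh time. A stdlib-only sketch of that flatten-and-rename step, with plain lists standing in for the gathered HTTP responses:

```python
import time
from itertools import chain

now_ts = time.time()

# Stand-ins for the JSON bodies of several gathered mirror responses.
results = [
    [{"timestamp": 1_700_000_000, "title": "Match A"}],
    [],  # a mirror that returned nothing
    [{"timestamp": 1_700_000_600, "title": "Match B"}],
]

# Flatten all responses into one list; fall back to a bare refresh-timestamp
# entry if every mirror came back empty.
if not (data := [*chain.from_iterable(results)]):
    data = [{"timestamp": now_ts}]
else:
    for ev in data:
        # Free up the "timestamp" key for the cache-level refresh time.
        ev["ts"] = ev.pop("timestamp")

print(data)
```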
```diff
@@ -39,7 +39,7 @@ async def get_events(url: str, cached_keys: list[str]) -> list[dict[str, str]]:
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
         log.info("Refreshing API cache")

-        api_data = []
+        api_data = [{"timestamp": now.timestamp()}]

         if r := await network.request(
             urljoin(url, "api/matches/all-today"),
```
```diff
@@ -82,13 +82,10 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
         if not (time_node := node.css_first(".col-3")):
             continue

-        if not time_node.text(strip=True) == "MatchStarted":
+        if time_node.text(strip=True) != "MatchStarted":
             continue

-        if not (href := node.attributes.get("href")):
-            continue
-
-        if href.startswith("http"):
+        if not (href := node.attributes.get("href")) or href.startswith("http"):
             continue

         sport = fix_league(sport)
```
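Two small cleanups in this hunk: `if not x == y` becomes `if x != y`, and the separate missing-`href` and absolute-URL checks collapse into a single guard with an assignment expression. The sketch below shows the combined guard over plain dicts; the parsed HTML nodes from the real scraper are replaced by dictionaries here.

```python
nodes = [
    {"href": "/stream/123", "text": "MatchStarted"},
    {"href": "https://elsewhere.example/1", "text": "MatchStarted"},  # absolute URL: skipped
    {"text": "MatchStarted"},                                         # no href: skipped
    {"href": "/stream/456", "text": "Halftime"},                      # wrong status: skipped
]

kept = []
for node in nodes:
    if node.get("text") != "MatchStarted":
        continue

    # One guard covers both "no href at all" and "href points off-site".
    if not (href := node.get("href")) or href.startswith("http"):
        continue

    kept.append(href)

print(kept)  # ['/stream/123']
```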
```diff
@@ -60,7 +60,7 @@ async def refresh_api_cache(now: Time) -> list[dict[str, Any]]:
     results = await asyncio.gather(*tasks)

     if not (data := [*chain.from_iterable(r.json() for r in results if r)]):
-        return []
+        return [{"timestamp": now.timestamp()}]

     for ev in data:
         ev["ts"] = ev.pop("timestamp")
```
```diff
@@ -1,4 +1,3 @@
-import asyncio
 from functools import partial

 from playwright.async_api import async_playwright
@@ -16,27 +15,23 @@ CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=86_400)

-BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}
+BASE_URL = "https://slapstreams.com"


 def fix_event(s: str) -> str:
     return " vs ".join(s.split("@"))


-async def refresh_html_cache(url: str) -> dict[str, dict[str, str | float]]:
+async def refresh_html_cache() -> dict[str, dict[str, str | float]]:
     events = {}

-    if not (html_data := await network.request(url, log=log)):
+    if not (html_data := await network.request(BASE_URL, log=log)):
         return events

     now = Time.clean(Time.now())

     soup = HTMLParser(html_data.content)

-    title = soup.css_first("title").text(strip=True)
-
-    sport = "NFL" if "NFL" in title else "NHL"
-
     date_text = now.strftime("%B %d, %Y")

     if date_row := soup.css_first("tr.mdatetitle"):
@@ -66,10 +61,10 @@ async def refresh_html_cache(url: str) -> dict[str, dict[str, str | float]]:
         event = fix_event(event_name)

-        key = f"[{sport}] {event} ({TAG})"
+        key = f"[NHL] {event} ({TAG})"

         events[key] = {
-            "sport": sport,
+            "sport": "NHL",
             "event": event,
             "link": href,
             "event_ts": event_dt.timestamp(),
@@ -85,11 +80,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (events := HTML_CACHE.load()):
         log.info("Refreshing HTML cache")

-        tasks = [refresh_html_cache(url) for url in BASE_URLS.values()]
-
-        results = await asyncio.gather(*tasks)
-
-        events = {k: v for data in results for k, v in data.items()}
+        events = await refresh_html_cache()

         HTML_CACHE.write(events)
@@ -119,7 +110,7 @@ async def scrape() -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")

-    log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
+    log.info(f'Scraping from "{BASE_URL}"')

     events = await get_events(cached_urls.keys())
@@ -161,7 +152,7 @@ async def scrape() -> None:
         entry = {
             "url": url,
             "logo": logo,
-            "base": BASE_URLS[sport],
+            "base": BASE_URL,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
             "link": link,
```
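This file now scrapes a single source: the NFL mirror, the `asyncio.gather` over `BASE_URLS`, and the title-based sport detection are gone, and everything is tagged NHL. The unchanged `fix_event` helper just rewrites the `away@home` form. Below is a sketch of that helper and the single-source refresh shape; `TAG`, `fetch_html`, and the sample matchup are placeholders, and the real cache writing and HTML parsing are omitted.

```python
import asyncio

TAG = "EXAMPLE"  # placeholder for the scraper's TAG constant


def fix_event(s: str) -> str:
    # "Rangers@Bruins" -> "Rangers vs Bruins"
    return " vs ".join(s.split("@"))


async def fetch_html(url: str) -> str:
    # Placeholder for network.request(BASE_URL, log=log); returns dummy markup.
    await asyncio.sleep(0)
    return "<html><title>NHL Streams</title></html>"


async def refresh_html_cache() -> dict[str, dict[str, str]]:
    # Single-source refresh: one fixed URL, no gather over a mirror list.
    html = await fetch_html("https://slapstreams.com")

    event = fix_event("Rangers@Bruins")
    key = f"[NHL] {event} ({TAG})"

    return {key: {"sport": "NHL", "event": event, "page": html[:20]}}


print(asyncio.run(refresh_html_cache()))
```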
````diff
@@ -1,10 +1,7 @@
-## Base Log @ 2025-12-28 20:42 UTC
+## Base Log @ 2025-12-29 20:43 UTC

-### ✅ Working Streams: 145<br>❌ Dead Streams: 1
+### ✅ Working Streams: 146<br>❌ Dead Streams: 0

-| Channel | Error (Code) | Link |
-| ------- | ------------ | ---- |
-| FX | HTTP Error (403) | `http://cord-cutter.net:8080/k4Svp2/645504/46690` |
 ---
 #### Base Channels URL
 ```
````
|||
Loading…
Add table
Add a link
Reference in a new issue