Compare commits

..

37 commits

| Author | SHA1 | Message | Date |
| ------ | ---- | ------- | ---- |
| GitHub Actions Bot | 18d8258f33 | health log | 2025-12-14 20:40:47 +00:00 |
| GitHub Actions Bot | 03f309ee57 | update M3U8 | 2025-12-14 15:34:26 -05:00 |
| GitHub Actions Bot | ec2d619b84 | update M3U8 | 2025-12-14 15:01:42 -05:00 |
| GitHub Actions Bot | fafbb76180 | update M3U8 | 2025-12-14 14:31:01 -05:00 |
| GitHub Actions Bot | f01e28a2bd | update M3U8 | 2025-12-14 14:02:56 -05:00 |
| GitHub Actions Bot | ceef91f022 | update EPG | 2025-12-14 18:53:30 +00:00 |
| GitHub Actions Bot | 9850df9e92 | update M3U8 | 2025-12-14 13:32:29 -05:00 |
| GitHub Actions Bot | cbe090ff6e | update M3U8 | 2025-12-14 13:07:44 -05:00 |
| GitHub Actions Bot | add675e3d5 | update M3U8 | 2025-12-14 12:02:51 -05:00 |
| GitHub Actions Bot | 4d4f8a6361 | update M3U8 | 2025-12-14 11:10:24 -05:00 |
| GitHub Actions Bot | 0bb2617dd7 | update M3U8 | 2025-12-14 10:01:52 -05:00 |
| GitHub Actions Bot | 854f048d59 | health log | 2025-12-14 14:39:34 +00:00 |
| GitHub Actions Bot | 5164793956 | update M3U8 | 2025-12-14 09:08:06 -05:00 |
| GitHub Actions Bot | f0fa0e6843 | update M3U8 | 2025-12-14 08:08:17 -05:00 |
| GitHub Actions Bot | 8c0184c713 | update EPG | 2025-12-14 10:50:18 +00:00 |
| GitHub Actions Bot | fcdc64536a | health log | 2025-12-14 08:44:45 +00:00 |
| GitHub Actions Bot | 7ff1ee068b | update M3U8 | 2025-12-13 23:31:27 -05:00 |
| GitHub Actions Bot | fe105f7b8b | update M3U8 | 2025-12-13 23:00:48 -05:00 |
| GitHub Actions Bot | 37a1fa9700 | update EPG | 2025-12-14 03:50:48 +00:00 |
| GitHub Actions Bot | a290a83e2d | health log | 2025-12-14 03:47:13 +00:00 |
| GitHub Actions Bot | b6ccc72226 | update M3U8 | 2025-12-13 22:31:12 -05:00 |
| GitHub Actions Bot | e42b525bbd | update M3U8 | 2025-12-13 22:01:15 -05:00 |
| GitHub Actions Bot | d5549b6a20 | update M3U8 | 2025-12-13 21:30:54 -05:00 |
| doms9 | 00000d9c19 | fix scraping for streamhub | 2025-12-13 21:29:13 -05:00 |
| GitHub Actions Bot | 15ea61dcb3 | update M3U8 | 2025-12-13 21:01:53 -05:00 |
| GitHub Actions Bot | 89cecc39cd | update M3U8 | 2025-12-13 20:31:22 -05:00 |
| GitHub Actions Bot | 70e5e3f881 | update M3U8 | 2025-12-13 20:02:26 -05:00 |
| GitHub Actions Bot | 2377255045 | update M3U8 | 2025-12-13 19:31:25 -05:00 |
| GitHub Actions Bot | e1b16989d7 | update M3U8 | 2025-12-13 19:04:01 -05:00 |
| GitHub Actions Bot | f2340132f2 | update M3U8 | 2025-12-13 18:31:23 -05:00 |
| GitHub Actions Bot | 38098143ef | update M3U8 | 2025-12-13 18:01:15 -05:00 |
| GitHub Actions Bot | 00f4810a88 | update M3U8 | 2025-12-13 17:31:11 -05:00 |
| GitHub Actions Bot | 5ac0e6a6cf | update M3U8 | 2025-12-13 17:01:31 -05:00 |
| doms9 | 00000d930e | add streamhub; rename cache files | 2025-12-13 16:57:14 -05:00 |
| GitHub Actions Bot | 4d72d60227 | update M3U8 | 2025-12-13 16:31:53 -05:00 |
| GitHub Actions Bot | 689f0a749c | update M3U8 | 2025-12-13 16:02:44 -05:00 |
| GitHub Actions Bot | 0d7c7013b7 | health log | 2025-12-13 20:39:37 +00:00 |
22 changed files with 96350 additions and 90863 deletions

EPG/TV.xml (179335 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -14,6 +14,7 @@ from scrapers import (
     streambtw,
     streamcenter,
     streamfree,
+    streamhub,
     streamsgate,
     strmd,
     timstreams,
@@ -58,9 +59,10 @@ async def main() -> None:
         asyncio.create_task(streambtw.scrape(network.client)),
         asyncio.create_task(streamcenter.scrape(network.client)),
         asyncio.create_task(streamfree.scrape(network.client)),
+        asyncio.create_task(streamhub.scrape(network.client)),
         asyncio.create_task(streamsgate.scrape(network.client)),
         asyncio.create_task(strmd.scrape(network.client)),
-        asyncio.create_task(timstreams.scrape(network.client)),
+        # asyncio.create_task(timstreams.scrape(network.client)),
         asyncio.create_task(tvpass.scrape(network.client)),
         asyncio.create_task(watchfooty.scrape(network.client)),
         asyncio.create_task(webcast.scrape(network.client)),
@@ -80,6 +82,7 @@ async def main() -> None:
         | streamcenter.urls
         | strmd.urls
         | streamfree.urls
+        | streamhub.urls
         | streamsgate.urls
         | timstreams.urls
         | tvpass.urls
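
Taken together, the three hunks above show the contract a scraper module must satisfy before `main()` can register it: expose a module-level `urls` dict and an async `scrape(client)` coroutine, then be wired into the import list, the task list, and the merged-urls union. A minimal sketch of that contract (hypothetical module and tag; only the shape is taken from the diffs):

```python
# hypothetical M3U8/scrapers/example.py — the shape main() expects of a scraper
import httpx

# playlist entries collected by this scraper, keyed like "[Sport] Event (TAG)"
urls: dict[str, dict[str, str | float]] = {}

TAG = "EXMPL"  # hypothetical tag; the real modules derive cache names from it


async def scrape(client: httpx.AsyncClient) -> None:
    # a real module fetches its site here and fills `urls`
    urls[f"[Demo] Placeholder ({TAG})"] = {"url": "", "timestamp": 0.0}
```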

M3U8/scrapers/fawa.py

@@ -11,12 +11,12 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("fawa.json", exp=10_800)
+TAG = "FAWA"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 BASE_URL = "http://www.fawanews.sc/"
-TAG = "FAWA"
 async def process_event(
     client: httpx.AsyncClient,

M3U8/scrapers/lotus.py

@@ -9,14 +9,14 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("lotus.json", exp=5_400)
-API_CACHE = Cache("lotus-api.json", exp=28_800)
+TAG = "LOTUS"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=5_400)
+API_CACHE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
 BASE_URL = "https://lotusgamehd.xyz/api-event.php"
-TAG = "LOTUS"
 def fix_league(s: str) -> str:
     return " ".join(x.capitalize() for x in s.split()) if len(s) > 5 else s.upper()
@@ -46,16 +46,14 @@ async def refresh_api_cache(
 async def get_events(
-    client: httpx.AsyncClient,
-    url: str,
-    cached_keys: set[str],
+    client: httpx.AsyncClient, cached_keys: set[str]
 ) -> list[dict[str, str]]:
     now = Time.now()
     if not (api_data := API_CACHE.load(per_entry=False)):
         api_data = await refresh_api_cache(
             client,
-            url,
+            BASE_URL,
             now.timestamp(),
         )
@@ -108,11 +106,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f'Scraping from "{BASE_URL}"')
-    events = await get_events(
-        client,
-        BASE_URL,
-        set(cached_urls.keys()),
-    )
+    events = await get_events(client, set(cached_urls.keys()))
     log.info(f"Processing {len(events)} new URL(s)")

M3U8/scrapers/pixel.py

@@ -9,14 +9,14 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("pixel.json", exp=19_800)
+TAG = "PIXEL"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=19_800)
 BASE_URL = "https://pixelsport.tv/backend/livetv/events"
-TAG = "PIXL"
-async def get_api_data(url: str) -> dict[str, list[dict, str, str]]:
+async def get_api_data() -> dict[str, list[dict, str, str]]:
     async with async_playwright() as p:
         try:
             browser, context = await network.browser(p)
@@ -24,7 +24,7 @@ async def get_api_data(url: str) -> dict[str, list[dict, str, str]]:
             page = await context.new_page()
             await page.goto(
-                url,
+                BASE_URL,
                 wait_until="domcontentloaded",
                 timeout=10_000,
             )
@@ -32,7 +32,7 @@ async def get_api_data(url: str) -> dict[str, list[dict, str, str]]:
             raw_json = await page.locator("pre").inner_text(timeout=5_000)
         except Exception as e:
-            log.error(f'Failed to fetch "{url}": {e}')
+            log.error(f'Failed to fetch "{BASE_URL}": {e}')
             return {}
@@ -45,7 +45,7 @@ async def get_api_data(url: str) -> dict[str, list[dict, str, str]]:
 async def get_events() -> dict[str, dict[str, str | float]]:
     now = Time.clean(Time.now())
-    api_data = await get_api_data(BASE_URL)
+    api_data = await get_api_data()
     events = {}
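
The surrounding function drives a headless browser to BASE_URL and parses the JSON the backend renders inside a `<pre>` element. A self-contained sketch of the same technique, using plain Playwright instead of the repo's `network.browser` helper (the URL is the one from the diff):

```python
import asyncio
import json

from playwright.async_api import async_playwright


async def fetch_pre_json(url: str) -> dict:
    # load the page headlessly and parse the JSON shown in its <pre> element
    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)
        page = await browser.new_page()
        await page.goto(url, wait_until="domcontentloaded", timeout=10_000)
        raw = await page.locator("pre").inner_text(timeout=5_000)
        await browser.close()
        return json.loads(raw)


# e.g. asyncio.run(fetch_pre_json("https://pixelsport.tv/backend/livetv/events"))
```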

M3U8/scrapers/ppv.py

@@ -9,9 +9,11 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("ppv.json", exp=10_800)
-API_FILE = Cache("ppv-api.json", exp=19_800)
+TAG = "PPV"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
+API_FILE = Cache(f"{TAG.lower()}-api.json", exp=19_800)
 API_MIRRORS = [
     "https://old.ppv.to/api/streams",
@@ -25,8 +27,6 @@ BASE_MIRRORS = [
     "https://ppv.to",
 ]
-TAG = "PPV"
 async def refresh_api_cache(
     client: httpx.AsyncClient,

M3U8/scrapers/roxie.py

@@ -12,9 +12,11 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("roxie.json", exp=10_800)
-HTML_CACHE = Cache("roxie-html.json", exp=19_800)
+TAG = "ROXIE"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
+HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=19_800)
 BASE_URL = "https://roxiestreams.live"
@@ -28,8 +30,6 @@ SPORT_ENDPOINTS = {
     "soccer": "Soccer",
 }
-TAG = "ROXIE"
 async def process_event(
     client: httpx.AsyncClient,

M3U8/scrapers/shark.py

@@ -10,14 +10,14 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("shark.json", exp=10_800)
-HTML_CACHE = Cache("shark-html.json", exp=19_800)
+TAG = "SHARK"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
+HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=19_800)
 BASE_URL = "https://sharkstreams.net"
-TAG = "SHARK"
 async def process_event(
     client: httpx.AsyncClient,
@@ -45,18 +45,16 @@ async def process_event(
 async def refresh_html_cache(
-    client: httpx.AsyncClient,
-    url: str,
-    now_ts: float,
+    client: httpx.AsyncClient, now_ts: float
 ) -> dict[str, dict[str, str | float]]:
     log.info("Refreshing HTML cache")
     try:
-        r = await client.get(url)
+        r = await client.get(BASE_URL)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{url}": {e}')
+        log.error(f'Failed to fetch "{BASE_URL}": {e}')
         return {}
@@ -108,11 +106,7 @@ async def get_events(
     now = Time.clean(Time.now())
     if not (events := HTML_CACHE.load()):
-        events = await refresh_html_cache(
-            client,
-            BASE_URL,
-            now.timestamp(),
-        )
+        events = await refresh_html_cache(client, now.timestamp())
         HTML_CACHE.write(events)

M3U8/scrapers/sport9.py

@@ -12,14 +12,14 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("sport9.json", exp=3_600)
+TAG = "SPRT9"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=3_600)
 BASE_URL = "https://sport9.ru"
-TAG = "SPRT9"
-async def get_html(
+async def get_html_data(
     client: httpx.AsyncClient,
     url: str,
     date: str,
@@ -43,7 +43,7 @@ async def get_events(
     now = Time.now()
     tasks = [
-        get_html(client, BASE_URL, str(d.date()))
+        get_html_data(client, BASE_URL, str(d.date()))
         for d in [
             now.delta(days=-1),
             now,

M3U8/scrapers/streambtw.py

@@ -12,12 +12,12 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("streambtw.json", exp=3_600)
+TAG = "STRMBTW"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=3_600)
 BASE_URL = "https://streambtw.com"
-TAG = "STRMBTW"
 def fix_league(s: str) -> str:
     pattern = re.compile(r"^\w*-\w*", re.IGNORECASE)

M3U8/scrapers/streamcenter.py

@@ -9,15 +9,16 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("streamcenter.json", exp=10_800)
-API_FILE = Cache("streamcenter-api.json", exp=28_800)
+TAG = "STRMCNTR"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
+API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
 BASE_URL = "https://backendstreamcenter.youshop.pro:488/api/Parties"
-TAG = "STRMCNTR"
-categories = {
+CATEGORIES = {
     4: "Basketball",
     9: "Football",
     13: "Baseball",
@@ -33,17 +34,15 @@ categories = {
 async def refresh_api_cache(
-    client: httpx.AsyncClient,
-    url: str,
-    now_ts: float,
+    client: httpx.AsyncClient, now_ts: float
 ) -> list[dict[str, str | int]]:
     log.info("Refreshing API cache")
     try:
-        r = await client.get(url, params={"pageNumber": 1, "pageSize": 500})
+        r = await client.get(BASE_URL, params={"pageNumber": 1, "pageSize": 500})
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{url}": {e}')
+        log.error(f'Failed to fetch "{BASE_URL}": {e}')
         return []
@@ -62,11 +61,7 @@ async def get_events(
     now = Time.clean(Time.now())
     if not (api_data := API_FILE.load(per_entry=False, index=-1)):
-        api_data = await refresh_api_cache(
-            client,
-            BASE_URL,
-            now.timestamp(),
-        )
+        api_data = await refresh_api_cache(client, now.timestamp())
         API_FILE.write(api_data)
@@ -92,7 +87,7 @@ async def get_events(
         if not start_dt <= event_dt <= end_dt:
             continue
-        if not (sport := categories.get(category_id)):
+        if not (sport := CATEGORIES.get(category_id)):
             continue
         key = f"[{sport}] {name} ({TAG})"
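
Several of these files share the same load-or-refresh idiom: try the cache, rebuild from the network on a miss, then write the result back. Detached from the repo's `Cache` class, the pattern reduces to something like this generic sketch (hypothetical `load_or_refresh` helper; mtime-based expiry is assumed):

```python
import json
import time
from pathlib import Path
from typing import Any, Callable


def load_or_refresh(path: Path, exp: float, refresh: Callable[[], Any]) -> Any:
    # serve cached JSON while it is younger than `exp` seconds; otherwise
    # rebuild via `refresh()` and persist the result for the next caller
    if path.exists() and time.time() - path.stat().st_mtime < exp:
        return json.loads(path.read_text())
    data = refresh()
    path.write_text(json.dumps(data))
    return data
```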

M3U8/scrapers/streamfree.py

@@ -8,18 +8,17 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("streamfree.json", exp=19_800)
-TAG = "STRMFREE"
-BASE_URL = "https://streamfree.to"
+TAG = "STRMFR"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=19_800)
+BASE_URL = "https://streamfree.to/"
-async def refresh_api_cache(
-    client: httpx.AsyncClient,
-    url: str,
-) -> dict[str, dict[str, list]]:
+async def refresh_api_cache(client: httpx.AsyncClient) -> dict[str, dict[str, list]]:
     try:
+        url = urljoin(BASE_URL, "streams")
         r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
@@ -31,7 +30,7 @@ async def refresh_api_cache(
 async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
-    api_data = await refresh_api_cache(client, urljoin(BASE_URL, "streams"))
+    api_data = await refresh_api_cache(client)
     events = {}
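
The trailing slash added to BASE_URL matters once `urljoin` builds the endpoint: a relative segment replaces everything after the last slash of the base path. A quick check of the semantics:

```python
from urllib.parse import urljoin

print(urljoin("https://streamfree.to/", "streams"))       # https://streamfree.to/streams
print(urljoin("https://streamfree.to/live", "streams"))   # https://streamfree.to/streams ("live" is replaced)
print(urljoin("https://streamfree.to/live/", "streams"))  # https://streamfree.to/live/streams
```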

M3U8/scrapers/streamhub.py (new file, 168 lines)

@@ -0,0 +1,168 @@
import asyncio
from functools import partial

import httpx
from playwright.async_api import async_playwright
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

TAG = "STRMHUB"
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
BASE_URL = "https://streamhub.pro/live-now"

CATEGORIES = {
    "Soccer": "sport_68c02a4464a38",
    "American Football": "sport_68c02a4465113",
    # "Baseball": "sport_68c02a446582f",
    "Basketball": "sport_68c02a4466011",
    # "Cricket": "sport_68c02a44669f3",
    "Hockey": "sport_68c02a4466f56",
    "MMA": "sport_68c02a44674e9",
    "Racing": "sport_68c02a4467a48",
    # "Rugby": "sport_68c02a4467fc1",
    # "Tennis": "sport_68c02a4468cf7",
    # "Volleyball": "sport_68c02a4469422",
}


async def get_html_data(client: httpx.AsyncClient, sport: str) -> bytes:
    try:
        r = await client.get(BASE_URL, params={"sport_id": sport})
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{BASE_URL}": {e}')
        return b""

    return r.content


async def get_events(
    client: httpx.AsyncClient, cached_keys: set[str]
) -> list[dict[str, str]]:
    tasks = [get_html_data(client, sport) for sport in CATEGORIES.values()]
    results = await asyncio.gather(*tasks)
    soups = [HTMLParser(html) for html in results]

    events = []

    for soup in soups:
        for section in soup.css(".events-section"):
            if not (sport_node := section.css_first(".section-titlte")):
                continue

            sport = sport_node.text(strip=True)
            logo = section.css_first(".league-icon img").attributes.get("src")

            for event in section.css(".section-event"):
                event_name = "Live Event"

                if teams := event.css_first(".event-competitors"):
                    home, away = teams.text(strip=True).split("vs.")
                    event_name = f"{away} vs {home}"

                if not (event_button := event.css_first("div.event-button a")) or not (
                    href := event_button.attributes.get("href")
                ):
                    continue

                key = f"[{sport}] {event_name} ({TAG})"

                if cached_keys & {key}:
                    continue

                events.append(
                    {
                        "sport": sport,
                        "event": event_name,
                        "link": href,
                        "logo": logo,
                    }
                )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
    valid_count = cached_count = len(valid_urls)
    urls.update(valid_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")
    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client, set(cached_urls.keys()))
    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        now = Time.now().timestamp()

        async with async_playwright() as p:
            browser, context = await network.browser(p)

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    timeout=5,
                    log=log,
                )
                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                sport, event, logo, link = (
                    ev["sport"],
                    ev["event"],
                    ev["logo"],
                    ev["link"],
                )
                key = f"[{sport}] {event} ({TAG})"
                tvg_id, pic = leagues.get_tvg_info(sport, event)

                entry = {
                    "url": url,
                    "logo": logo or pic,
                    "base": "https://storytrench.net/",
                    "timestamp": now,
                    "id": tvg_id or "Live.Event.us",
                    "link": link,
                }
                cached_urls[key] = entry

                if url:
                    valid_count += 1
                    urls[key] = entry

            await browser.close()

    if new_count := valid_count - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
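
In the repo, `main()` drives this module through the shared `network.client`; for a quick standalone check, a plain `httpx.AsyncClient` should be a workable substitute (an assumption — `scrape()` only needs the httpx client interface, plus Playwright installed):

```python
import asyncio

import httpx

from scrapers import streamhub  # run from the M3U8 directory


async def check() -> None:
    # run the new scraper once and report what it collected
    async with httpx.AsyncClient(timeout=10) as client:
        await streamhub.scrape(client)
    print(f"collected {len(streamhub.urls)} playable URL(s)")


asyncio.run(check())
```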

M3U8/scrapers/streamsgate.py

@@ -13,9 +13,11 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("streamsgate.json", exp=10_800)
-API_FILE = Cache("streamsgate-api.json", exp=28_800)
+TAG = "STRMSG8"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
+API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
 BASE_URL = "https://streamingon.org"
@@ -31,8 +33,6 @@ SPORT_ENDPOINTS = [
     "f1",
 ]
-TAG = "STRMSG8"
 def get_event(t1: str, t2: str) -> str:
     match t1:
@@ -94,8 +94,8 @@ async def get_events(
     events = []
-    start_dt = now.delta(minutes=-30)
-    end_dt = now.delta(minutes=30)
+    start_dt = now.delta(hours=-1)
+    end_dt = now.delta(minutes=10)
     for stream_group in api_data:
         event_ts = stream_group.get("ts")
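
The widened window now admits events that kicked off up to an hour ago or start within the next ten minutes. The repo's `Time.delta` helper wraps this; a plain-datetime equivalent, assuming `ts` is a Unix timestamp in seconds:

```python
from datetime import datetime, timedelta, timezone


def in_window(ts: float) -> bool:
    # accept events from 1 hour in the past up to 10 minutes ahead
    now = datetime.now(timezone.utc)
    event = datetime.fromtimestamp(ts, tz=timezone.utc)
    return now - timedelta(hours=1) <= event <= now + timedelta(minutes=10)
```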

M3U8/scrapers/strmd.py

@@ -12,9 +12,11 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("strmd.json", exp=10_800)
-API_FILE = Cache("strmd-api.json", exp=28_800)
+TAG = "STRMD"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
+API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
 MIRRORS = [
     "https://streami.su",
@@ -22,8 +24,6 @@ MIRRORS = [
     "https://streamed.pk",
 ]
-TAG = "STRMD"
 def fix_sport(s: str) -> str:
     if "-" in s:

M3U8/scrapers/timstreams.py

@@ -10,8 +10,9 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("timstreams.json", exp=10_800)
+TAG = "TIM"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 API_URL = "https://api.timstreams.site/main"
@@ -21,9 +22,7 @@ BASE_MIRRORS = [
     "https://timstreams.top",
 ]
-TAG = "TIM"
-sport_genres = {
+SPORT_GENRES = {
     1: "Soccer",
     2: "Motorsport",
     3: "MMA",
@@ -44,14 +43,12 @@ sport_genres = {
 }
-async def refresh_api_cache(
-    client: httpx.AsyncClient, url: str
-) -> list[dict[str, Any]]:
+async def refresh_api_cache(client: httpx.AsyncClient) -> list[dict[str, Any]]:
     try:
-        r = await client.get(url)
+        r = await client.get(API_URL)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{url}": {e}')
+        log.error(f'Failed to fetch "{API_URL}": {e}')
         return []
@@ -59,12 +56,9 @@ async def refresh_api_cache(
 async def get_events(
-    client: httpx.AsyncClient,
-    url: str,
-    cached_keys: set[str],
+    client: httpx.AsyncClient, cached_keys: set[str]
 ) -> list[dict[str, str]]:
-    api_data = await refresh_api_cache(client, url)
+    api_data = await refresh_api_cache(client)
     now = Time.now().timestamp()
@@ -84,7 +78,7 @@ async def get_events(
         if (genre := ev["genre"]) in {16, 17}:
             continue
-        sport = sport_genres.get(genre, "Live Event")
+        sport = SPORT_GENRES.get(genre, "Live Event")
         streams: list[dict[str, str]] = ev["streams"]
@@ -125,11 +119,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f'Scraping from "{base_url}"')
-    events = await get_events(
-        client,
-        API_URL,
-        set(cached_urls.keys()),
-    )
+    events = await get_events(client, set(cached_urls.keys()))
     log.info(f"Processing {len(events)} new URL(s)")

M3U8/scrapers/tvpass.py

@@ -8,12 +8,12 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("tvpass.json", exp=86_400)
+TAG = "TVP"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=86_400)
 BASE_URL = "https://tvpass.org/playlist/m3u"
-TAG = "TVP"
 async def get_data(client: httpx.AsyncClient) -> list[str]:
     try:

M3U8/scrapers/watchfooty.py

@@ -14,9 +14,11 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("watchfty.json", exp=10_800)
-API_FILE = Cache("watchfty-api.json", exp=28_800)
+TAG = "WFTY"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
+API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
 API_MIRRORS = ["https://api.watchfooty.top", "https://api.watchfooty.st"]
@@ -39,8 +41,6 @@ SPORT_ENDPOINTS = [
     # "volleyball",
 ]
-TAG = "WFTY"
 async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
     try:
@@ -55,8 +55,7 @@ async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
 async def refresh_api_cache(
-    client: httpx.AsyncClient,
-    url: str,
+    client: httpx.AsyncClient, url: str
 ) -> list[dict[str, Any]]:
     log.info("Refreshing API cache")

M3U8/scrapers/webcast.py

@@ -11,14 +11,14 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("webcast.json", exp=10_800)
-HTML_CACHE = Cache("webcast-html.json", exp=86_400)
+TAG = "WEBCST"
+CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
+HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=86_400)
 BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}
-TAG = "WEBCST"
 def fix_event(s: str) -> str:
     return " vs ".join(s.split("@"))


@@ -1,94 +1,12 @@
-## Base Log @ 2025-12-13 14:39 UTC
+## Base Log @ 2025-12-14 20:40 UTC
-### ✅ Working Streams: 61<br>❌ Dead Streams: 85
+### ✅ Working Streams: 143<br>❌ Dead Streams: 3
 | Channel | Error (Code) | Link |
 | ------- | ------------ | ---- |
-| ACC Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9273` |
-| AMC | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/18925` |
-| Altitude Sports | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/79545` |
-| BBC America | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20194` |
-| BBC World News | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/139752` |
-| Big Ten Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9828` |
-| Bloomberg TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/15158` |
-| Boomerang | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/14741` |
-| Bounce TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/48323` |
-| CBS Sports Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10454` |
-| CBS | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/120749` |
-| CW | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/120893` |
-| Cartoon Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46708` |
-| Comedy Central | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/7466` |
-| Comet TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/125831` |
-| Court TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/21092` |
-| Cozi TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/11868` |
-| Discovery Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46720` |
-| Discovery Family Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10538` |
-| Disney XD | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/75621` |
-| Disney | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/2206` |
-| ESPN News | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/17707` |
-| ESPN U | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10255` |
-| ESPN | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/14197` |
-| ESPN2 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/2210` |
-| FDSN Detroit | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20936` |
-| FDSN Florida | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46794` |
-| FDSN Midwest | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/66795` |
-| FDSN North | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/58827` |
-| FDSN Ohio | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/17752` |
-| FDSN Oklahoma | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20934` |
-| FDSN SoCal | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/221151` |
-| FDSN Southeast | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/81111` |
-| FDSN Southwest | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/21843` |
-| FDSN Sun | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/104917` |
-| FDSN West | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20932` |
-| FDSN Wisconsin | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/78599` |
-| FX | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46690` |
-| FXX | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46699` |
-| Food Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/7323` |
-| Fox Business | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/17639` |
-| Fox News | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/1818` |
-| Fox Sports 1 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/1846` |
-| Fox Sports 2 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/1847` |
-| Fox | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/121595` |
-| Freeform TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/13370` |
-| Game Show Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/120633` |
-| HBO | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46713` |
-| History Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/15017` |
-| ION TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9297` |
-| MSG | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/21090` |
-| Marquee Sports Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/13379` |
-| MotorTrend TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10399` |
-| NBC Sports Bay Area | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9900` |
-| NBC Sports Boston | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20939` |
-| NBC Sports California | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20940` |
-| NBC Sports Philadelphia | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20943` |
-| NESN | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/31637` |
-| NFL Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/159117` |
-| NFL RedZone | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/208830` |
-| NewsNation | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/161450` |
-| Nickelodeon | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/38` |
-| Nicktoons | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/36` |
-| Reelz Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10526` |
-| Root Sports | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/85232` |
-| SEC Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/17608` |
-| Space City Home Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/213668` |
-| Spectrum SportsNet LA Dodgers | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/31636` |
-| Spectrum SportsNet Lakers | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20946` |
-| SportsNet New York | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20938` |
-| SportsNet Pittsburgh | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/108178` |
-| Sportsnet 360 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/57299` |
-| Sportsnet East | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/57298` |
-| Sportsnet One | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10247` |
-| Sportsnet Ontario | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/11649` |
-| Starz | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9299` |
-| Syfy | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46685` |
-| TLC | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/12734` |
-| TSN1 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/57292` |
-| TSN2 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/47442` |
-| The Weather Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/18926` |
-| USA East | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10252` |
-| Vice TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46697` |
-| Willow Cricket | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/41979` |
-| getTV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/18366` |
+| FDSN Florida | HTTP Error (403) | `http://cord-cutter.net:8080/k4Svp2/645504/46794` |
+| Spectrum SportsNet LA Dodgers | HTTP Error (502) | `http://cord-cutter.net:8080/k4Svp2/645504/31636` |
+| getTV | HTTP Error (403) | `http://cord-cutter.net:8080/k4Svp2/645504/18366` |
 ---
 #### Base Channels URL
```