Compare commits


No commits in common. "18d8258f33e33b09fba92e355cc70c2e98f89976" and "ba37a27f1138c757e3722b1891ca2420ea393a49" have entirely different histories.
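For context, this is the state `git merge-base` reports when two refs share no ancestor. A minimal sketch of the check, assuming `git` is on PATH and the snippet runs inside a clone that contains both commits (the helper name is illustrative):

```python
# Minimal sketch: `git merge-base A B` prints a common ancestor and exits 0;
# with entirely different histories it prints nothing and exits non-zero,
# which is the condition reported above. Requires git and a local clone.
import subprocess

def have_common_history(ref_a: str, ref_b: str) -> bool:
    result = subprocess.run(
        ["git", "merge-base", ref_a, ref_b],
        capture_output=True,
        text=True,
    )
    return result.returncode == 0 and bool(result.stdout.strip())

print(have_common_history(
    "18d8258f33e33b09fba92e355cc70c2e98f89976",
    "ba37a27f1138c757e3722b1891ca2420ea393a49",
))  # expected: False for this pair
```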

22 changed files with 90364 additions and 95851 deletions

EPG/TV.xml (178057 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -14,7 +14,6 @@ from scrapers import (
streambtw,
streamcenter,
streamfree,
streamhub,
streamsgate,
strmd,
timstreams,
@@ -59,10 +58,9 @@ async def main() -> None:
asyncio.create_task(streambtw.scrape(network.client)),
asyncio.create_task(streamcenter.scrape(network.client)),
asyncio.create_task(streamfree.scrape(network.client)),
asyncio.create_task(streamhub.scrape(network.client)),
asyncio.create_task(streamsgate.scrape(network.client)),
asyncio.create_task(strmd.scrape(network.client)),
# asyncio.create_task(timstreams.scrape(network.client)),
asyncio.create_task(timstreams.scrape(network.client)),
asyncio.create_task(tvpass.scrape(network.client)),
asyncio.create_task(watchfooty.scrape(network.client)),
asyncio.create_task(webcast.scrape(network.client)),
@@ -82,7 +80,6 @@ async def main() -> None:
| streamcenter.urls
| strmd.urls
| streamfree.urls
| streamhub.urls
| streamsgate.urls
| timstreams.urls
| tvpass.urls
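The hunks above drop `streamhub` from the task fan-out and the `urls` merge and re-enable `timstreams`. A self-contained sketch of the pattern main.py uses, with a stand-in scraper class (names are illustrative, not from the repo):

```python
# Each scraper runs as its own asyncio task and fills a `urls` dict;
# the dicts are then merged with `|`, as in the main.py hunks above.
import asyncio

class DummyScraper:
    def __init__(self, tag: str) -> None:
        self.tag = tag
        self.urls: dict[str, dict[str, str | float]] = {}

    async def scrape(self, client: object) -> None:
        await asyncio.sleep(0)  # stand-in for the real network I/O
        self.urls[f"[Demo] Event ({self.tag})"] = {"url": "http://example.invalid/stream.m3u8"}

async def main() -> None:
    scrapers = [DummyScraper("A"), DummyScraper("B")]
    tasks = [asyncio.create_task(s.scrape(client=None)) for s in scrapers]
    await asyncio.gather(*tasks)
    combined = scrapers[0].urls | scrapers[1].urls  # same `|` merge as the diff
    print(f"Collected {len(combined)} event URL(s)")

asyncio.run(main())
```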


@@ -11,12 +11,12 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "FAWA"
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
CACHE_FILE = Cache("fawa.json", exp=10_800)
BASE_URL = "http://www.fawanews.sc/"
TAG = "FAWA"
async def process_event(
client: httpx.AsyncClient,


@@ -9,14 +9,14 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "LOTUS"
CACHE_FILE = Cache("lotus.json", exp=5_400)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=5_400)
API_CACHE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
API_CACHE = Cache("lotus-api.json", exp=28_800)
BASE_URL = "https://lotusgamehd.xyz/api-event.php"
TAG = "LOTUS"
def fix_league(s: str) -> str:
return " ".join(x.capitalize() for x in s.split()) if len(s) > 5 else s.upper()
@@ -46,14 +46,16 @@ async def refresh_api_cache(
async def get_events(
client: httpx.AsyncClient, cached_keys: set[str]
client: httpx.AsyncClient,
url: str,
cached_keys: set[str],
) -> list[dict[str, str]]:
now = Time.now()
if not (api_data := API_CACHE.load(per_entry=False)):
api_data = await refresh_api_cache(
client,
BASE_URL,
url,
now.timestamp(),
)
@@ -106,7 +108,11 @@ async def scrape(client: httpx.AsyncClient) -> None:
log.info(f'Scraping from "{BASE_URL}"')
events = await get_events(client, set(cached_urls.keys()))
events = await get_events(
client,
BASE_URL,
set(cached_urls.keys()),
)
log.info(f"Processing {len(events)} new URL(s)")


@@ -9,14 +9,14 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "PIXEL"
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=19_800)
CACHE_FILE = Cache("pixel.json", exp=19_800)
BASE_URL = "https://pixelsport.tv/backend/livetv/events"
TAG = "PIXL"
async def get_api_data() -> dict[str, list[dict, str, str]]:
async def get_api_data(url: str) -> dict[str, list[dict, str, str]]:
async with async_playwright() as p:
try:
browser, context = await network.browser(p)
@@ -24,7 +24,7 @@ async def get_api_data() -> dict[str, list[dict, str, str]]:
page = await context.new_page()
await page.goto(
BASE_URL,
url,
wait_until="domcontentloaded",
timeout=10_000,
)
@@ -32,7 +32,7 @@ async def get_api_data() -> dict[str, list[dict, str, str]]:
raw_json = await page.locator("pre").inner_text(timeout=5_000)
except Exception as e:
log.error(f'Failed to fetch "{BASE_URL}": {e}')
log.error(f'Failed to fetch "{url}": {e}')
return {}
@@ -45,7 +45,7 @@ async def get_api_data() -> dict[str, list[dict, str, str]]:
async def get_events() -> dict[str, dict[str, str | float]]:
now = Time.clean(Time.now())
api_data = await get_api_data()
api_data = await get_api_data(BASE_URL)
events = {}
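The pixel hunks apply the same URL injection to a Playwright-based fetch. A rough sketch of that flow, assuming a plain Chromium launch in place of the repo's `network.browser` helper (which is not shown in this diff):

```python
import asyncio
import json

from playwright.async_api import async_playwright

async def get_api_data(url: str) -> dict:
    async with async_playwright() as p:
        browser = await p.chromium.launch()
        page = await browser.new_page()
        try:
            # The endpoint returns raw JSON, which the browser renders in a <pre> tag.
            await page.goto(url, wait_until="domcontentloaded", timeout=10_000)
            raw_json = await page.locator("pre").inner_text(timeout=5_000)
        except Exception as e:
            print(f'Failed to fetch "{url}": {e}')
            return {}
        finally:
            await browser.close()
    return json.loads(raw_json)

# asyncio.run(get_api_data("https://pixelsport.tv/backend/livetv/events"))
```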


@@ -9,11 +9,9 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "PPV"
CACHE_FILE = Cache("ppv.json", exp=10_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
API_FILE = Cache(f"{TAG.lower()}-api.json", exp=19_800)
API_FILE = Cache("ppv-api.json", exp=19_800)
API_MIRRORS = [
"https://old.ppv.to/api/streams",
@@ -27,6 +25,8 @@ BASE_MIRRORS = [
"https://ppv.to",
]
TAG = "PPV"
async def refresh_api_cache(
client: httpx.AsyncClient,


@@ -12,11 +12,9 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "ROXIE"
CACHE_FILE = Cache("roxie.json", exp=10_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=19_800)
HTML_CACHE = Cache("roxie-html.json", exp=19_800)
BASE_URL = "https://roxiestreams.live"
@@ -30,6 +28,8 @@ SPORT_ENDPOINTS = {
"soccer": "Soccer",
}
TAG = "ROXIE"
async def process_event(
client: httpx.AsyncClient,


@@ -10,14 +10,14 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "SHARK"
CACHE_FILE = Cache("shark.json", exp=10_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=19_800)
HTML_CACHE = Cache("shark-html.json", exp=19_800)
BASE_URL = "https://sharkstreams.net"
TAG = "SHARK"
async def process_event(
client: httpx.AsyncClient,
@@ -45,16 +45,18 @@ async def process_event(
async def refresh_html_cache(
client: httpx.AsyncClient, now_ts: float
client: httpx.AsyncClient,
url: str,
now_ts: float,
) -> dict[str, dict[str, str | float]]:
log.info("Refreshing HTML cache")
try:
r = await client.get(BASE_URL)
r = await client.get(url)
r.raise_for_status()
except Exception as e:
log.error(f'Failed to fetch "{BASE_URL}": {e}')
log.error(f'Failed to fetch "{url}": {e}')
return {}
@@ -106,7 +108,11 @@ async def get_events(
now = Time.clean(Time.now())
if not (events := HTML_CACHE.load()):
events = await refresh_html_cache(client, now.timestamp())
events = await refresh_html_cache(
client,
BASE_URL,
now.timestamp(),
)
HTML_CACHE.write(events)
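The surrounding hunk keeps the load-or-refresh-then-write cache flow intact while threading `BASE_URL` through. A self-contained sketch of that flow; this `Cache` stand-in only mimics the repo's `utils.Cache`, whose implementation is not part of this diff:

```python
import json
import time
from pathlib import Path

class Cache:
    """TTL'd JSON cache: stand-in for the repo's utils.Cache."""

    def __init__(self, name: str, exp: int) -> None:
        self.path = Path(name)
        self.exp = exp  # seconds before the cached payload goes stale

    def load(self) -> dict:
        if not self.path.exists():
            return {}
        data = json.loads(self.path.read_text())
        if time.time() - data.get("ts", 0) > self.exp:
            return {}  # expired: caller falls through to a refresh
        return data.get("events", {})

    def write(self, events: dict) -> None:
        self.path.write_text(json.dumps({"ts": time.time(), "events": events}))

HTML_CACHE = Cache("demo-html.json", exp=19_800)
if not (events := HTML_CACHE.load()):
    events = {"[Demo] Event (TAG)": {"url": ""}}  # stand-in for refresh_html_cache()
    HTML_CACHE.write(events)
print(len(events))
```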


@@ -12,14 +12,14 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "SPRT9"
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=3_600)
CACHE_FILE = Cache("sport9.json", exp=3_600)
BASE_URL = "https://sport9.ru"
TAG = "SPRT9"
async def get_html_data(
async def get_html(
client: httpx.AsyncClient,
url: str,
date: str,
@@ -43,7 +43,7 @@ async def get_events(
now = Time.now()
tasks = [
get_html_data(client, BASE_URL, str(d.date()))
get_html(client, BASE_URL, str(d.date()))
for d in [
now.delta(days=-1),
now,


@@ -12,12 +12,12 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "STRMBTW"
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=3_600)
CACHE_FILE = Cache("streambtw.json", exp=3_600)
BASE_URL = "https://streambtw.com"
TAG = "STRMBTW"
def fix_league(s: str) -> str:
pattern = re.compile(r"^\w*-\w*", re.IGNORECASE)


@@ -9,16 +9,15 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "STRMCNTR"
CACHE_FILE = Cache("streamcenter.json", exp=10_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
API_FILE = Cache("streamcenter-api.json", exp=28_800)
BASE_URL = "https://backendstreamcenter.youshop.pro:488/api/Parties"
TAG = "STRMCNTR"
CATEGORIES = {
categories = {
4: "Basketball",
9: "Football",
13: "Baseball",
@@ -34,15 +33,17 @@ CATEGORIES = {
async def refresh_api_cache(
client: httpx.AsyncClient, now_ts: float
client: httpx.AsyncClient,
url: str,
now_ts: float,
) -> list[dict[str, str | int]]:
log.info("Refreshing API cache")
try:
r = await client.get(BASE_URL, params={"pageNumber": 1, "pageSize": 500})
r = await client.get(url, params={"pageNumber": 1, "pageSize": 500})
r.raise_for_status()
except Exception as e:
log.error(f'Failed to fetch "{BASE_URL}": {e}')
log.error(f'Failed to fetch "{url}": {e}')
return []
@@ -61,7 +62,11 @@ async def get_events(
now = Time.clean(Time.now())
if not (api_data := API_FILE.load(per_entry=False, index=-1)):
api_data = await refresh_api_cache(client, now.timestamp())
api_data = await refresh_api_cache(
client,
BASE_URL,
now.timestamp(),
)
API_FILE.write(api_data)
@@ -87,7 +92,7 @@ async def get_events(
if not start_dt <= event_dt <= end_dt:
continue
if not (sport := CATEGORIES.get(category_id)):
if not (sport := categories.get(category_id)):
continue
key = f"[{sport}] {name} ({TAG})"


@@ -8,17 +8,18 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "STRMFREE"
CACHE_FILE = Cache("streamfree.json", exp=19_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=19_800)
BASE_URL = "https://streamfree.to"
BASE_URL = "https://streamfree.to/"
TAG = "STRMFR"
async def refresh_api_cache(client: httpx.AsyncClient) -> dict[str, dict[str, list]]:
async def refresh_api_cache(
client: httpx.AsyncClient,
url: str,
) -> dict[str, dict[str, list]]:
try:
url = urljoin(BASE_URL, "streams")
r = await client.get(url)
r.raise_for_status()
except Exception as e:
@@ -30,7 +31,7 @@ async def refresh_api_cache(client: httpx.AsyncClient) -> dict[str, dict[str, list]]:
async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
api_data = await refresh_api_cache(client)
api_data = await refresh_api_cache(client, urljoin(BASE_URL, "streams"))
events = {}


@@ -1,168 +0,0 @@
import asyncio
from functools import partial
import httpx
from playwright.async_api import async_playwright
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network
log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "STRMHUB"
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
BASE_URL = "https://streamhub.pro/live-now"
CATEGORIES = {
"Soccer": "sport_68c02a4464a38",
"American Football": "sport_68c02a4465113",
# "Baseball": "sport_68c02a446582f",
"Basketball": "sport_68c02a4466011",
# "Cricket": "sport_68c02a44669f3",
"Hockey": "sport_68c02a4466f56",
"MMA": "sport_68c02a44674e9",
"Racing": "sport_68c02a4467a48",
# "Rugby": "sport_68c02a4467fc1",
# "Tennis": "sport_68c02a4468cf7",
# "Volleyball": "sport_68c02a4469422",
}
async def get_html_data(client: httpx.AsyncClient, sport: str) -> bytes:
try:
r = await client.get(BASE_URL, params={"sport_id": sport})
r.raise_for_status()
except Exception as e:
log.error(f'Failed to fetch "{BASE_URL}": {e}')
return b""
return r.content
async def get_events(
client: httpx.AsyncClient, cached_keys: set[str]
) -> list[dict[str, str]]:
tasks = [get_html_data(client, sport) for sport in CATEGORIES.values()]
results = await asyncio.gather(*tasks)
soups = [HTMLParser(html) for html in results]
events = []
for soup in soups:
for section in soup.css(".events-section"):
if not (sport_node := section.css_first(".section-titlte")):
continue
sport = sport_node.text(strip=True)
logo = section.css_first(".league-icon img").attributes.get("src")
for event in section.css(".section-event"):
event_name = "Live Event"
if teams := event.css_first(".event-competitors"):
home, away = teams.text(strip=True).split("vs.")
event_name = f"{away} vs {home}"
if not (event_button := event.css_first("div.event-button a")) or not (
href := event_button.attributes.get("href")
):
continue
key = f"[{sport}] {event_name} ({TAG})"
if cached_keys & {key}:
continue
events.append(
{
"sport": sport,
"event": event_name,
"link": href,
"logo": logo,
}
)
return events
async def scrape(client: httpx.AsyncClient) -> None:
cached_urls = CACHE_FILE.load()
valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
valid_count = cached_count = len(valid_urls)
urls.update(valid_urls)
log.info(f"Loaded {cached_count} event(s) from cache")
log.info(f'Scraping from "{BASE_URL}"')
events = await get_events(client, set(cached_urls.keys()))
log.info(f"Processing {len(events)} new URL(s)")
if events:
now = Time.now().timestamp()
async with async_playwright() as p:
browser, context = await network.browser(p)
for i, ev in enumerate(events, start=1):
handler = partial(
network.process_event,
url=ev["link"],
url_num=i,
context=context,
timeout=5,
log=log,
)
url = await network.safe_process(
handler,
url_num=i,
log=log,
)
sport, event, logo, link = (
ev["sport"],
ev["event"],
ev["logo"],
ev["link"],
)
key = f"[{sport}] {event} ({TAG})"
tvg_id, pic = leagues.get_tvg_info(sport, event)
entry = {
"url": url,
"logo": logo or pic,
"base": "https://storytrench.net/",
"timestamp": now,
"id": tvg_id or "Live.Event.us",
"link": link,
}
cached_urls[key] = entry
if url:
valid_count += 1
urls[key] = entry
await browser.close()
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")
CACHE_FILE.write(cached_urls)


@@ -13,11 +13,9 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "STRMSG8"
CACHE_FILE = Cache("streamsgate.json", exp=10_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
API_FILE = Cache("streamsgate-api.json", exp=28_800)
BASE_URL = "https://streamingon.org"
@@ -33,6 +31,8 @@ SPORT_ENDPOINTS = [
"f1",
]
TAG = "STRMSG8"
def get_event(t1: str, t2: str) -> str:
match t1:
@@ -94,8 +94,8 @@ async def get_events(
events = []
start_dt = now.delta(hours=-1)
end_dt = now.delta(minutes=10)
start_dt = now.delta(minutes=-30)
end_dt = now.delta(minutes=30)
for stream_group in api_data:
event_ts = stream_group.get("ts")
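This hunk narrows the acceptance window from [now − 1 h, now + 10 min] to ±30 minutes around now. A stdlib sketch of the filter, using `datetime` in place of the repo's `Time` helper (not shown here):

```python
from datetime import datetime, timedelta, timezone

now = datetime.now(timezone.utc)
start_dt = now - timedelta(minutes=30)   # was: now - timedelta(hours=1)
end_dt = now + timedelta(minutes=30)     # was: now + timedelta(minutes=10)

event_times = [now - timedelta(hours=2), now + timedelta(minutes=5)]
kept = [ts for ts in event_times if start_dt <= ts <= end_dt]
print(len(kept))  # 1: only the event 5 minutes out survives the tighter window
```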


@@ -12,11 +12,9 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "STRMD"
CACHE_FILE = Cache("strmd.json", exp=10_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
API_FILE = Cache("strmd-api.json", exp=28_800)
MIRRORS = [
"https://streami.su",
@@ -24,6 +22,8 @@ MIRRORS = [
"https://streamed.pk",
]
TAG = "STRMD"
def fix_sport(s: str) -> str:
if "-" in s:


@@ -10,9 +10,8 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "TIM"
CACHE_FILE = Cache("timstreams.json", exp=10_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
API_URL = "https://api.timstreams.site/main"
@@ -22,7 +21,9 @@ BASE_MIRRORS = [
"https://timstreams.top",
]
SPORT_GENRES = {
TAG = "TIM"
sport_genres = {
1: "Soccer",
2: "Motorsport",
3: "MMA",
@@ -43,12 +44,14 @@ SPORT_GENRES = {
}
async def refresh_api_cache(client: httpx.AsyncClient) -> list[dict[str, Any]]:
async def refresh_api_cache(
client: httpx.AsyncClient, url: str
) -> list[dict[str, Any]]:
try:
r = await client.get(API_URL)
r = await client.get(url)
r.raise_for_status()
except Exception as e:
log.error(f'Failed to fetch "{API_URL}": {e}')
log.error(f'Failed to fetch "{url}": {e}')
return []
@@ -56,9 +59,12 @@ async def refresh_api_cache(client: httpx.AsyncClient) -> list[dict[str, Any]]:
async def get_events(
client: httpx.AsyncClient, cached_keys: set[str]
client: httpx.AsyncClient,
url: str,
cached_keys: set[str],
) -> list[dict[str, str]]:
api_data = await refresh_api_cache(client)
api_data = await refresh_api_cache(client, url)
now = Time.now().timestamp()
@@ -78,7 +84,7 @@ async def get_events(
if (genre := ev["genre"]) in {16, 17}:
continue
sport = SPORT_GENRES.get(genre, "Live Event")
sport = sport_genres.get(genre, "Live Event")
streams: list[dict[str, str]] = ev["streams"]
@@ -119,7 +125,11 @@ async def scrape(client: httpx.AsyncClient) -> None:
log.info(f'Scraping from "{base_url}"')
events = await get_events(client, set(cached_urls.keys()))
events = await get_events(
client,
API_URL,
set(cached_urls.keys()),
)
log.info(f"Processing {len(events)} new URL(s)")


@@ -8,12 +8,12 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "TVP"
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=86_400)
CACHE_FILE = Cache("tvpass.json", exp=86_400)
BASE_URL = "https://tvpass.org/playlist/m3u"
TAG = "TVP"
async def get_data(client: httpx.AsyncClient) -> list[str]:
try:


@@ -14,11 +14,9 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "WFTY"
CACHE_FILE = Cache("watchfty.json", exp=10_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
API_FILE = Cache("watchfty-api.json", exp=28_800)
API_MIRRORS = ["https://api.watchfooty.top", "https://api.watchfooty.st"]
@@ -41,6 +39,8 @@ SPORT_ENDPOINTS = [
# "volleyball",
]
TAG = "WFTY"
async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
try:
@@ -55,7 +55,8 @@ async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
async def refresh_api_cache(
client: httpx.AsyncClient, url: str
client: httpx.AsyncClient,
url: str,
) -> list[dict[str, Any]]:
log.info("Refreshing API cache")


@@ -11,14 +11,14 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "WEBCST"
CACHE_FILE = Cache("webcast.json", exp=10_800)
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
HTML_CACHE = Cache(f"{TAG.lower()}-html.json", exp=86_400)
HTML_CACHE = Cache("webcast-html.json", exp=86_400)
BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}
TAG = "WEBCST"
def fix_event(s: str) -> str:
return " vs ".join(s.split("@"))


@@ -1,12 +1,94 @@
## Base Log @ 2025-12-14 20:40 UTC
## Base Log @ 2025-12-13 14:39 UTC
### ✅ Working Streams: 143<br>❌ Dead Streams: 3
### ✅ Working Streams: 61<br>❌ Dead Streams: 85
| Channel | Error (Code) | Link |
| ------- | ------------ | ---- |
| FDSN Florida | HTTP Error (403) | `http://cord-cutter.net:8080/k4Svp2/645504/46794` |
| Spectrum SportsNet LA Dodgers | HTTP Error (502) | `http://cord-cutter.net:8080/k4Svp2/645504/31636` |
| getTV | HTTP Error (403) | `http://cord-cutter.net:8080/k4Svp2/645504/18366` |
| ACC Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9273` |
| AMC | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/18925` |
| Altitude Sports | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/79545` |
| BBC America | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20194` |
| BBC World News | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/139752` |
| Big Ten Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9828` |
| Bloomberg TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/15158` |
| Boomerang | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/14741` |
| Bounce TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/48323` |
| CBS Sports Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10454` |
| CBS | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/120749` |
| CW | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/120893` |
| Cartoon Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46708` |
| Comedy Central | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/7466` |
| Comet TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/125831` |
| Court TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/21092` |
| Cozi TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/11868` |
| Discovery Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46720` |
| Discovery Family Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10538` |
| Disney XD | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/75621` |
| Disney | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/2206` |
| ESPN News | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/17707` |
| ESPN U | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10255` |
| ESPN | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/14197` |
| ESPN2 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/2210` |
| FDSN Detroit | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20936` |
| FDSN Florida | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46794` |
| FDSN Midwest | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/66795` |
| FDSN North | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/58827` |
| FDSN Ohio | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/17752` |
| FDSN Oklahoma | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20934` |
| FDSN SoCal | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/221151` |
| FDSN Southeast | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/81111` |
| FDSN Southwest | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/21843` |
| FDSN Sun | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/104917` |
| FDSN West | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20932` |
| FDSN Wisconsin | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/78599` |
| FX | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46690` |
| FXX | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46699` |
| Food Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/7323` |
| Fox Business | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/17639` |
| Fox News | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/1818` |
| Fox Sports 1 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/1846` |
| Fox Sports 2 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/1847` |
| Fox | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/121595` |
| Freeform TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/13370` |
| Game Show Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/120633` |
| HBO | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46713` |
| History Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/15017` |
| ION TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9297` |
| MSG | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/21090` |
| Marquee Sports Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/13379` |
| MotorTrend TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10399` |
| NBC Sports Bay Area | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9900` |
| NBC Sports Boston | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20939` |
| NBC Sports California | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20940` |
| NBC Sports Philadelphia | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20943` |
| NESN | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/31637` |
| NFL Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/159117` |
| NFL RedZone | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/208830` |
| NewsNation | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/161450` |
| Nickelodeon | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/38` |
| Nicktoons | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/36` |
| Reelz Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10526` |
| Root Sports | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/85232` |
| SEC Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/17608` |
| Space City Home Network | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/213668` |
| Spectrum SportsNet LA Dodgers | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/31636` |
| Spectrum SportsNet Lakers | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20946` |
| SportsNet New York | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/20938` |
| SportsNet Pittsburgh | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/108178` |
| Sportsnet 360 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/57299` |
| Sportsnet East | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/57298` |
| Sportsnet One | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10247` |
| Sportsnet Ontario | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/11649` |
| Starz | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/9299` |
| Syfy | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46685` |
| TLC | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/12734` |
| TSN1 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/57292` |
| TSN2 | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/47442` |
| The Weather Channel | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/18926` |
| USA East | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/10252` |
| Vice TV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/46697` |
| Willow Cricket | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/41979` |
| getTV | HTTP Error (404) | `http://cord-cutter.net:8080/k4Svp2/645504/18366` |
---
#### Base Channels URL
```