Compare commits


31 commits

| Author | SHA1 | Message | Date |
| ------ | ---- | ------- | ---- |
| GitHub Actions Bot | 1fc683d5fc | update M3U8 | 2026-04-16 12:00:51 -04:00 |
| GitHub Actions Bot | cce3ae53da | update M3U8 | 2026-04-16 11:18:57 -04:00 |
| GitHub Actions Bot | b6ed533800 | update EPG | 2026-04-16 11:42:26 +00:00 |
| GitHub Actions Bot | 069d58f3d0 | health log | 2026-04-16 09:55:41 +00:00 |
| GitHub Actions Bot | 78e280e4cf | update EPG | 2026-04-16 05:18:59 +00:00 |
| GitHub Actions Bot | d2cb37145e | health log | 2026-04-16 05:16:46 +00:00 |
| GitHub Actions Bot | 1f84ae15a3 | update M3U8 | 2026-04-15 23:31:04 -04:00 |
| GitHub Actions Bot | 0df32e6c1b | update M3U8 | 2026-04-15 23:00:41 -04:00 |
| GitHub Actions Bot | 77fe0aab91 | update M3U8 | 2026-04-15 22:30:58 -04:00 |
| GitHub Actions Bot | b885509d1c | update M3U8 | 2026-04-15 22:00:49 -04:00 |
| GitHub Actions Bot | e3ca36418d | update M3U8 | 2026-04-15 21:30:50 -04:00 |
| GitHub Actions Bot | b6935d12e6 | update M3U8 | 2026-04-15 21:01:14 -04:00 |
| doms9 | 00000d913c | e<br>- re-add xstreameast.py | 2026-04-15 20:38:27 -04:00 |
| GitHub Actions Bot | 68e2d41659 | update M3U8 | 2026-04-15 20:32:31 -04:00 |
| GitHub Actions Bot | 8ed866c1d7 | update M3U8 | 2026-04-15 20:01:26 -04:00 |
| GitHub Actions Bot | 80e2b66d63 | update M3U8 | 2026-04-15 19:31:38 -04:00 |
| GitHub Actions Bot | b8aebb8734 | update M3U8 | 2026-04-15 19:01:40 -04:00 |
| GitHub Actions Bot | f66c080c9b | update M3U8 | 2026-04-15 18:31:36 -04:00 |
| GitHub Actions Bot | 37c924c554 | update M3U8 | 2026-04-15 18:00:47 -04:00 |
| GitHub Actions Bot | 04a5b71c63 | update M3U8 | 2026-04-15 17:30:58 -04:00 |
| GitHub Actions Bot | 676ac6c9ed | health log | 2026-04-15 21:12:54 +00:00 |
| GitHub Actions Bot | b17c2a2755 | update M3U8 | 2026-04-15 17:00:22 -04:00 |
| GitHub Actions Bot | df04540555 | update M3U8 | 2026-04-15 16:30:53 -04:00 |
| GitHub Actions Bot | 25feb29435 | update M3U8 | 2026-04-15 16:02:26 -04:00 |
| GitHub Actions Bot | f6e96e477a | update EPG | 2026-04-15 19:51:40 +00:00 |
| GitHub Actions Bot | b8e6eca55d | update M3U8 | 2026-04-15 15:30:47 -04:00 |
| GitHub Actions Bot | af0f6e810f | update M3U8 | 2026-04-15 15:02:01 -04:00 |
| GitHub Actions Bot | f1297510f4 | update M3U8 | 2026-04-15 14:30:43 -04:00 |
| GitHub Actions Bot | 3563049472 | update M3U8 | 2026-04-15 14:01:07 -04:00 |
| GitHub Actions Bot | ccc1212ad4 | update M3U8 | 2026-04-15 13:31:17 -04:00 |
| GitHub Actions Bot | ed18832eef | update M3U8 | 2026-04-15 13:01:24 -04:00 |
9 changed files with 116918 additions and 119405 deletions

File diff suppressed because it is too large

M3U8/TV.xml (233520)
File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

```diff
@@ -8,9 +8,7 @@ from scrapers import (
     cdnlivetv,
     embedhd,
     fawa,
-    fsports,
     istreameast,
-    listapreta,
     livetvsx,
     mainportal,
     ovogoal,
@@ -26,6 +24,7 @@ from scrapers import (
     tvapp,
     watchfooty,
     webcast,
+    xstreameast,
 )
 
 from scrapers.utils import get_logger, network
@@ -64,7 +63,6 @@ async def main() -> None:
     pw_tasks = [
         asyncio.create_task(cdnlivetv.scrape(hdl_brwsr)),
         asyncio.create_task(embedhd.scrape(hdl_brwsr)),
-        # asyncio.create_task(fsports.scrape(xtrnl_brwsr)),
         asyncio.create_task(ppv.scrape(xtrnl_brwsr)),
         asyncio.create_task(roxie.scrape(hdl_brwsr)),
     ]
@@ -72,7 +70,6 @@ async def main() -> None:
     httpx_tasks = [
         asyncio.create_task(fawa.scrape()),
         asyncio.create_task(istreameast.scrape()),
-        # asyncio.create_task(listapreta.scrape()),
         asyncio.create_task(mainportal.scrape()),
         asyncio.create_task(ovogoal.scrape()),
         asyncio.create_task(pawa.scrape()),
@@ -84,6 +81,7 @@ async def main() -> None:
         asyncio.create_task(totalsportek.scrape()),
         asyncio.create_task(tvapp.scrape()),
         asyncio.create_task(webcast.scrape()),
+        asyncio.create_task(xstreameast.scrape()),
     ]
 
     await asyncio.gather(*(pw_tasks + httpx_tasks))
@@ -103,9 +101,7 @@ async def main() -> None:
         cdnlivetv.urls
         | embedhd.urls
         | fawa.urls
-        | fsports.urls
         | istreameast.urls
-        | listapreta.urls
         | livetvsx.urls
         | mainportal.urls
         | ovogoal.urls
@@ -121,6 +117,7 @@ async def main() -> None:
         | tvapp.urls
         | watchfooty.urls
         | webcast.urls
+        | xstreameast.urls
     )
 
     live_events: list[str] = []
```
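The hunks above show the repo's orchestration pattern: each scraper module exposes a module-level `urls` dict and a `scrape()` coroutine, `main()` schedules one task per scraper, awaits them all with `asyncio.gather`, and merges the per-module dicts with the `|` union operator. A minimal self-contained sketch of that shape (the `FakeScraper` stand-in is illustrative, not part of the repo):

```python
import asyncio


class FakeScraper:
    """Stand-in for a scraper module: fills `urls` as a side effect of scrape()."""

    def __init__(self, name: str) -> None:
        self.name = name
        self.urls: dict[str, dict[str, str | float]] = {}

    async def scrape(self) -> None:
        # A real scraper would fetch and parse pages here.
        self.urls[f"[Demo] {self.name}"] = {"url": f"https://example.com/{self.name}.m3u8"}


async def main() -> None:
    alpha, beta = FakeScraper("alpha"), FakeScraper("beta")

    # One task per scraper, gathered so they all run concurrently.
    tasks = [asyncio.create_task(alpha.scrape()), asyncio.create_task(beta.scrape())]
    await asyncio.gather(*tasks)

    # Merge per-module results with `|`, as the added `xstreameast.urls` line does.
    merged = alpha.urls | beta.urls
    print(merged)


asyncio.run(main())
```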

scrapers/fsports.py (deleted, 143 lines)

```python
import asyncio
from functools import partial
from urllib.parse import urljoin

from playwright.async_api import Browser
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

TAG = "FSPRTS"
CACHE_FILE = Cache(TAG, exp=5_400)
BASE_URL = "https://fsportshds.xyz"
SPORT_URLS = {
    # "Fighting": urljoin(BASE_URL, "mmastreams.php"),
    "Basketball": urljoin(BASE_URL, "nbastreams.php"),
    # "Ice Hockey": urljoin(BASE_URL, "nhlstreams.php"),
    # "American Football": urljoin(BASE_URL, "nflstreams.php")
} | {
    sport: urljoin(BASE_URL, f"{sport}streams.php".lower())
    for sport in [
        "Football",
        # "Boxing",
        # "F1",
        # "MLB",
        # "MotoGP",
    ]
}


async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    tasks = [network.request(url, log=log) for url in SPORT_URLS.values()]
    results = await asyncio.gather(*tasks)

    events = []

    if not (
        soups := [(HTMLParser(html.content), html.url) for html in results if html]
    ):
        return events

    for soup, url in soups:
        sport = next((k for k, v in SPORT_URLS.items() if v == url), "Live Event")

        for card in soup.css(".media.btn.btn-default.btn-lg.btn-block"):
            if not (name_elem := card.css_first("h4")):
                continue

            if card.css_first('[id^="countdown-"]'):
                continue

            if not (a_elem := card.css_first("a")) or not (
                href := a_elem.attributes.get("href")
            ):
                continue

            name = name_elem.text(strip=True)

            if f"[{sport}] {name} ({TAG})" in cached_keys:
                continue

            events.append(
                {
                    "sport": sport,
                    "event": name,
                    "link": urljoin(BASE_URL, href),
                }
            )

    return events


async def scrape(browser: Browser) -> None:
    cached_urls = CACHE_FILE.load()
    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
    valid_count = cached_count = len(valid_urls)
    urls.update(valid_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")
    log.info(f'Scraping from "{BASE_URL}"')

    if events := await get_events(cached_urls.keys()):
        log.info(f"Processing {len(events)} new URL(s)")
        now = Time.clean(Time.now())

        async with network.event_context(browser, stealth=False) as context:
            for i, ev in enumerate(events, start=1):
                async with network.event_page(context) as page:
                    handler = partial(
                        network.process_event,
                        url=(link := ev["link"]),
                        url_num=i,
                        page=page,
                        log=log,
                    )
                    url = await network.safe_process(
                        handler,
                        url_num=i,
                        semaphore=network.PW_S,
                        log=log,
                    )

                sport, event = ev["sport"], ev["event"]
                key = f"[{sport}] {event} ({TAG})"
                tvg_id, logo = leagues.get_tvg_info(sport, event)

                entry = {
                    "url": url,
                    "logo": logo,
                    "base": "https://vividmosaica.com/",
                    "timestamp": now.timestamp(),
                    "id": tvg_id or "Live.Event.us",
                    "link": link,
                }

                cached_urls[key] = entry

                if url:
                    valid_count += 1
                    urls[key] = entry

        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
```
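The `Cache` helper from `scrapers.utils` is not included in this compare; judging from how the scrapers use it (`Cache(TAG, exp=...)`, `load()`, `write()`, and per-entry `timestamp` fields), its assumed behavior is a JSON store with per-entry TTL expiry. A hypothetical sketch, where the file location and the expiry rule are guesses:

```python
import json
import time
from pathlib import Path


class Cache:
    """Hypothetical reconstruction of the scrapers.utils Cache helper."""

    def __init__(self, tag: str, exp: int) -> None:
        self.path = Path("cache") / f"{tag}.json"  # assumed location
        self.exp = exp  # TTL in seconds, e.g. 5_400 for FSPRTS above

    def load(self) -> dict[str, dict]:
        if not self.path.exists():
            return {}
        entries: dict[str, dict] = json.loads(self.path.read_text())
        cutoff = time.time() - self.exp
        # Keep only entries whose timestamp is still within the TTL window.
        return {k: v for k, v in entries.items() if v.get("timestamp", 0) >= cutoff}

    def write(self, entries: dict[str, dict]) -> None:
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.path.write_text(json.dumps(entries, indent=2))
```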

scrapers/listapreta.py (deleted, 171 lines)

```python
from functools import partial

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

TAG = "LISTA"
CACHE_FILE = Cache(TAG, exp=28_800)
API_URL = "https://listapreta.site/sports-widget/events.php"


async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]:
    nones = None, None
    event_id = url.split("id=")[-1]

    if not (
        token_req := await network.request(
            "https://lista-preta-tv.site/generate_token.php",
            params={"id": event_id},
            log=log,
        )
    ):
        log.warning(f"URL {url_num}) Failed to load token data.")
        return nones

    if not (token_data := token_req.json()):
        log.warning(f"URL {url_num}) No token data available.")
        return nones

    elif not (token := token_data.get("token")) or not (exp := token_data.get("exp")):
        log.warning(f"URL {url_num}) No token data available.")
        return nones

    ref = f"https://lista-preta-tv.site/player-all.html?id={event_id}"

    if not (
        m3u8_req := await network.request(
            "https://lista-preta-tv.site/m3u8.php",
            headers={"Referer": ref},
            params={"id": event_id, "token": token, "exp": exp},
            follow_redirects=False,
            log=log,
        )
    ):
        log.warning(f"URL {url_num}) Unable to fetch M3U8 request.")
        return nones

    elif not (m3u8 := m3u8_req.headers.get("Location")):
        log.warning(f"URL {url_num}) Unable to fetch M3U8 request.")
        return nones

    log.info(f"URL {url_num}) Captured M3U8")
    return m3u8, ref


async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    now = Time.clean(Time.now())
    events = []

    if not (api_req := await network.request(API_URL, log=log)):
        return events

    elif not (api_data := api_req.json()) or api_data.get("error"):
        return events

    for event in api_data:
        sport = event.get("sport")
        t1, t2 = event.get("home"), event.get("away")

        if not (sport and t1 and t2):
            continue

        event_name = f"{t1} vs {t2}"

        if f"[{sport}] {event_name} ({TAG})" in cached_keys:
            continue

        event_dt = Time.from_str(event["start"], timezone="UTC")

        if event_dt.date() != now.date():
            continue

        if not (channels := event.get("channels")):
            continue

        event_links: list[str] = [channel["url"] for channel in channels]
        link = event_links[0]

        events.append(
            {
                "sport": sport,
                "event": event_name,
                "link": link,
                "timestamp": now.timestamp(),
            }
        )

    return events


async def scrape() -> None:
    cached_urls = CACHE_FILE.load()
    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
    valid_count = cached_count = len(valid_urls)
    urls.update(valid_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")
    log.info('Scraping from "https://listapreta.site"')

    if events := await get_events(cached_urls.keys()):
        log.info(f"Processing {len(events)} new URL(s)")

        for i, ev in enumerate(events, start=1):
            handler = partial(
                process_event,
                url=(link := ev["link"]),
                url_num=i,
            )
            url, iframe = await network.safe_process(
                handler,
                url_num=i,
                semaphore=network.HTTP_S,
                log=log,
            )

            sport, event, ts = (
                ev["sport"],
                ev["event"],
                ev["timestamp"],
            )
            key = f"[{sport}] {event} ({TAG})"
            tvg_id, logo = leagues.get_tvg_info(sport, event)

            entry = {
                "url": url,
                "logo": logo,
                "base": iframe,
                "timestamp": ts,
                "id": tvg_id or "Live.Event.us",
                "link": link,
            }

            cached_urls[key] = entry

            if url:
                valid_count += 1
                urls[key] = entry

        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
```
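The distinctive step in the deleted listapreta scraper is how `process_event` resolves the playlist: it calls `m3u8.php` with redirects disabled and reads the target URL out of the `Location` header instead of following it. A standalone httpx sketch of that redirect-capture technique (endpoint and parameters taken from the file above; the argument values are placeholders):

```python
import asyncio

import httpx


async def resolve_m3u8(event_id: str, token: str, exp: str) -> str | None:
    """Capture a redirect target without following the redirect."""
    ref = f"https://lista-preta-tv.site/player-all.html?id={event_id}"

    async with httpx.AsyncClient(follow_redirects=False) as client:
        resp = await client.get(
            "https://lista-preta-tv.site/m3u8.php",
            headers={"Referer": ref},
            params={"id": event_id, "token": token, "exp": exp},
        )

    # A 3xx response carries the real playlist URL in its Location header.
    return resp.headers.get("Location")


# Example (placeholder values):
# m3u8 = asyncio.run(resolve_m3u8("123", "tok", "1700000000"))
```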

scrapers/streamcenter.py

```diff
@@ -10,7 +10,7 @@ urls: dict[str, dict[str, str | float]] = {}
 
 TAG = "STRMCNTR"
-CACHE_FILE = Cache(TAG, exp=19_800)
+CACHE_FILE = Cache(TAG, exp=28_800)
 API_URL = "https://backend.streamcenter.live/api/Parties"
 
```

scrapers/xstreameast.py (new file, 179 lines)

```python
import asyncio
import re
from functools import partial
from urllib.parse import urljoin

from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

TAG = "XSTRMEST"
CACHE_FILE = Cache(TAG, exp=10_800)
BASE_URL = "https://xstreameast.com"
SPORT_URLS = [
    urljoin(BASE_URL, f"categories/{sport}/")
    for sport in [
        # "mlb",
        "mma",
        "nba",
        # "nfl",
        # "nhl",
        "soccer",
        "wwe",
    ]
]


async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]:
    nones = None, None

    if not (html_data := await network.request(url, log=log)):
        log.warning(f"URL {url_num}) Failed to load url.")
        return nones

    soup = HTMLParser(html_data.content)
    iframe = soup.css_first("iframe")

    if not iframe or not (iframe_src := iframe.attributes.get("src")):
        log.warning(f"URL {url_num}) No iframe element found.")
        return nones

    elif iframe_src == "about:blank":
        log.warning(f"URL {url_num}) No iframe element found.")
        return nones

    if not (iframe_src_data := await network.request(iframe_src, log=log)):
        log.warning(f"URL {url_num}) Failed to load iframe source.")
        return nones

    valid_m3u8 = re.compile(r'(var|const)\s+(\w+)\s*=\s*"([^"]*)"', re.I)

    if not (match := valid_m3u8.search(iframe_src_data.text)):
        log.warning(f"URL {url_num}) No Clappr source found.")
        return nones

    if len(encoded := match[2]) < 20:
        encoded = match[3]

    log.info(f"URL {url_num}) Captured M3U8")
    return bytes.fromhex(encoded).decode("utf-8"), iframe_src


async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    tasks = [network.request(url, log=log) for url in SPORT_URLS]
    results = await asyncio.gather(*tasks)

    events = []

    if not (soups := [HTMLParser(html.content) for html in results if html]):
        return events

    sport = "Live Event"

    for soup in soups:
        if sport_header := soup.css_first("h1.text-3xl"):
            header = sport_header.text(strip=True)
            sport = header.split("Streams")[0].strip()

        for card in soup.css("article.game-card"):
            if not (team_elem := card.css_first("h2.text-xl.font-semibold")):
                continue

            if not (link_elem := card.css_first("a.stream-button")) or not (
                href := link_elem.attributes.get("href")
            ):
                continue

            if (
                not (live_badge := card.css_first("span.bg-green-600"))
                or live_badge.text(strip=True) != "LIVE"
            ):
                continue

            event_name = team_elem.text(strip=True)

            if f"[{sport}] {event_name} ({TAG})" in cached_keys:
                continue

            events.append(
                {
                    "sport": sport,
                    "event": event_name,
                    "link": href,
                }
            )

    return events


async def scrape() -> None:
    cached_urls = CACHE_FILE.load()
    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
    valid_count = cached_count = len(valid_urls)
    urls.update(valid_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")
    log.info(f'Scraping from "{BASE_URL}"')

    if events := await get_events(cached_urls.keys()):
        log.info(f"Processing {len(events)} new URL(s)")
        now = Time.clean(Time.now())

        for i, ev in enumerate(events, start=1):
            handler = partial(
                process_event,
                url=(link := ev["link"]),
                url_num=i,
            )
            url, iframe = await network.safe_process(
                handler,
                url_num=i,
                semaphore=network.HTTP_S,
                log=log,
            )

            sport, event = ev["sport"], ev["event"]
            key = f"[{sport}] {event} ({TAG})"
            tvg_id, logo = leagues.get_tvg_info(sport, event)

            entry = {
                "url": url,
                "logo": logo,
                "base": iframe,
                "timestamp": now.timestamp(),
                "id": tvg_id or "Live.Event.us",
                "link": link,
            }

            cached_urls[key] = entry

            if url:
                valid_count += 1
                urls[key] = entry

        log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
```
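The new scraper's `process_event` expects the embed page to hold its playlist URL hex-encoded in a JS `var`/`const` assignment: the regex captures the variable name in group 2 and the string literal in group 3, and `bytes.fromhex` recovers the URL. A self-contained round-trip of just the decode step:

```python
import re

# Build a page snippet like the ones the scraper expects, with the URL hex-encoded.
page_js = 'const source = "%s";' % "https://example.com/stream.m3u8".encode().hex()

# Same pattern as process_event above: group 2 is the variable name,
# group 3 the quoted string literal.
match = re.search(r'(var|const)\s+(\w+)\s*=\s*"([^"]*)"', page_js, re.I)
assert match is not None

encoded = match[3]
print(bytes.fromhex(encoded).decode("utf-8"))  # -> https://example.com/stream.m3u8
```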

````diff
@@ -1,21 +1,14 @@
-## Base Log @ 2026-04-15 15:46 UTC
-### ✅ Working Streams: 149<br>❌ Dead Streams: 12
+## Base Log @ 2026-04-16 09:55 UTC
+### ✅ Working Streams: 156<br>❌ Dead Streams: 5
 | Channel | Error (Code) | Link |
 | ------- | ------------ | ---- |
-| Aspire | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/150605.ts` |
-| Comedy TV | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/199482.ts` |
-| Cozi TV | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/8392.ts` |
+| FDSN Ohio | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/296675.ts` |
 | FX Movie Channel | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/39873.ts` |
-| Game Show Network | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/466.ts` |
-| HBO Family | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/760.ts` |
-| Lifetime | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/4667.ts` |
-| NBC Sports Boston | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/35132.ts` |
-| Nick Jr | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/14835.ts` |
-| TV One | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/13010.ts` |
-| The Weather Channel | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/2361.ts` |
-| YES Network | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/5519.ts` |
+| Hallmark Family | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/304609.ts` |
+| NBC Sports Bay Area | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/45785.ts` |
+| TLC | HTTP Error (404) | `http://iptvtree.net:8080/live/7e4b0dbd/1dd755dc3f/2362.ts` |
 ---
 #### Base Channels URL
 ```
````
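The health-check script that produces this log is not part of the compare; the table implies a per-channel HTTP probe that records error statuses. A hedged sketch of such a probe (channel list, timeout, and output format are assumptions, not the repo's actual checker):

```python
import asyncio

import httpx


async def probe(client: httpx.AsyncClient, name: str, url: str) -> tuple[str, str] | None:
    """Return a (channel, error) row for the log table, or None if the stream answers."""
    try:
        # Stream the request so only the status line is read, not live TS data.
        async with client.stream("GET", url, timeout=10) as resp:
            status = resp.status_code
    except httpx.HTTPError as exc:
        return name, type(exc).__name__
    return (name, f"HTTP Error ({status})") if status >= 400 else None


async def main() -> None:
    channels = {"Example TV": "http://example.com/live/stream.ts"}  # placeholder

    async with httpx.AsyncClient() as client:
        results = await asyncio.gather(
            *(probe(client, name, url) for name, url in channels.items())
        )

    dead = [row for row in results if row]
    print(f"✅ Working Streams: {len(channels) - len(dead)}<br>❌ Dead Streams: {len(dead)}")


asyncio.run(main())
```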