Compare commits

..

No commits in common. "717deaec2c0f6189673fdaf0f43f630682e8fba6" and "7ea946f1afb43fb4b63622d0bf2090e0977c2e58" have entirely different histories.

10 changed files with 129278 additions and 118700 deletions

File diff suppressed because it is too large Load diff

245499
M3U8/TV.xml

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load diff

View file

@@ -10,6 +10,7 @@ from scrapers import (
fawa,
istreameast,
livetvsx,
ovogoal,
pawa,
ppv,
roxie,
@@ -18,7 +19,8 @@ from scrapers import (
streamhub,
streamsgate,
timstreams,
totalsportek,
totalsportek1,
totalsportek3,
tvapp,
watchfooty,
webcast,
@@ -64,16 +66,18 @@ async def main() -> None:
asyncio.create_task(roxie.scrape(hdl_brwsr)),
asyncio.create_task(streamcenter.scrape(hdl_brwsr)),
# asyncio.create_task(streamhub.scrape(xtrnl_brwsr)),
asyncio.create_task(streamsgate.scrape(xtrnl_brwsr)),
# asyncio.create_task(streamsgate.scrape(xtrnl_brwsr)),
asyncio.create_task(timstreams.scrape(xtrnl_brwsr)),
]
httpx_tasks = [
asyncio.create_task(fawa.scrape()),
asyncio.create_task(istreameast.scrape()),
# asyncio.create_task(ovogoal.scrape()),
asyncio.create_task(pawa.scrape()),
asyncio.create_task(shark.scrape()),
asyncio.create_task(totalsportek.scrape()),
# asyncio.create_task(totalsportek1.scrape()),
asyncio.create_task(totalsportek3.scrape()),
asyncio.create_task(tvapp.scrape()),
asyncio.create_task(webcast.scrape()),
]
@@ -97,6 +101,7 @@ async def main() -> None:
| fawa.urls
| istreameast.urls
| livetvsx.urls
| ovogoal.urls
| pawa.urls
| ppv.urls
| roxie.urls
@@ -105,7 +110,8 @@ async def main() -> None:
| streamhub.urls
| streamsgate.urls
| timstreams.urls
| totalsportek.urls
| totalsportek1.urls
| totalsportek3.urls
| tvapp.urls
| watchfooty.urls
| webcast.urls

145
M3U8/scrapers/ovogoal.py Normal file
View file

@@ -0,0 +1,145 @@
import re
from functools import partial
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network
# Module-level logger named after this scraper module.
log = get_logger(__name__)
# Collected playable events, shared with the aggregator:
# key -> entry dict (url, logo, base, timestamp, id, link).
urls: dict[str, dict[str, str | float]] = {}
# Tag embedded in every cache key to identify this scraper's entries.
TAG = "OVOGOAL"
# Cache of scraped events; exp=28_800 — presumably seconds (8 h), confirm in Cache.
CACHE_FILE = Cache(TAG, exp=28_800)
# Site scraped by this module.
BASE_URL = "https://ovogoal.plus"
async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]:
    """Resolve one event page to its stream source.

    Loads *url*, follows the first ``<iframe>`` it contains, and captures the
    first quoted JS string assignment (``var``/``const``) from the player page.

    Returns ``(stream_url, iframe_src)`` on success, ``(None, None)`` on any
    failure; every failure path logs a warning tagged with *url_num*.
    """
    page = await network.request(url, log=log)
    if not page:
        log.warning(f"URL {url_num}) Failed to load url.")
        return None, None
    frame_node = HTMLParser(page.content).css_first("iframe")
    frame_url = frame_node.attributes.get("src") if frame_node else None
    if not frame_url:
        log.warning(f"URL {url_num}) No iframe element found.")
        return None, None
    frame_page = await network.request(frame_url, log=log)
    if not frame_page:
        log.warning(f"URL {url_num}) Failed to load iframe source.")
        return None, None
    # First quoted var/const assignment in the player script holds the stream URL.
    source_re = re.compile(r'(var|const)\s+(\w+)\s*=\s*"([^"]*)"', re.I)
    found = source_re.search(frame_page.text)
    if found is None:
        log.warning(f"URL {url_num}) No Clappr source found.")
        return None, None
    log.info(f"URL {url_num}) Captured M3U8")
    return found[3], frame_url
async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    """Scrape the homepage and return events not yet present in the cache.

    Each returned dict carries "sport", "event" and "link" keys; rows whose
    cache key already appears in *cached_keys* are skipped.
    """
    found: list[dict[str, str]] = []
    html_data = await network.request(BASE_URL, log=log)
    if not html_data:
        return found
    sport = "Live Event"
    for row in HTMLParser(html_data.content).css(".main-content .stream-row"):
        button = row.css_first(".watch-btn")
        onclick = button.attributes.get("onclick") if button else None
        if not onclick:
            continue
        info = row.css_first(".stream-info")
        if info is None:
            continue
        # Keep whatever follows ".href=" in the onclick handler, minus quotes.
        link = onclick.split(".href=")[-1].replace("'", "")
        name = info.text(strip=True)
        if f"[{sport}] {name} ({TAG})" not in cached_keys:
            found.append(
                {
                    "sport": sport,
                    "event": name,
                    "link": link,
                }
            )
    return found
async def scrape() -> None:
    """Entry point: load cached events, scrape new ones, and persist the cache.

    Cache entries with a non-empty "url" are re-published into the module-level
    ``urls`` mapping; newly scraped events are processed sequentially and only
    those that yielded a stream URL are published.
    """
    cached_urls = CACHE_FILE.load()
    # Re-publish cache entries that already resolved to a stream URL.
    valid_urls = {key: entry for key, entry in cached_urls.items() if entry["url"]}
    cached_count = len(valid_urls)
    valid_count = cached_count
    urls.update(valid_urls)
    log.info(f"Loaded {cached_count} event(s) from cache")
    log.info(f'Scraping from "{BASE_URL}"')
    events = await get_events(cached_urls.keys())
    if not events:
        log.info("No new events found")
        CACHE_FILE.write(cached_urls)
        return
    log.info(f"Processing {len(events)} new URL(s)")
    now = Time.clean(Time.now())
    for i, ev in enumerate(events, start=1):
        link = ev["link"]
        url, iframe = await network.safe_process(
            partial(process_event, url=link, url_num=i),
            url_num=i,
            semaphore=network.HTTP_S,
            log=log,
        )
        sport, event = ev["sport"], ev["event"]
        key = f"[{sport}] {event} ({TAG})"
        tvg_id, logo = leagues.get_tvg_info(sport, event)
        entry = {
            "url": url,
            "logo": logo,
            "base": iframe,
            "timestamp": now.timestamp(),
            "id": tvg_id or "Live.Event.us",
            "link": link,
        }
        # Cache every attempt; publish only the ones that produced a URL.
        cached_urls[key] = entry
        if url:
            valid_count += 1
            urls[key] = entry
    log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
    CACHE_FILE.write(cached_urls)

View file

@@ -77,7 +77,7 @@ async def get_events(url: str, cached_keys: list[str]) -> list[dict[str, str]]:
{
"sport": sport,
"event": name,
"link": f"{iframe}#player=clappr#autoplay=true",
"link": iframe,
"logo": logo,
"timestamp": event_dt.timestamp(),
}

View file

@@ -0,0 +1,170 @@
import re
from functools import partial
from urllib.parse import urljoin, urlparse
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network
# Module-level logger named after this scraper module.
log = get_logger(__name__)
# Collected playable events, shared with the aggregator:
# key -> entry dict (url, logo, base, timestamp, id, link).
urls: dict[str, dict[str, str | float]] = {}
# Tag embedded in every cache key to identify this scraper's entries.
TAG = "TOTALSPRTK1"
# Cache of scraped events; exp=28_800 — presumably seconds (8 h), confirm in Cache.
CACHE_FILE = Cache(TAG, exp=28_800)
# Site scraped by this module.
BASE_URL = "https://live.totalsportek.foo"
def fix_txt(s: str) -> str:
    """Collapse runs of whitespace to single spaces; uppercase all-lowercase text."""
    collapsed = " ".join(s.split())
    if collapsed.islower():
        return collapsed.upper()
    return collapsed
async def process_event(url: str, url_num: int) -> str | None:
    """Resolve one event page to its stream URL, or ``None`` on failure.

    Follows the page's first ``<iframe>``, then extracts the ``hexEncoded``
    constant from the player script and hex-decodes it to a UTF-8 string.
    Every failure path logs a warning tagged with *url_num*.
    """
    page = await network.request(url, log=log)
    if not page:
        log.warning(f"URL {url_num}) Failed to load url.")
        return None
    frame = HTMLParser(page.content).css_first("iframe")
    if frame is None:
        log.warning(f"URL {url_num}) No iframe element found.")
        return None
    frame_url = frame.attributes.get("src")
    if not frame_url:
        log.warning(f"URL {url_num}) No iframe source found.")
        return None
    player = await network.request(frame_url, log=log)
    if not player:
        log.warning(f"URL {url_num}) Failed to load iframe source.")
        return None
    hex_re = re.compile(r'const\s+hexEncoded\s+=\s+"([^"]*)"', re.I)
    hit = hex_re.search(player.text)
    if hit is None:
        log.warning(f"URL {url_num}) No Clappr source found.")
        return None
    log.info(f"URL {url_num}) Captured M3U8")
    # The captured value is hex; decode it back into a UTF-8 URL.
    return bytes.fromhex(hit[1]).decode("utf-8")
async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    """Scrape the listing page and return started, uncached events.

    Anchors whose parent carries a ``my-1`` class act as section headers and
    update the running *sport* label for the rows that follow. Anchors with
    team cells and a "MatchStarted" time badge become events; those already
    present in *cached_keys* are skipped.
    """
    collected: list[dict[str, str]] = []
    html_data = await network.request(BASE_URL, log=log)
    if not html_data:
        return collected
    sport = "Live Event"
    for anchor in HTMLParser(html_data.content).css("a"):
        if not anchor.attributes.get("class"):
            continue
        parent = anchor.parent
        if parent and "my-1" in parent.attributes.get("class", ""):
            # Section header: remember the sport for subsequent rows.
            label = anchor.css_first("span")
            if label:
                sport = fix_txt(label.text(strip=True))
        teams = [cell.text(strip=True) for cell in anchor.css(".col-7 .col-12")]
        if not teams:
            continue
        href = anchor.attributes.get("href")
        if not href:
            continue
        if href.startswith("http"):
            # Reduce absolute links to their path so urljoin re-roots them on BASE_URL.
            href = urlparse(href).path
        badge = anchor.css_first(".col-3 span")
        if badge is None or badge.text(strip=True).lower() != "matchstarted":
            continue
        event_name = fix_txt(" vs ".join(teams))
        if f"[{sport}] {event_name} ({TAG})" in cached_keys:
            continue
        collected.append(
            {
                "sport": sport,
                "event": event_name,
                "link": urljoin(BASE_URL, href),
            }
        )
    return collected
async def scrape() -> None:
    """Entry point: load cached events, scrape new ones, and persist the cache.

    Cache entries with a non-empty "url" are re-published into the module-level
    ``urls`` mapping; newly scraped events are processed sequentially and only
    those that yielded a stream URL are published.
    """
    cached_urls = CACHE_FILE.load()
    # Re-publish cache entries that already resolved to a stream URL.
    valid_urls = {key: entry for key, entry in cached_urls.items() if entry["url"]}
    cached_count = len(valid_urls)
    valid_count = cached_count
    urls.update(valid_urls)
    log.info(f"Loaded {cached_count} event(s) from cache")
    log.info(f'Scraping from "{BASE_URL}"')
    events = await get_events(cached_urls.keys())
    if not events:
        log.info("No new events found")
        CACHE_FILE.write(cached_urls)
        return
    log.info(f"Processing {len(events)} new URL(s)")
    now = Time.clean(Time.now())
    for i, ev in enumerate(events, start=1):
        link = ev["link"]
        url = await network.safe_process(
            partial(process_event, url=link, url_num=i),
            url_num=i,
            semaphore=network.HTTP_S,
            log=log,
        )
        sport, event = ev["sport"], ev["event"]
        key = f"[{sport}] {event} ({TAG})"
        tvg_id, logo = leagues.get_tvg_info(sport, event)
        entry = {
            "url": url,
            "logo": logo,
            "base": link,
            "timestamp": now.timestamp(),
            "id": tvg_id or "Live.Event.us",
            "link": link,
        }
        # Cache every attempt; publish only the ones that produced a URL.
        cached_urls[key] = entry
        if url:
            valid_count += 1
            urls[key] = entry
    log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
    CACHE_FILE.write(cached_urls)

View file

@@ -11,7 +11,7 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "TOTALSPRTK"
TAG = "TOTALSPRTK3"
CACHE_FILE = Cache(TAG, exp=28_800)

View file

@@ -104,7 +104,7 @@ async def pre_process(url: str, url_num: int) -> str | None:
link_data["wld"]["sn"],
)
return f"https://sportsembed.su/embed/{'/'.join(embed_path)}?player=clappr&autoplay=true"
return f"https://sportsembed.su/embed/{'/'.join(embed_path)}"
async def process_event(

View file

@@ -1,15 +1,19 @@
## Base Log @ 2026-03-29 09:03 UTC
## Base Log @ 2026-03-28 09:03 UTC
### ✅ Working Streams: 156<br>❌ Dead Streams: 6
### ✅ Working Streams: 152<br>❌ Dead Streams: 10
| Channel | Error (Code) | Link |
| ------- | ------------ | ---- |
| AMC | HTTP Error (404) | `http://41.205.93.154/AMC/index.m3u8` |
| Altitude Sports | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/79545.m3u8` |
| Hallmark Family | HTTP Error (000) | `http://mytvstream.net:8080/live/30550113/30550113/9307.m3u8` |
| Bravo TV | HTTP Error (404) | `http://41.205.93.154/BRAVO/index.m3u8` |
| ESPN | HTTP Error (404) | `http://41.205.93.154/ESPN/index.m3u8` |
| HBO Family | HTTP Error (000) | `http://mytvstream.net:8080/live/30550113/30550113/17772.m3u8` |
| Hallmark Family | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/9307.m3u8` |
| Lifetime | HTTP Error (404) | `http://41.205.93.154/LIFETIME/index.m3u8` |
| NBC Sports California | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/20940.m3u8` |
| NHL Network | HTTP Error (000) | `http://mytvstream.net:8080/live/30550113/30550113/20179.m3u8` |
| Pop TV | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/20976.m3u8` |
| Showtime Extreme | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/13220.m3u8` |
| SportsNet New York | HTTP Error (404) | `http://212.102.60.231/SNY/index.m3u8` |
| Spectrum SportsNet LA Dodgers | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/31636.m3u8` |
---
#### Base Channels URL
```