Compare commits

...

31 commits

Author SHA1 Message Date
GitHub Actions Bot
ea2aa0a6ec update M3U8 2025-12-24 17:00:14 -05:00
GitHub Actions Bot
e3bd58fd89 update M3U8 2025-12-24 16:00:52 -05:00
GitHub Actions Bot
a59b440b74 health log 2025-12-24 20:41:25 +00:00
GitHub Actions Bot
76749c673e update M3U8 2025-12-24 15:00:19 -05:00
GitHub Actions Bot
7fe16c6716 update M3U8 2025-12-24 14:00:44 -05:00
GitHub Actions Bot
9f78899116 update EPG 2025-12-24 18:56:16 +00:00
GitHub Actions Bot
83ac72bf53 update M3U8 2025-12-24 13:00:55 -05:00
GitHub Actions Bot
16bea6c245 update M3U8 2025-12-24 12:01:08 -05:00
GitHub Actions Bot
7a2e4e4002 update M3U8 2025-12-24 11:02:16 -05:00
GitHub Actions Bot
b6e15c6694 update M3U8 2025-12-24 10:01:26 -05:00
GitHub Actions Bot
027b51cf38 health log 2025-12-24 14:42:15 +00:00
GitHub Actions Bot
59ca76ca2a update M3U8 2025-12-24 09:01:21 -05:00
GitHub Actions Bot
e2e69ce850 update M3U8 2025-12-24 08:01:38 -05:00
GitHub Actions Bot
3cbaa9fd8f update EPG 2025-12-24 10:52:50 +00:00
GitHub Actions Bot
9e7007c00f health log 2025-12-24 08:50:40 +00:00
doms9
00000d9153 e
add totalsportek.py
2025-12-24 01:54:02 -05:00
GitHub Actions Bot
2c856d9e80 update M3U8 2025-12-23 23:30:43 -05:00
GitHub Actions Bot
afb9fa2764 update M3U8 2025-12-23 23:01:07 -05:00
GitHub Actions Bot
e23f6b9aad update EPG 2025-12-24 03:48:20 +00:00
GitHub Actions Bot
185233a5e9 health log 2025-12-24 03:44:35 +00:00
GitHub Actions Bot
136f3bd001 update M3U8 2025-12-23 22:31:44 -05:00
GitHub Actions Bot
f80cf39f4a update M3U8 2025-12-23 22:01:29 -05:00
GitHub Actions Bot
f3d455420e update M3U8 2025-12-23 21:31:10 -05:00
GitHub Actions Bot
a410588646 update M3U8 2025-12-23 21:02:23 -05:00
GitHub Actions Bot
7227654779 update M3U8 2025-12-23 20:31:26 -05:00
GitHub Actions Bot
c111fc8e78 update M3U8 2025-12-23 20:01:37 -05:00
GitHub Actions Bot
f7787936fe update M3U8 2025-12-23 19:32:15 -05:00
GitHub Actions Bot
6c9009a7a5 update M3U8 2025-12-23 19:02:36 -05:00
GitHub Actions Bot
5b757f730d update M3U8 2025-12-23 18:31:46 -05:00
GitHub Actions Bot
db65be11aa update M3U8 2025-12-23 18:01:48 -05:00
GitHub Actions Bot
11380f9189 update M3U8 2025-12-23 17:31:31 -05:00
8 changed files with 87458 additions and 90688 deletions

175285
EPG/TV.xml

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@@ -18,6 +18,7 @@ from scrapers import (
streamhub,
streamsgate,
strmd,
totalsportek,
tvpass,
watchfooty,
webcast,
@@ -52,17 +53,18 @@ async def main() -> None:
asyncio.create_task(embedhd.scrape()),
asyncio.create_task(fawa.scrape()),
asyncio.create_task(istreameast.scrape()),
asyncio.create_task(pixel.scrape()),
# asyncio.create_task(pixel.scrape()),
asyncio.create_task(ppv.scrape()),
asyncio.create_task(roxie.scrape()),
asyncio.create_task(shark.scrape()),
asyncio.create_task(sport9.scrape()),
asyncio.create_task(streambtw.scrape()),
# asyncio.create_task(streambtw.scrape()),
asyncio.create_task(streamcenter.scrape()),
asyncio.create_task(streamfree.scrape()),
asyncio.create_task(streamhub.scrape()),
asyncio.create_task(streamsgate.scrape()),
asyncio.create_task(strmd.scrape()),
# asyncio.create_task(totalsportek.scrape()),
asyncio.create_task(tvpass.scrape()),
asyncio.create_task(webcast.scrape()),
]
@@ -82,10 +84,11 @@ async def main() -> None:
| sport9.urls
| streambtw.urls
| streamcenter.urls
| strmd.urls
| streamfree.urls
| streamhub.urls
| streamsgate.urls
| strmd.urls
| totalsportek.urls
| tvpass.urls
| watchfooty.urls
| webcast.urls

View file

@@ -18,8 +18,6 @@ BASE_URL = "https://istreameast.app"
async def process_event(url: str, url_num: int) -> str | None:
pattern = re.compile(r"source:\s*window\.atob\(\s*'([^']+)'\s*\)", re.IGNORECASE)
if not (event_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
@@ -42,6 +40,8 @@ async def process_event(url: str, url_num: int) -> str | None:
return
pattern = re.compile(r"source:\s*window\.atob\(\s*'([^']+)'\s*\)", re.IGNORECASE)
if not (match := pattern.search(iframe_src_data.text)):
log.warning(f"URL {url_num}) No Clappr source found.")

View file

@@ -0,0 +1,168 @@
import re
from functools import partial
from urllib.parse import urljoin
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network
log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
TAG = "TOTALSPRTK"
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=28_800)
BASE_URL = "https://live.totalsportek777.com/"
def fix_league(s: str) -> str:
    """Normalize a league label: an all-lowercase name is uppercased, any
    other casing is kept as-is."""
    if s.islower():
        return s.upper()
    return s
async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]:
    """Resolve one event page to its M3U8 stream URL.

    Loads the event page, locates its embedded "xsportportal" iframe, fetches
    the iframe document, and extracts the hex-encoded stream URL from the
    first ``var <name> = "<value>"`` JavaScript assignment found there.

    Args:
        url: Event page URL (from get_events).
        url_num: 1-based index, used only for log messages.

    Returns:
        (m3u8_url, iframe_src) on success, (None, None) on any failure.
    """
    if not (html_data := await network.request(url, log=log)):
        log.info(f"URL {url_num}) Failed to load url.")
        return None, None
    soup = HTMLParser(html_data.content)
    if not (iframe := soup.css_first("iframe")):
        log.warning(f"URL {url_num}) No iframe element found.")
        return None, None
    # Only xsportportal embeds carry the hex-encoded stream URL we know how
    # to decode; reject anything else.
    if (
        not (iframe_src := iframe.attributes.get("src"))
        or "xsportportal" not in iframe_src
    ):
        log.warning(f"URL {url_num}) No valid iframe source found.")
        return None, None
    if not (iframe_src_data := await network.request(iframe_src, log=log)):
        log.info(f"URL {url_num}) Failed to load iframe source.")
        return None, None
    # Matches the first string assignment in the iframe's script; group 2 is
    # expected (but not guaranteed) to be a hex-encoded M3U8 URL.
    valid_m3u8 = re.compile(r'var\s+(\w+)\s*=\s*"([^"]*)"', re.IGNORECASE)
    if not (match := valid_m3u8.search(iframe_src_data.text)):
        log.warning(f"URL {url_num}) No Clappr source found.")
        return None, None
    try:
        # The regex accepts ANY quoted value (including an empty string), so
        # guard the decode: bytes.fromhex raises ValueError on non-hex input,
        # and .decode can raise UnicodeDecodeError (a ValueError subclass).
        decoded = bytes.fromhex(match[2]).decode("utf-8")
    except ValueError:
        log.warning(f"URL {url_num}) Captured source is not hex-encoded.")
        return None, None
    log.info(f"URL {url_num}) Captured M3U8")
    return decoded, iframe_src
async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    """Collect not-yet-cached events from the totalsportek front page.

    Walks each ".div-main-box" container, tracking the current sport label as
    it goes (a "my-1" node's <span> updates it), and turns every relative
    "nav-link2" anchor into an event dict.

    Args:
        cached_keys: Existing cache keys of the form "[sport] event (TAG)";
            matching events are skipped. Any container supporting `in` works
            (scrape() passes dict keys, not a list).

    Returns:
        List of {"sport", "event", "link"} dicts; empty on fetch failure.
    """
    events = []
    if not (html_data := await network.request(BASE_URL, log=log)):
        return events
    soup = HTMLParser(html_data.content)
    # Fallback label used until the first sport heading is seen; it then
    # carries over across nodes (and boxes) until the next heading replaces it.
    sport = "Live Event"
    for box in soup.css(".div-main-box"):
        for node in box.iter():
            if not (node_class := node.attributes.get("class")):
                continue
            # Sport heading row: its <span> text becomes the current sport.
            if "my-1" in node_class:
                if span := node.css_first("span"):
                    sport = span.text(strip=True)
            # Event anchor row.
            if node.tag == "a" and "nav-link2" in node_class:
                if not (href := node.attributes.get("href")):
                    continue
                # Absolute hrefs are off-site links, not event pages — skip.
                if href.startswith("http"):
                    continue
                sport = fix_league(sport)
                # Team names live in nested grid cells; joined "A vs B".
                teams = [t.text(strip=True) for t in node.css(".col-7 .col-12")]
                event_name = " vs ".join(teams)
                # Skip events already present in the cache.
                if f"[{sport}] {event_name} ({TAG})" in cached_keys:
                    continue
                events.append(
                    {
                        "sport": sport,
                        "event": event_name,
                        "link": urljoin(BASE_URL, href),
                    }
                )
    return events
async def scrape() -> None:
    """Scrape totalsportek: load the cache, process new events, publish them
    into the module-level `urls` mapping, and persist the updated cache.

    Each successful event yields a playlist entry keyed "[sport] event (TAG)"
    containing the decoded M3U8 url, logo, iframe base, timestamp, tvg id,
    and source link.
    """
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    # Previously cached events are served immediately, before any network work.
    urls.update(cached_urls)
    log.info(f"Loaded {cached_count} event(s) from cache")
    log.info(f'Scraping from "{BASE_URL}"')
    events = await get_events(cached_urls.keys())
    log.info(f"Processing {len(events)} new URL(s)")
    if events:
        now = Time.clean(Time.now())
        for i, ev in enumerate(events, start=1):
            handler = partial(
                process_event,
                url=ev["link"],
                url_num=i,
            )
            # safe_process runs the handler under the shared HTTP semaphore;
            # presumably it returns (None, None) on error — verify in utils.
            url, iframe = await network.safe_process(
                handler,
                url_num=i,
                semaphore=network.HTTP_S,
                log=log,
            )
            if url:
                sport, event, link = (
                    ev["sport"],
                    ev["event"],
                    ev["link"],
                )
                key = f"[{sport}] {event} ({TAG})"
                tvg_id, logo = leagues.get_tvg_info(sport, event)
                entry = {
                    "url": url,
                    "logo": logo,
                    "base": iframe,
                    "timestamp": now.timestamp(),
                    "id": tvg_id or "Live.Event.us",
                    "link": link,
                }
                # Publish to live mapping and cache in one step.
                urls[key] = cached_urls[key] = entry
    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")
    # Persist the (possibly unchanged) cache back to disk.
    CACHE_FILE.write(cached_urls)

View file

@@ -10,7 +10,7 @@
{
"EUROLEAGUE": {
"logo": "https://www.euroleaguebasketball.net/images/logo-default.png",
"names": ["EUROLEAGUE BASKETBALL"]
"names": ["BASKETBALL EUROLEAGUE", "EUROLEAGUE BASKETBALL"]
}
}
],
@@ -68,8 +68,10 @@
"NCAA BASKETBALL",
"NCAA FOOTBALL",
"NCAA MEN",
"NCAA MEN'S BASKETBALL",
"NCAA SPORTS",
"NCAA WOMEN",
"NCAA WOMEN'S BASKETBALL",
"NCAAB",
"NCAAB D",
"NCAAB D-I",
@@ -309,6 +311,7 @@
"CARABAO CUP",
"EFL CUP",
"ENGLISH CARABAO CUP",
"ENGLISH EFL CUP",
"ENGLISH FOOTBALL LEAGUE CUP",
"LEAGUE CUP"
]

View file

@@ -1,10 +1,7 @@
## Base Log @ 2025-12-23 20:41 UTC
## Base Log @ 2025-12-24 20:41 UTC
### ✅ Working Streams: 145<br>❌ Dead Streams: 1
### ✅ Working Streams: 146<br>❌ Dead Streams: 0
| Channel | Error (Code) | Link |
| ------- | ------------ | ---- |
| FDSN Florida | HTTP Error (403) | `http://1tv41.icu:8080/8434692955/judgen64@yahoo.com/46794` |
---
#### Base Channels URL
```