Compare commits

..

No commits in common. "ea2aa0a6ec7fcf90e060c8e7da6b4e8fdb2d99df" and "f12ef19d514c7b7c3274933f13942d4396a602f1" have entirely different histories.

8 changed files with 90522 additions and 87292 deletions

174945
EPG/TV.xml

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -18,7 +18,6 @@ from scrapers import (
streamhub, streamhub,
streamsgate, streamsgate,
strmd, strmd,
totalsportek,
tvpass, tvpass,
watchfooty, watchfooty,
webcast, webcast,
@ -53,18 +52,17 @@ async def main() -> None:
asyncio.create_task(embedhd.scrape()), asyncio.create_task(embedhd.scrape()),
asyncio.create_task(fawa.scrape()), asyncio.create_task(fawa.scrape()),
asyncio.create_task(istreameast.scrape()), asyncio.create_task(istreameast.scrape()),
# asyncio.create_task(pixel.scrape()), asyncio.create_task(pixel.scrape()),
asyncio.create_task(ppv.scrape()), asyncio.create_task(ppv.scrape()),
asyncio.create_task(roxie.scrape()), asyncio.create_task(roxie.scrape()),
asyncio.create_task(shark.scrape()), asyncio.create_task(shark.scrape()),
asyncio.create_task(sport9.scrape()), asyncio.create_task(sport9.scrape()),
# asyncio.create_task(streambtw.scrape()), asyncio.create_task(streambtw.scrape()),
asyncio.create_task(streamcenter.scrape()), asyncio.create_task(streamcenter.scrape()),
asyncio.create_task(streamfree.scrape()), asyncio.create_task(streamfree.scrape()),
asyncio.create_task(streamhub.scrape()), asyncio.create_task(streamhub.scrape()),
asyncio.create_task(streamsgate.scrape()), asyncio.create_task(streamsgate.scrape()),
asyncio.create_task(strmd.scrape()), asyncio.create_task(strmd.scrape()),
# asyncio.create_task(totalsportek.scrape()),
asyncio.create_task(tvpass.scrape()), asyncio.create_task(tvpass.scrape()),
asyncio.create_task(webcast.scrape()), asyncio.create_task(webcast.scrape()),
] ]
@ -84,11 +82,10 @@ async def main() -> None:
| sport9.urls | sport9.urls
| streambtw.urls | streambtw.urls
| streamcenter.urls | streamcenter.urls
| strmd.urls
| streamfree.urls | streamfree.urls
| streamhub.urls | streamhub.urls
| streamsgate.urls | streamsgate.urls
| strmd.urls
| totalsportek.urls
| tvpass.urls | tvpass.urls
| watchfooty.urls | watchfooty.urls
| webcast.urls | webcast.urls

View file

@ -18,6 +18,8 @@ BASE_URL = "https://istreameast.app"
async def process_event(url: str, url_num: int) -> str | None: async def process_event(url: str, url_num: int) -> str | None:
pattern = re.compile(r"source:\s*window\.atob\(\s*'([^']+)'\s*\)", re.IGNORECASE)
if not (event_data := await network.request(url, log=log)): if not (event_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.") log.info(f"URL {url_num}) Failed to load url.")
@ -40,8 +42,6 @@ async def process_event(url: str, url_num: int) -> str | None:
return return
pattern = re.compile(r"source:\s*window\.atob\(\s*'([^']+)'\s*\)", re.IGNORECASE)
if not (match := pattern.search(iframe_src_data.text)): if not (match := pattern.search(iframe_src_data.text)):
log.warning(f"URL {url_num}) No Clappr source found.") log.warning(f"URL {url_num}) No Clappr source found.")

View file

@ -1,168 +0,0 @@
import re
from functools import partial
from urllib.parse import urljoin
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network
# Module-level logger for this scraper.
log = get_logger(__name__)

# Exported mapping of scraped events: playlist key -> entry metadata
# (url, logo, base, timestamp, id, link). Merged with other scrapers' urls
# by the caller.
urls: dict[str, dict[str, str | float]] = {}

# Tag embedded in every playlist key to identify this scraper's entries.
TAG = "TOTALSPRTK"

# On-disk cache of resolved events; entries expire after 28_800 s (8 h).
CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=28_800)

# Schedule page that lists all live events.
BASE_URL = "https://live.totalsportek777.com/"
def fix_league(s: str) -> str:
    """Normalize a league label.

    An all-lowercase name is promoted to uppercase; a name that already
    contains any capitalization is returned untouched.
    """
    if s.islower():
        return s.upper()
    return s
async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]:
    """Resolve one event page into ``(m3u8_url, iframe_src)``.

    Loads the event page, locates its xsportportal iframe, fetches the
    iframe document, and decodes the hex-encoded stream URL from the first
    ``var name = "value"`` assignment found there.

    Returns ``(None, None)`` on any failure.
    """
    if not (html_data := await network.request(url, log=log)):
        log.info(f"URL {url_num}) Failed to load url.")
        return None, None
    soup = HTMLParser(html_data.content)
    if not (iframe := soup.css_first("iframe")):
        log.warning(f"URL {url_num}) No iframe element found.")
        return None, None
    if (
        not (iframe_src := iframe.attributes.get("src"))
        or "xsportportal" not in iframe_src
    ):
        log.warning(f"URL {url_num}) No valid iframe source found.")
        return None, None
    if not (iframe_src_data := await network.request(iframe_src, log=log)):
        log.info(f"URL {url_num}) Failed to load iframe source.")
        return None, None
    # NOTE(review): this matches the *first* var assignment in the page and
    # assumes its string value is the hex-encoded stream URL — confirm
    # against the current xsportportal markup.
    valid_m3u8 = re.compile(r'var\s+(\w+)\s*=\s*"([^"]*)"', re.IGNORECASE)
    if not (match := valid_m3u8.search(iframe_src_data.text)):
        log.warning(f"URL {url_num}) No Clappr source found.")
        return None, None
    try:
        m3u8_url = bytes.fromhex(match[2]).decode("utf-8")
    except ValueError:
        # The matched value was not valid hex / UTF-8 — treat it as a miss
        # instead of letting the exception kill the whole scrape task.
        log.warning(f"URL {url_num}) Matched value is not a hex-encoded URL.")
        return None, None
    log.info(f"URL {url_num}) Captured M3U8")
    return m3u8_url, iframe_src
async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
    """Scrape the schedule page and return events not already cached.

    Each returned dict has "sport", "event", and "link" keys.  `cached_keys`
    holds the playlist keys already in the cache so duplicates are skipped
    (only membership tests are performed, so any str container works — the
    caller actually passes a dict keys view).
    """
    events = []
    if not (html_data := await network.request(BASE_URL, log=log)):
        return events
    soup = HTMLParser(html_data.content)
    # `sport` is stateful: heading nodes ("my-1") update it and every
    # following event link inherits the most recent heading's text.
    sport = "Live Event"
    for box in soup.css(".div-main-box"):
        for node in box.iter():
            if not (node_class := node.attributes.get("class")):
                continue
            if "my-1" in node_class:
                if span := node.css_first("span"):
                    sport = span.text(strip=True)
            if node.tag == "a" and "nav-link2" in node_class:
                if not (href := node.attributes.get("href")):
                    continue
                # Absolute hrefs point off-site; only relative links are
                # event pages on BASE_URL.
                if href.startswith("http"):
                    continue
                sport = fix_league(sport)
                teams = [t.text(strip=True) for t in node.css(".col-7 .col-12")]
                event_name = " vs ".join(teams)
                # Key format must match the one built in scrape().
                if f"[{sport}] {event_name} ({TAG})" in cached_keys:
                    continue
                events.append(
                    {
                        "sport": sport,
                        "event": event_name,
                        "link": urljoin(BASE_URL, href),
                    }
                )
    return events
async def scrape() -> None:
    """Top-level entry point for this scraper.

    Loads cached events, scrapes the schedule for new ones, resolves each
    new event's stream URL, and writes the merged result back to the cache.
    Populates the module-level `urls` mapping as a side effect.
    """
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    # Seed the exported mapping with everything still valid in the cache.
    urls.update(cached_urls)
    log.info(f"Loaded {cached_count} event(s) from cache")
    log.info(f'Scraping from "{BASE_URL}"')
    events = await get_events(cached_urls.keys())
    log.info(f"Processing {len(events)} new URL(s)")
    if events:
        now = Time.clean(Time.now())
        for i, ev in enumerate(events, start=1):
            handler = partial(
                process_event,
                url=ev["link"],
                url_num=i,
            )
            # safe_process runs the handler under the shared HTTP semaphore
            # with error guarding; returns (None, None) on failure.
            url, iframe = await network.safe_process(
                handler,
                url_num=i,
                semaphore=network.HTTP_S,
                log=log,
            )
            if url:
                sport, event, link = (
                    ev["sport"],
                    ev["event"],
                    ev["link"],
                )
                # Key format must match the lookup in get_events().
                key = f"[{sport}] {event} ({TAG})"
                tvg_id, logo = leagues.get_tvg_info(sport, event)
                entry = {
                    "url": url,
                    "logo": logo,
                    "base": iframe,
                    "timestamp": now.timestamp(),
                    "id": tvg_id or "Live.Event.us",
                    "link": link,
                }
                # Record in both the live mapping and the cache snapshot.
                urls[key] = cached_urls[key] = entry
    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")
    CACHE_FILE.write(cached_urls)

View file

@ -10,7 +10,7 @@
{ {
"EUROLEAGUE": { "EUROLEAGUE": {
"logo": "https://www.euroleaguebasketball.net/images/logo-default.png", "logo": "https://www.euroleaguebasketball.net/images/logo-default.png",
"names": ["BASKETBALL EUROLEAGUE", "EUROLEAGUE BASKETBALL"] "names": ["EUROLEAGUE BASKETBALL"]
} }
} }
], ],
@ -68,10 +68,8 @@
"NCAA BASKETBALL", "NCAA BASKETBALL",
"NCAA FOOTBALL", "NCAA FOOTBALL",
"NCAA MEN", "NCAA MEN",
"NCAA MEN'S BASKETBALL",
"NCAA SPORTS", "NCAA SPORTS",
"NCAA WOMEN", "NCAA WOMEN",
"NCAA WOMEN'S BASKETBALL",
"NCAAB", "NCAAB",
"NCAAB D", "NCAAB D",
"NCAAB D-I", "NCAAB D-I",
@ -311,7 +309,6 @@
"CARABAO CUP", "CARABAO CUP",
"EFL CUP", "EFL CUP",
"ENGLISH CARABAO CUP", "ENGLISH CARABAO CUP",
"ENGLISH EFL CUP",
"ENGLISH FOOTBALL LEAGUE CUP", "ENGLISH FOOTBALL LEAGUE CUP",
"LEAGUE CUP" "LEAGUE CUP"
] ]

View file

@ -1,7 +1,10 @@
## Base Log @ 2025-12-24 20:41 UTC ## Base Log @ 2025-12-23 20:41 UTC
### ✅ Working Streams: 146<br>❌ Dead Streams: 0 ### ✅ Working Streams: 145<br>❌ Dead Streams: 1
| Channel | Error (Code) | Link |
| ------- | ------------ | ---- |
| FDSN Florida | HTTP Error (403) | `http://1tv41.icu:8080/8434692955/judgen64@yahoo.com/46794` |
--- ---
#### Base Channels URL #### Base Channels URL
``` ```