Compare commits

...

37 commits

Author SHA1 Message Date
doms9
00000d9f2b e
block nfl endpoints (superbowl finished)
2026-02-09 00:28:55 -05:00
GitHub Actions Bot
1608018249 update EPG 2026-02-09 04:51:13 +00:00
GitHub Actions Bot
c7ce7d1d0b health log 2026-02-09 04:37:51 +00:00
GitHub Actions Bot
70f6f05a41 update M3U8 2026-02-08 23:30:58 -05:00
GitHub Actions Bot
60a664ff54 update M3U8 2026-02-08 23:00:23 -05:00
GitHub Actions Bot
4b20baf51d update M3U8 2026-02-08 22:30:53 -05:00
GitHub Actions Bot
a6ccdd0e86 update M3U8 2026-02-08 22:00:28 -05:00
GitHub Actions Bot
cfb13a8fed update M3U8 2026-02-08 21:30:59 -05:00
GitHub Actions Bot
d73db17768 update M3U8 2026-02-08 21:01:26 -05:00
GitHub Actions Bot
1324373b0f update M3U8 2026-02-08 20:31:38 -05:00
GitHub Actions Bot
771998845a update M3U8 2026-02-08 20:01:09 -05:00
GitHub Actions Bot
d31c585b58 update M3U8 2026-02-08 19:31:00 -05:00
GitHub Actions Bot
bbda14ee0e update M3U8 2026-02-08 19:05:18 -05:00
GitHub Actions Bot
65765b750c update M3U8 2026-02-08 18:32:45 -05:00
GitHub Actions Bot
3d79cbe99b update M3U8 2026-02-08 18:02:21 -05:00
GitHub Actions Bot
7c893423ed update M3U8 2026-02-08 17:31:58 -05:00
GitHub Actions Bot
2bee8e3d81 update M3U8 2026-02-08 17:01:50 -05:00
GitHub Actions Bot
26907a0204 update M3U8 2026-02-08 16:32:51 -05:00
GitHub Actions Bot
3c5562344b update M3U8 2026-02-08 16:07:42 -05:00
GitHub Actions Bot
39d375f0aa health log 2026-02-08 20:49:59 +00:00
GitHub Actions Bot
0f8690b41f update M3U8 2026-02-08 15:32:39 -05:00
GitHub Actions Bot
3d7209c665 update M3U8 2026-02-08 15:05:29 -05:00
doms9
00000d987c e 2026-02-08 14:57:54 -05:00
doms9
00000d9f85 e
fix streambtw.py scraping
2026-02-08 14:55:52 -05:00
GitHub Actions Bot
443b8465d4 update M3U8 2026-02-08 14:33:21 -05:00
GitHub Actions Bot
c19db9fa65 update M3U8 2026-02-08 14:06:38 -05:00
GitHub Actions Bot
3d998b68b4 update EPG 2026-02-08 19:05:50 +00:00
GitHub Actions Bot
95ebed3f79 update M3U8 2026-02-08 13:31:56 -05:00
GitHub Actions Bot
1ad0f87e23 update M3U8 2026-02-08 13:05:13 -05:00
GitHub Actions Bot
03da1caee4 update M3U8 2026-02-08 12:08:32 -05:00
GitHub Actions Bot
afa3fbaac6 update M3U8 2026-02-08 11:05:42 -05:00
GitHub Actions Bot
86a7414021 update M3U8 2026-02-08 10:07:25 -05:00
GitHub Actions Bot
9360e8fca4 health log 2026-02-08 14:52:38 +00:00
GitHub Actions Bot
2ad46d0cb3 update M3U8 2026-02-08 09:06:32 -05:00
GitHub Actions Bot
4928702706 update M3U8 2026-02-08 08:06:48 -05:00
GitHub Actions Bot
dad53aa921 update EPG 2026-02-08 11:00:32 +00:00
GitHub Actions Bot
1d4c3200ff health log 2026-02-08 08:56:55 +00:00
14 changed files with 108934 additions and 109571 deletions

File diff suppressed because it is too large Load diff

215512
M3U8/TV.xml

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load diff

View file

@@ -72,7 +72,7 @@ async def main() -> None:
asyncio.create_task(streamsgate.scrape(xtrnl_brwsr)), asyncio.create_task(streamsgate.scrape(xtrnl_brwsr)),
asyncio.create_task(totalsportek.scrape(hdl_brwsr)), asyncio.create_task(totalsportek.scrape(hdl_brwsr)),
asyncio.create_task(tvpass.scrape(hdl_brwsr)), asyncio.create_task(tvpass.scrape(hdl_brwsr)),
asyncio.create_task(webcast.scrape(hdl_brwsr)), # asyncio.create_task(webcast.scrape(hdl_brwsr)),
] ]
httpx_tasks = [ httpx_tasks = [

View file

@@ -24,7 +24,7 @@ SPORT_ENDPOINTS = {
# "mlb": "MLB", # "mlb": "MLB",
"motorsports": "Racing", "motorsports": "Racing",
"nba": "NBA", "nba": "NBA",
"nfl": "American Football", # "nfl": "American Football",
"nhl": "NHL", "nhl": "NHL",
"soccer": "Soccer", "soccer": "Soccer",
} }

View file

@@ -1,9 +1,7 @@
import base64 import base64
import json
import re import re
from functools import partial from functools import partial
from urllib.parse import urljoin
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network from .utils import Cache, Time, get_logger, leagues, network
@@ -15,6 +13,8 @@ TAG = "STRMBTW"
CACHE_FILE = Cache(TAG, exp=3_600) CACHE_FILE = Cache(TAG, exp=3_600)
API_FILE = Cache(f"{TAG}-api", exp=28_800)
BASE_URL = "https://hiteasport.info" BASE_URL = "https://hiteasport.info"
@@ -32,7 +32,6 @@ async def process_event(url: str, url_num: int) -> str | None:
if not (match := valid_m3u8.search(html_data.text)): if not (match := valid_m3u8.search(html_data.text)):
log.info(f"URL {url_num}) No M3U8 found") log.info(f"URL {url_num}) No M3U8 found")
return return
stream_link: str = match[2] stream_link: str = match[2]
@@ -46,56 +45,49 @@ async def process_event(url: str, url_num: int) -> str | None:
async def get_events() -> list[dict[str, str]]: async def get_events() -> list[dict[str, str]]:
now = Time.clean(Time.now())
if not (api_data := API_FILE.load(per_entry=False)):
log.info("Refreshing API cache")
api_data = {"timestamp": now.timestamp()}
if r := await network.request(
urljoin(BASE_URL, "public/api.php"),
log=log,
params={"action": "get"},
):
api_data: dict = r.json()
api_data["timestamp"] = now.timestamp()
API_FILE.write(api_data)
events = [] events = []
if not (html_data := await network.request(BASE_URL, log=log)): if last_update := api_data.get("updated_at"):
return events last_update_dt = Time.from_str(last_update, timezone="UTC")
soup = HTMLParser(html_data.content) if last_update_dt.date() != now.date():
return events
script_text = None for info in api_data.get("groups", []):
if not (sport := info["title"]):
sport = "Live Event"
for s in soup.css("script"): if items := info.get("items"):
t = s.text() or "" for event in items:
event_name: str = event["title"]
if "const DATA" in t: link: str = event["url"]
script_text = t
break
if not script_text: events.append(
return events {
"sport": fix_league(sport),
if not ( "event": event_name,
match := re.search(r"const\s+DATA\s*=\s*(\[\s*.*?\s*\]);", script_text, re.S) "link": link,
): }
return events )
data_js = match[1].replace("\n ", "").replace("\n ", "")
s1 = re.sub(r"{\s", '{"', data_js)
s2 = re.sub(r':"', '":"', s1)
s3 = re.sub(r":\[", '":[', s2)
s4 = re.sub(r"},\]", "}]", s3)
s5 = re.sub(r'",\s', '","', s4)
data: list[dict[str, str]] = json.loads(s5)
for matches in data:
league = matches["title"]
items: list[dict[str, str]] = matches["items"]
for info in items:
title = info["title"]
url = info["url"]
events.append(
{
"sport": fix_league(league),
"event": title,
"link": url,
}
)
return events return events

View file

@@ -19,8 +19,8 @@ BASE_URL = "https://backend.streamcenter.live/api/Parties"
CATEGORIES = { CATEGORIES = {
4: "Basketball", 4: "Basketball",
9: "Football", 9: "Football",
13: "Baseball", # 13: "Baseball",
14: "American Football", # 14: "American Football",
15: "Motor Sport", 15: "Motor Sport",
16: "Hockey", 16: "Hockey",
17: "Fight MMA", 17: "Fight MMA",

View file

@@ -21,7 +21,7 @@ BASE_URL = "https://streamhub.pro/"
CATEGORIES = { CATEGORIES = {
"Soccer": "sport_68c02a4464a38", "Soccer": "sport_68c02a4464a38",
"American Football": "sport_68c02a4465113", # "American Football": "sport_68c02a4465113",
# "Baseball": "sport_68c02a446582f", # "Baseball": "sport_68c02a446582f",
"Basketball": "sport_68c02a4466011", "Basketball": "sport_68c02a4466011",
"Cricket": "sport_68c02a44669f3", "Cricket": "sport_68c02a44669f3",

View file

@@ -22,10 +22,10 @@ BASE_URL = "https://streamingon.org"
SPORT_ENDPOINTS = [ SPORT_ENDPOINTS = [
"soccer", "soccer",
"nfl", # "nfl",
"nba", "nba",
"cfb", "cfb",
"mlb", # "mlb",
"nhl", "nhl",
"ufc", "ufc",
"boxing", "boxing",

View file

@@ -101,6 +101,7 @@ class Time(datetime):
"%Y-%m-%d %H:%M %p", "%Y-%m-%d %H:%M %p",
"%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M:%S",
"%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%dT%H:%M:%SZ",
"%Y-%m-%dT%H:%M:%S%z",
"%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%dT%H:%M:%S.%fZ",
"%Y/%m/%d %H:%M", "%Y/%m/%d %H:%M",
"%Y/%m/%d %H:%M:%S", "%Y/%m/%d %H:%M:%S",

View file

@@ -28,7 +28,7 @@ BASE_MIRRORS = [
] ]
VALID_SPORTS = [ VALID_SPORTS = [
"american-football", # "american-football",
# "australian-football", # "australian-football",
# "baseball", # "baseball",
"basketball", "basketball",

View file

@@ -16,7 +16,10 @@ CACHE_FILE = Cache(TAG, exp=10_800)
HTML_CACHE = Cache(f"{TAG}-html", exp=86_400) HTML_CACHE = Cache(f"{TAG}-html", exp=86_400)
BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"} BASE_URLS = {
# "NFL": "https://nflwebcast.com",
"NHL": "https://slapstreams.com",
}
def fix_event(s: str) -> str: def fix_event(s: str) -> str:

View file

@@ -22,7 +22,7 @@ SPORT_ENDPOINTS = [
# "mlb", # "mlb",
"mma", "mma",
"nba", "nba",
"nfl", # "nfl",
"nhl", "nhl",
"soccer", "soccer",
"wwe", "wwe",

View file

@@ -1,16 +1,13 @@
## Base Log @ 2026-02-08 04:41 UTC ## Base Log @ 2026-02-09 04:37 UTC
### ✅ Working Streams: 139<br>❌ Dead Streams: 7 ### ✅ Working Streams: 142<br>❌ Dead Streams: 4
| Channel | Error (Code) | Link | | Channel | Error (Code) | Link |
| ------- | ------------ | ---- | | ------- | ------------ | ---- |
| CW | HTTP Error (404) | `https://fl1.moveonjoy.com/CW_ORLANDO/index.m3u8` | | CW | HTTP Error (404) | `https://fl1.moveonjoy.com/CW_ORLANDO/index.m3u8` |
| ESPN | HTTP Error (404) | `http://41.205.93.154/ESPN/index.m3u8` |
| FXX | HTTP Error (404) | `https://fl1.moveonjoy.com/FXX/index.m3u8` | | FXX | HTTP Error (404) | `https://fl1.moveonjoy.com/FXX/index.m3u8` |
| Spectrum SportsNet LA Dodgers | HTTP Error (403) | `http://mytvstream.net:8080/live/bn80NG/909467/31636.m3u8` | | Spectrum SportsNet LA Dodgers | HTTP Error (403) | `http://mytvstream.net:8080/live/bn80NG/909467/31636.m3u8` |
| Sportsnet 360 | HTTP Error (403) | `http://mytvstream.net:8080/live/bn80NG/909467/2219.m3u8` | | Sportsnet 360 | HTTP Error (403) | `http://mytvstream.net:8080/live/bn80NG/909467/2219.m3u8` |
| Sportsnet One | HTTP Error (403) | `http://mytvstream.net:8080/live/bn80NG/909467/57297.m3u8` |
| TSN1 | HTTP Error (403) | `http://mytvstream.net:8080/live/bn80NG/909467/57292.m3u8` |
--- ---
#### Base Channels URL #### Base Channels URL
``` ```