Compare commits

..

35 commits

Author SHA1 Message Date
GitHub Actions Bot
eadd60d126 update M3U8 2026-03-21 08:03:06 -04:00
GitHub Actions Bot
a9e59158e7 update EPG 2026-03-21 10:59:24 +00:00
GitHub Actions Bot
1e53853620 health log 2026-03-21 08:55:42 +00:00
GitHub Actions Bot
bf1d49e9c1 update EPG 2026-03-21 04:18:54 +00:00
GitHub Actions Bot
1d4b53a735 health log 2026-03-21 04:16:54 +00:00
GitHub Actions Bot
e3c0a5c360 update M3U8 2026-03-20 23:31:09 -04:00
GitHub Actions Bot
3ab0129673 update M3U8 2026-03-20 23:00:39 -04:00
GitHub Actions Bot
149dcc6dd7 update M3U8 2026-03-20 22:31:07 -04:00
GitHub Actions Bot
0e86058c74 update M3U8 2026-03-20 22:01:27 -04:00
GitHub Actions Bot
02766b22d8 update M3U8 2026-03-20 21:31:30 -04:00
GitHub Actions Bot
8106424418 update M3U8 2026-03-20 21:01:29 -04:00
GitHub Actions Bot
fd998e3305 update M3U8 2026-03-20 20:31:17 -04:00
GitHub Actions Bot
626ffd9607 update M3U8 2026-03-20 20:01:31 -04:00
GitHub Actions Bot
9d7bd3d0cf update M3U8 2026-03-20 19:31:13 -04:00
GitHub Actions Bot
8e2cb249a5 update M3U8 2026-03-20 19:01:12 -04:00
GitHub Actions Bot
42143fa029 update M3U8 2026-03-20 18:30:59 -04:00
GitHub Actions Bot
599abc0220 update M3U8 2026-03-20 18:01:14 -04:00
GitHub Actions Bot
73459771cb update M3U8 2026-03-20 17:30:31 -04:00
GitHub Actions Bot
914e0dff62 update M3U8 2026-03-20 17:01:01 -04:00
GitHub Actions Bot
2cdfad4079 health log 2026-03-20 20:56:15 +00:00
GitHub Actions Bot
e9dc287538 update M3U8 2026-03-20 16:31:20 -04:00
GitHub Actions Bot
8e835e9c47 update M3U8 2026-03-20 16:01:28 -04:00
GitHub Actions Bot
9695cd9b04 update M3U8 2026-03-20 15:31:53 -04:00
GitHub Actions Bot
32ae237b66 update EPG 2026-03-20 19:25:26 +00:00
GitHub Actions Bot
f7b032d09c update M3U8 2026-03-20 15:02:20 -04:00
GitHub Actions Bot
40da34fc62 update M3U8 2026-03-20 14:31:27 -04:00
GitHub Actions Bot
32e3f7f4f8 update M3U8 2026-03-20 14:01:19 -04:00
GitHub Actions Bot
e4bea0f9eb update M3U8 2026-03-20 13:30:59 -04:00
GitHub Actions Bot
03158474fe update M3U8 2026-03-20 13:01:55 -04:00
doms9
00000d9411 e
- edit scraping for streamsgate.py
2026-03-20 12:52:11 -04:00
GitHub Actions Bot
0ae675a583 update M3U8 2026-03-20 12:00:58 -04:00
GitHub Actions Bot
885699874b health log 2026-03-20 15:09:54 +00:00
GitHub Actions Bot
b00362e735 update M3U8 2026-03-20 11:00:56 -04:00
GitHub Actions Bot
4b6b036f35 update M3U8 2026-03-20 10:01:22 -04:00
GitHub Actions Bot
f53948c078 update M3U8 2026-03-20 09:00:41 -04:00
7 changed files with 183543 additions and 178365 deletions

File diff suppressed because it is too large Load diff

236522
M3U8/TV.xml

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load diff

View file

@@ -69,7 +69,7 @@ async def main() -> None:
asyncio.create_task(sportzone.scrape(xtrnl_brwsr)), asyncio.create_task(sportzone.scrape(xtrnl_brwsr)),
asyncio.create_task(streamcenter.scrape(hdl_brwsr)), asyncio.create_task(streamcenter.scrape(hdl_brwsr)),
# asyncio.create_task(streamhub.scrape(xtrnl_brwsr)), # asyncio.create_task(streamhub.scrape(xtrnl_brwsr)),
# asyncio.create_task(streamsgate.scrape(xtrnl_brwsr)), asyncio.create_task(streamsgate.scrape(xtrnl_brwsr)),
asyncio.create_task(timstreams.scrape(xtrnl_brwsr)), asyncio.create_task(timstreams.scrape(xtrnl_brwsr)),
] ]

View file

@@ -1,10 +1,10 @@
import json import asyncio
import re
from functools import partial from functools import partial
from itertools import chain
from typing import Any from typing import Any
from urllib.parse import urljoin
from playwright.async_api import Browser from playwright.async_api import Browser
from selectolax.parser import HTMLParser
from .utils import Cache, Time, get_logger, leagues, network from .utils import Cache, Time, get_logger, leagues, network
@@ -16,95 +16,101 @@ TAG = "STRMSGATE"
CACHE_FILE = Cache(TAG, exp=10_800) CACHE_FILE = Cache(TAG, exp=10_800)
BASE_URL = "https://streamingon.org/index.php" API_FILE = Cache(f"{TAG}-api", exp=19_800)
BASE_URL = "https://streamingon.org"
SPORT_URLS = [
urljoin(BASE_URL, f"data/{sport}.json")
for sport in [
# "cfb",
"mlb",
"nba",
# "nfl",
"nhl",
"soccer",
"ufc",
]
]
def get_event(t1: str, t2: str) -> str:
match t1:
case "RED ZONE":
return "NFL RedZone"
case "TBD":
return "TBD"
case _:
return f"{t1.strip()} vs {t2.strip()}"
async def refresh_api_cache(now_ts: float) -> list[dict[str, Any]]:
tasks = [network.request(url, log=log) for url in SPORT_URLS]
results = await asyncio.gather(*tasks)
if not (data := [*chain.from_iterable(r.json() for r in results if r)]):
return [{"timestamp": now_ts}]
for ev in data:
ev["ts"] = ev.pop("timestamp")
data[-1]["timestamp"] = now_ts
return data
async def get_events(cached_keys: list[str]) -> list[dict[str, str]]: async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
now = Time.clean(Time.now()) now = Time.clean(Time.now())
if not (api_data := API_FILE.load(per_entry=False, index=-1)):
log.info("Refreshing API cache")
api_data = await refresh_api_cache(now.timestamp())
API_FILE.write(api_data)
events = [] events = []
if not ( start_dt = now.delta(hours=-1)
html_data := await network.request( end_dt = now.delta(minutes=5)
BASE_URL,
params={
"sport": "all",
"league": "all",
"sort": "time",
"stream": "available",
"day": "all",
},
log=log,
)
):
return events
link_data_ptrn = re.compile(r"var\s+linkData\s+=\s+({.*?});", re.I | re.S) for stream_group in api_data:
date = stream_group.get("time")
if not (match := link_data_ptrn.search(html_data.text)): sport = stream_group.get("league")
log.warning("No `linkData` variable found.")
return events
link_data: dict[str, dict[str, Any]] = json.loads(match[1]) t1, t2 = stream_group.get("away"), stream_group.get("home")
start_dt = now.delta(minutes=-30) event = get_event(t1, t2)
end_dt = now.delta(minutes=30)
soup = HTMLParser(html_data.content) if not (date and sport):
for body in soup.css(".sport-body"):
if not (date_elem := body.css_first(".date-label")):
continue continue
event_date = date_elem.text(strip=True) if f"[{sport}] {event} ({TAG})" in cached_keys:
continue
for card in soup.css(".game-card"): event_dt = Time.from_str(date, timezone="UTC")
if not (event_id := card.attributes.get("data-id")):
continue
if not (league_elem := card.css_first(".card-league")): if not start_dt <= event_dt <= end_dt:
continue continue
if not (teams := card.css(".card-teams .card-team-name")): if not (streams := stream_group.get("streams")):
continue continue
if not (time_elem := card.css_first(".card-time")): if not (url := streams[0].get("url")):
continue continue
event_dt = Time.from_str( events.append(
f"{event_date} {time_elem.text(strip=True)}", {
timezone="CET", "sport": sport,
) "event": event,
"link": url,
if not start_dt <= event_dt <= end_dt: "timestamp": event_dt.timestamp(),
continue }
)
sport = league_elem.text(strip=True)
team_1, team_2 = (team.text(strip=True) for team in teams)
event_name = f"{team_2} vs {team_1}"
if f"[{sport}] {event_name} ({TAG})" in cached_keys:
continue
if not (event_info := link_data.get(event_id)):
continue
if not (stream_links := event_info.get("streamLinks")):
continue
if not (url := stream_links[0].get("url")):
continue
events.append(
{
"sport": sport,
"event": event_name,
"link": url,
"timestamp": now.timestamp(),
}
)
return events return events
@@ -167,6 +173,8 @@ async def scrape(browser: Browser) -> None:
if url: if url:
valid_count += 1 valid_count += 1
entry["url"] = url.split("&e")[0]
urls[key] = entry urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)") log.info(f"Collected and cached {valid_count - cached_count} new event(s)")

File diff suppressed because it is too large Load diff

View file

@@ -1,13 +1,14 @@
## Base Log @ 2026-03-20 09:04 UTC ## Base Log @ 2026-03-21 08:55 UTC
### ✅ Working Streams: 158<br>❌ Dead Streams: 4 ### ✅ Working Streams: 157<br>❌ Dead Streams: 5
| Channel | Error (Code) | Link | | Channel | Error (Code) | Link |
| ------- | ------------ | ---- | | ------- | ------------ | ---- |
| Altitude Sports | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/79545.m3u8` | | Altitude Sports | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/79545.m3u8` |
| Hallmark Family | HTTP Error (000) | `http://mytvstream.net:8080/live/30550113/30550113/9307.m3u8` | | Hallmark Family | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/9307.m3u8` |
| NBC Sports California | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/20940.m3u8` | | NBC Sports California | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/20940.m3u8` |
| NHL Network | HTTP Error (000) | `http://mytvstream.net:8080/live/30550113/30550113/20179.m3u8` | | NHL Network | HTTP Error (000) | `http://mytvstream.net:8080/live/30550113/30550113/20179.m3u8` |
| Pop TV | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/20976.m3u8` |
--- ---
#### Base Channels URL #### Base Channels URL
``` ```