Compare commits


38 commits

| Author | SHA1 | Message | Date |
| ------ | ---- | ------- | ---- |
| doms9 | 00000d92ea | e (add timstreams) | 2025-12-12 15:20:10 -05:00 |
| GitHub Actions Bot | 9f6a235243 | update M3U8 | 2025-12-12 15:02:21 -05:00 |
| GitHub Actions Bot | 9e5af2c38c | update M3U8 | 2025-12-12 14:32:11 -05:00 |
| GitHub Actions Bot | c86b5ff09e | update M3U8 | 2025-12-12 14:01:54 -05:00 |
| doms9 | 00000d95af | e | 2025-12-12 13:59:13 -05:00 |
| GitHub Actions Bot | 18ab0fd65b | update EPG | 2025-12-12 18:58:17 +00:00 |
| GitHub Actions Bot | 438fbd7592 | update M3U8 | 2025-12-12 13:30:44 -05:00 |
| GitHub Actions Bot | bf68058eba | update M3U8 | 2025-12-12 13:01:03 -05:00 |
| GitHub Actions Bot | d53c55c97d | update M3U8 | 2025-12-12 12:01:03 -05:00 |
| GitHub Actions Bot | 8b00d2ebed | update M3U8 | 2025-12-12 11:01:12 -05:00 |
| doms9 | 00000d9233 | e (fix hanging if no available mirrors) | 2025-12-12 10:46:56 -05:00 |
| GitHub Actions Bot | 3a1d2742f1 | update M3U8 | 2025-12-12 10:17:14 -05:00 |
| GitHub Actions Bot | 6cfd2fd934 | health log | 2025-12-12 14:39:51 +00:00 |
| GitHub Actions Bot | 2b42f0ab1a | update EPG | 2025-12-12 10:53:49 +00:00 |
| GitHub Actions Bot | 0a0b43c446 | health log | 2025-12-12 08:51:34 +00:00 |
| doms9 | 00000d934d | e (drop default quality for streamfree) | 2025-12-12 03:45:53 -05:00 |
| GitHub Actions Bot | 65324a1fde | update M3U8 | 2025-12-11 23:30:42 -05:00 |
| GitHub Actions Bot | d4fde8ffa4 | update M3U8 | 2025-12-11 23:00:21 -05:00 |
| GitHub Actions Bot | e1b5725a3a | update EPG | 2025-12-12 03:47:25 +00:00 |
| GitHub Actions Bot | 812e0fe058 | health log | 2025-12-12 03:44:22 +00:00 |
| GitHub Actions Bot | 91f5ff401a | update M3U8 | 2025-12-11 22:30:57 -05:00 |
| GitHub Actions Bot | 6a222c290d | update M3U8 | 2025-12-11 22:01:19 -05:00 |
| GitHub Actions Bot | 4ba2507cae | update M3U8 | 2025-12-11 21:31:39 -05:00 |
| GitHub Actions Bot | 4b47137e8a | update M3U8 | 2025-12-11 21:01:45 -05:00 |
| GitHub Actions Bot | 9f49e161f4 | update M3U8 | 2025-12-11 20:31:01 -05:00 |
| GitHub Actions Bot | 96107e8c66 | update M3U8 | 2025-12-11 20:02:19 -05:00 |
| GitHub Actions Bot | bdac00eb01 | update M3U8 | 2025-12-11 19:32:25 -05:00 |
| GitHub Actions Bot | 5145f26d99 | update M3U8 | 2025-12-11 19:02:16 -05:00 |
| GitHub Actions Bot | 101278d75b | health log | 2025-12-11 18:35:49 -05:00 |
| GitHub Actions Bot | a90e1b68d7 | update M3U8 | 2025-12-11 18:31:51 -05:00 |
| GitHub Actions Bot | d0aa681293 | update M3U8 | 2025-12-11 18:01:34 -05:00 |
| GitHub Actions Bot | 5f595f8304 | update M3U8 | 2025-12-11 17:30:45 -05:00 |
| GitHub Actions Bot | d12616cba0 | update M3U8 | 2025-12-11 17:00:26 -05:00 |
| doms9 | 00000d9d27 | e | 2025-12-11 16:56:50 -05:00 |
| GitHub Actions Bot | 985bb418ee | update M3U8 | 2025-12-11 16:30:55 -05:00 |
| GitHub Actions Bot | 53c3b56338 | update M3U8 | 2025-12-11 16:01:11 -05:00 |
| GitHub Actions Bot | f1138b2999 | health log | 2025-12-11 20:41:06 +00:00 |
| GitHub Actions Bot | 52641cfbac | update M3U8 | 2025-12-11 15:31:08 -05:00 |
17 changed files with 26072 additions and 24343 deletions

EPG/TV.xml (43716 lines changed)

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -16,6 +16,7 @@ from scrapers import (
     streamfree,
     streamsgate,
     strmd,
+    timstreams,
     tvpass,
     watchfooty,
     webcast,
@@ -59,6 +60,7 @@ async def main() -> None:
         asyncio.create_task(streamfree.scrape(network.client)),
         asyncio.create_task(streamsgate.scrape(network.client)),
         asyncio.create_task(strmd.scrape(network.client)),
+        asyncio.create_task(timstreams.scrape(network.client)),
         asyncio.create_task(tvpass.scrape(network.client)),
         asyncio.create_task(watchfooty.scrape(network.client)),
         asyncio.create_task(webcast.scrape(network.client)),
@@ -79,6 +81,7 @@ async def main() -> None:
         | strmd.urls
         | streamfree.urls
         | streamsgate.urls
+        | timstreams.urls
         | tvpass.urls
         | watchfooty.urls
         | webcast.urls
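
For context on what these hunks wire up: every scraper module exposes an async `scrape()` coroutine plus a module-level `urls` dict, and `main()` runs all of them concurrently before merging the per-module dicts with the `|` union operator. A minimal, self-contained sketch of that pattern (the `FakeScraper` class and URLs are invented for illustration, not repo code):

```python
import asyncio


class FakeScraper:
    """Invented stand-in for a scraper module: scrape() fills a urls dict."""

    def __init__(self, name: str) -> None:
        self.name = name
        self.urls: dict[str, dict[str, str]] = {}

    async def scrape(self) -> None:
        # A real scraper would hit its API here; this just fakes one entry.
        self.urls[f"[Demo] {self.name} event"] = {
            "url": f"https://example.com/{self.name}/index.m3u8"
        }


async def main() -> None:
    scrapers = [FakeScraper("timstreams"), FakeScraper("tvpass")]

    # Run every scraper concurrently, like the asyncio.create_task() calls above.
    await asyncio.gather(*(asyncio.create_task(s.scrape()) for s in scrapers))

    # Merge the per-module dicts with the | union operator, as in the last hunk.
    merged: dict[str, dict[str, str]] = {}
    for s in scrapers:
        merged = merged | s.urls

    print(f"{len(merged)} event(s) collected")


asyncio.run(main())
```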


@@ -136,6 +136,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
                         ev["event"],
                         ev["link"],
                     )
+
                     key = f"[{sport}] {event} ({TAG})"
                     tvg_id, logo = leagues.get_tvg_info(sport, event)


@@ -25,7 +25,7 @@ def fix_league(s: str) -> str:
 async def refresh_api_cache(
     client: httpx.AsyncClient,
     url: str,
-    ts: float,
+    now_ts: float,
 ) -> dict[str, dict[str, str]]:
     log.info("Refreshing API cache")
@@ -37,9 +37,10 @@ async def refresh_api_cache(
         return {}
-    data = r.json()
+    if not (data := r.json()):
+        return {}
-    data["timestamp"] = ts
+    data["timestamp"] = now_ts
     return data
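
Several hunks in this compare apply the same change: bind the parsed JSON with the walrus operator and return early on an empty payload, so a timestamp is never stamped onto useless data. A small standalone sketch of the guard (the fetch itself is omitted; `stamp_payload` is a made-up name):

```python
import time
from typing import Any


def stamp_payload(raw: dict[str, Any] | None, now_ts: float) -> dict[str, Any]:
    # Assign and test in one step: an empty or missing payload is returned
    # immediately instead of being timestamped and cached.
    if not (data := raw):
        return {}

    data["timestamp"] = now_ts
    return data


print(stamp_payload(None, time.time()))             # -> {}
print(stamp_payload({"events": [1]}, time.time()))  # -> {'events': [1], 'timestamp': ...}
```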


@@ -115,6 +115,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
         CACHE_FILE.write(cached_urls)
         return
+    log.info(f'Scraping from "{base_url}"')
+
     events = await get_events(
         client,
         api_url,


@@ -12,7 +12,7 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("streambtw.json", exp=5_400)
+CACHE_FILE = Cache("streambtw.json", exp=3_600)
 BASE_URL = "https://streambtw.com"
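
The streambtw cache lifetime drops from 5_400 to 3_600 seconds (90 to 60 minutes). The repo's `Cache` helper isn't shown in this diff; a hypothetical file-backed equivalent with the same exp-in-seconds behaviour might look roughly like this:

```python
import json
import time
from pathlib import Path
from typing import Any


class TTLCache:
    """Hypothetical stand-in for the repo's Cache helper: JSON file plus expiry."""

    def __init__(self, name: str, exp: int) -> None:
        self.path = Path(name)
        self.exp = exp  # lifetime in seconds, e.g. 3_600 for one hour

    def load(self) -> dict[str, Any]:
        if not self.path.exists():
            return {}
        if time.time() - self.path.stat().st_mtime > self.exp:
            return {}  # cache file older than `exp` seconds: treat as empty
        return json.loads(self.path.read_text())

    def write(self, data: dict[str, Any]) -> None:
        self.path.write_text(json.dumps(data))


CACHE_FILE = TTLCache("streambtw.json", exp=3_600)
```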


@@ -45,9 +45,10 @@ async def refresh_api_cache(
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')
-        return {}
+        return []
-    data = r.json()
+    if not (data := r.json()):
+        return []
     data[-1]["timestamp"] = now_ts


@@ -64,7 +64,7 @@ async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | flo
             events[key] = {
                 "url": network.build_proxy_url(
                     tag=TAG,
-                    path=f"{stream_key}720p/index.m3u8",
+                    path=f"{stream_key}/index.m3u8",
                     query={"stream_name": name},
                 ),
                 "logo": logo or pic,


@@ -59,7 +59,8 @@ async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, An
 async def refresh_api_cache(
-    client: httpx.AsyncClient, ts: float
+    client: httpx.AsyncClient,
+    now_ts: float,
 ) -> list[dict[str, Any]]:
     log.info("Refreshing API cache")
@@ -70,12 +71,13 @@ async def refresh_api_cache(
     results = await asyncio.gather(*tasks)
-    data = list(chain(*results))
+    if not (data := list(chain(*results))):
+        return []
     for ev in data:
         ev["ts"] = ev.pop("timestamp")
-    data[-1]["timestamp"] = ts
+    data[-1]["timestamp"] = now_ts
     return data
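
This scraper fans several API requests out with `asyncio.gather()` and flattens the per-request lists with `itertools.chain()` before the new empty-result guard. A runnable sketch of that fan-out/flatten step (`fake_fetch` and the URLs are placeholders):

```python
import asyncio
from itertools import chain
from typing import Any


async def fake_fetch(url: str) -> list[dict[str, Any]]:
    # Stand-in for an HTTP call that returns a list of events per endpoint.
    await asyncio.sleep(0)
    return [{"source": url, "timestamp": 1_765_000_000}]


async def collect() -> list[dict[str, Any]]:
    tasks = [fake_fetch(u) for u in ("https://example.com/a", "https://example.com/b")]
    results = await asyncio.gather(*tasks)

    # chain(*results) flattens the list of lists; the walrus guard keeps the
    # caller from indexing data[-1] on an empty result.
    if not (data := list(chain(*results))):
        return []

    for ev in data:
        ev["ts"] = ev.pop("timestamp")

    return data


print(asyncio.run(collect()))
```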


@@ -38,7 +38,7 @@ def fix_sport(s: str) -> str:
 async def refresh_api_cache(
     client: httpx.AsyncClient,
     url: str,
-    ts: float,
+    now_ts: float,
 ) -> list[dict[str, Any]]:
     log.info("Refreshing API cache")
@@ -49,11 +49,12 @@ async def refresh_api_cache(
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')
-        return {}
+        return []
-    data = r.json()
+    if not (data := r.json()):
+        return []
-    data[-1]["timestamp"] = ts
+    data[-1]["timestamp"] = now_ts
     return data
@@ -88,7 +89,7 @@ async def get_events(
         if not (ts := event["date"]):
             continue
-        start_ts = int(f"{ts}"[:-3])
+        start_ts = float(f"{ts}"[:-3])
         event_dt = Time.from_ts(start_ts)
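
A plausible reason for the `int()` to `float()` switch: the upstream date field is a millisecond epoch value, and slicing off the last three characters of its string form yields seconds; `float()` additionally tolerates values that stringify with a decimal point, which `int()` would reject. An illustration using the standard library instead of the repo's `Time` helper:

```python
from datetime import datetime, timezone


def to_start_dt(ts: int | float) -> datetime:
    # "1765569610123"[:-3] -> "1765569610" (milliseconds down to seconds);
    # a value like 1765569610.123 stringifies to "1765569610.123", whose
    # sliced form "1765569610." parses with float() but not int().
    start_ts = float(f"{ts}"[:-3])
    return datetime.fromtimestamp(start_ts, tz=timezone.utc)


print(to_start_dt(1765569610123))   # millisecond epoch as int
print(to_start_dt(1765569610.123))  # value that already carries a decimal point
```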

M3U8/scrapers/timstreams.py (new file, 185 lines)

@@ -0,0 +1,185 @@
from functools import partial
from typing import Any

import httpx
from playwright.async_api import async_playwright

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("timstreams.json", exp=10_800)

API_URL = "https://api.timstreams.site/main"

BASE_MIRRORS = [
    "https://timstreams.site",
    "https://timstreams.space",
    "https://timstreams.top",
]

TAG = "TIM"

sport_genres = {
    1: "Soccer",
    2: "Motorsport",
    3: "MMA",
    4: "Fight",
    5: "Boxing",
    6: "Wrestling",
    7: "Basketball",
    8: "American Football",
    9: "Baseball",
    10: "Tennis",
    11: "Hockey",
    12: "Darts",
    13: "Cricket",
    14: "Cycling",
    15: "Rugby",
    16: "Live Shows",
    17: "Other",
}


async def refresh_api_cache(
    client: httpx.AsyncClient, url: str
) -> list[dict[str, Any]]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')
        return []

    return r.json()


async def get_events(
    client: httpx.AsyncClient,
    url: str,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    api_data = await refresh_api_cache(client, url)

    now = Time.now().timestamp()

    events = []

    for info in api_data:
        if not (category := info.get("category")) or category != "Events":
            continue

        stream_events: list[dict[str, Any]] = info["events"]

        for ev in stream_events:
            name: str = ev["name"]
            logo = ev.get("logo")

            if (genre := ev["genre"]) in {16, 17}:
                continue

            sport = sport_genres.get(genre, "Live Event")

            streams: list[dict[str, str]] = ev["streams"]

            for z, stream in enumerate(streams, start=1):
                key = f"[{sport}] {name} {z} ({TAG})"

                if cached_keys & {key}:
                    continue

                if not (url := stream.get("url")):
                    continue

                events.append(
                    {
                        "key": key,
                        "sport": sport,
                        "event": name,
                        "link": url,
                        "logo": logo,
                        "timestamp": now,
                    }
                )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)

    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    if not (base_url := await network.get_base(BASE_MIRRORS)):
        log.warning("No working Timstreams mirrors")
        CACHE_FILE.write(cached_urls)
        return

    log.info(f'Scraping from "{base_url}"')

    events = await get_events(
        client,
        API_URL,
        set(cached_urls.keys()),
    )

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        async with async_playwright() as p:
            browser, context = await network.browser(p)

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    log=log,
                )

                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                if url:
                    sport, event, logo, ts, link, key = (
                        ev["sport"],
                        ev["event"],
                        ev["logo"],
                        ev["timestamp"],
                        ev["link"],
                        ev["key"],
                    )

                    tvg_id, pic = leagues.get_tvg_info(sport, event)

                    entry = {
                        "url": url,
                        "logo": logo or pic,
                        "base": base_url,
                        "timestamp": ts,
                        "id": tvg_id or "Live.Event.us",
                        "link": link,
                    }

                    urls[key] = cached_urls[key] = entry

            await browser.close()

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
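
A rough sketch of how the new module plugs into a runner, mirroring the registration hunks at the top of this compare (the httpx client setup is simplified here; the real code routes requests through `network.client`, and `scrape()` also needs the repo's Playwright helpers at runtime):

```python
import asyncio

import httpx

from scrapers import timstreams  # the new module added in this compare


async def main() -> None:
    async with httpx.AsyncClient(timeout=30) as client:
        # scrape() loads the timstreams cache, resolves a working mirror,
        # then fills timstreams.urls with any newly discovered events.
        await timstreams.scrape(client)

    for key, entry in timstreams.urls.items():
        print(key, "->", entry["url"])


if __name__ == "__main__":
    asyncio.run(main())
```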


@@ -656,7 +656,7 @@
     {
         "UEFA EUROPA LEAGUE": {
             "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2310.png",
-            "names": ["EUROPA LEAGUE"]
+            "names": ["EUROPA LEAGUE", "EUROPE UEFA CONFERENCE LEAGUE"]
         }
     },
     {


@@ -53,7 +53,7 @@ class Network:
     async def check_status(self, url: str) -> bool:
         try:
-            r = await self.client.get(url)
+            r = await self.client.get(url, timeout=5)
             r.raise_for_status()
             return r.status_code == 200
         except (httpx.HTTPError, httpx.TimeoutException) as e:
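
In httpx, a per-request timeout overrides whatever default the shared `AsyncClient` was constructed with, so one stalled host can no longer hang the whole health check. A self-contained sketch of the same shape (example URL only):

```python
import asyncio

import httpx


async def check_status(client: httpx.AsyncClient, url: str) -> bool:
    try:
        # timeout=5 overrides the client-wide default for this request only.
        r = await client.get(url, timeout=5)
        r.raise_for_status()
        return r.status_code == 200
    except (httpx.HTTPError, httpx.TimeoutException):
        return False


async def main() -> None:
    async with httpx.AsyncClient(timeout=30) as client:
        print(await check_status(client, "https://example.com"))


asyncio.run(main())
```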


@@ -67,12 +67,13 @@ async def refresh_api_cache(
     results = await asyncio.gather(*tasks)
-    data = list(chain(*results))
+    if not (data := list(chain(*results))):
+        return []
     for ev in data:
         ev["ts"] = ev.pop("timestamp")
     data[-1]["timestamp"] = Time.now().timestamp()
     return data
@@ -192,7 +193,7 @@ async def get_events(
         if not (ts := event.get("ts")):
             continue
-        start_ts = int(f"{ts}"[:-3])
+        start_ts = float(f"{ts}"[:-3])
         event_dt = Time.from_ts(start_ts)


@@ -1,13 +1,7 @@
-## Base Log @ 2025-12-11 14:46 UTC
+## Base Log @ 2025-12-12 14:39 UTC
-### ✅ Working Streams: 142<br>❌ Dead Streams: 4
+### ✅ Working Streams: 146<br>❌ Dead Streams: 0
-| Channel | Error (Code) | Link |
-| ------- | ------------ | ---- |
-| BBC World News | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/139752` |
-| FDSN Florida | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/46794` |
-| Fox Sports 2 | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/1847` |
-| NESN | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/31637` |
 ---
 #### Base Channels URL
 ```