Compare commits

00000d92ead6c1d61d811873c24fc67e197aa8b5..f41268a1b4a1ce3f67d4a86b72cd7e24fc5cac52

No commits in common. "00000d92ead6c1d61d811873c24fc67e197aa8b5" and "f41268a1b4a1ce3f67d4a86b72cd7e24fc5cac52" have entirely different histories.

17 changed files with 23951 additions and 25680 deletions

EPG/TV.xml (43232 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@@ -16,7 +16,6 @@ from scrapers import (
    streamfree,
    streamsgate,
    strmd,
-    timstreams,
    tvpass,
    watchfooty,
    webcast,
@@ -60,7 +59,6 @@ async def main() -> None:
        asyncio.create_task(streamfree.scrape(network.client)),
        asyncio.create_task(streamsgate.scrape(network.client)),
        asyncio.create_task(strmd.scrape(network.client)),
-        asyncio.create_task(timstreams.scrape(network.client)),
        asyncio.create_task(tvpass.scrape(network.client)),
        asyncio.create_task(watchfooty.scrape(network.client)),
        asyncio.create_task(webcast.scrape(network.client)),
@@ -81,7 +79,6 @@ async def main() -> None:
        | strmd.urls
        | streamfree.urls
        | streamsgate.urls
-        | timstreams.urls
        | tvpass.urls
        | watchfooty.urls
        | webcast.urls
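
These three hunks remove timstreams from the same fan-out pipeline: each scraper module is started as an `asyncio` task, the tasks are awaited together, and the per-module `urls` dicts are merged with the `|` union operator. A minimal sketch of that pattern, with a hypothetical `FakeScraper` standing in for the real modules:

```
import asyncio

class FakeScraper:
    """Hypothetical stand-in for a scraper module exposing scrape() and urls."""

    def __init__(self, name: str) -> None:
        self.name = name
        self.urls: dict[str, str] = {}

    async def scrape(self) -> None:
        await asyncio.sleep(0)  # placeholder for real network I/O
        self.urls[f"[Demo] {self.name}"] = f"https://example.com/{self.name}.m3u8"

async def main() -> None:
    a, b = FakeScraper("a"), FakeScraper("b")
    # Run every scraper concurrently, as in the create_task block above.
    await asyncio.gather(a.scrape(), b.scrape())
    # Merge the per-scraper url maps, as in the `|` chain above.
    combined = a.urls | b.urls
    print(combined)

asyncio.run(main())
```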

View file

@@ -136,7 +136,6 @@ async def scrape(client: httpx.AsyncClient) -> None:
            ev["event"],
            ev["link"],
        )
        key = f"[{sport}] {event} ({TAG})"
        tvg_id, logo = leagues.get_tvg_info(sport, event)

View file

@@ -25,7 +25,7 @@ def fix_league(s: str) -> str:
async def refresh_api_cache(
    client: httpx.AsyncClient,
    url: str,
-    now_ts: float,
+    ts: float,
) -> dict[str, dict[str, str]]:
    log.info("Refreshing API cache")
@@ -37,10 +37,9 @@ async def refresh_api_cache(
        return {}
    if not (data := r.json()):
        return {}
-    data = r.json()
-    data["timestamp"] = now_ts
+    data["timestamp"] = ts
    return data
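
The deleted `data = r.json()` was dead code: the walrus expression on the line above already parsed the body and bound `data`, so the second call just repeated the work. A minimal sketch of the guard, with a stub standing in for a real `httpx.Response`:

```
class StubResponse:
    """Stand-in for httpx.Response; json() returns a parsed payload."""

    def __init__(self, payload: dict) -> None:
        self._payload = payload

    def json(self) -> dict:
        return self._payload

def extract(r: StubResponse, ts: float) -> dict:
    # The walrus operator both tests for an empty payload and binds `data`,
    # so no second `data = r.json()` call is needed afterwards.
    if not (data := r.json()):
        return {}
    data["timestamp"] = ts
    return data

print(extract(StubResponse({}), 0.0))           # {} (empty payload short-circuits)
print(extract(StubResponse({"x": 1}), 1700.0))  # {'x': 1, 'timestamp': 1700.0}
```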

View file

@@ -115,8 +115,6 @@ async def scrape(client: httpx.AsyncClient) -> None:
        CACHE_FILE.write(cached_urls)
        return
    log.info(f'Scraping from "{base_url}"')
    events = await get_events(
        client,
        api_url,

View file

@@ -12,7 +12,7 @@ log = get_logger(__name__)
urls: dict[str, dict[str, str | float]] = {}
-CACHE_FILE = Cache("streambtw.json", exp=3_600)
+CACHE_FILE = Cache("streambtw.json", exp=5_400)
BASE_URL = "https://streambtw.com"
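
The only change in this file is the cache lifetime: assuming `exp` is a time-to-live in seconds (the `Cache` class itself is not shown in this compare), the window moves from 3_600 s (one hour) to 5_400 s (ninety minutes). A sketch of how such an expiry check is commonly written, with a hypothetical `load_if_fresh` helper:

```
import json
import time
from pathlib import Path

def load_if_fresh(path: Path, exp: float) -> dict:
    # Hypothetical expiry check: treat the cache as stale once the file's
    # mtime is more than `exp` seconds old, mirroring exp=3_600 vs exp=5_400.
    if not path.exists() or time.time() - path.stat().st_mtime > exp:
        return {}
    return json.loads(path.read_text())
```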

View file

@@ -45,10 +45,9 @@ async def refresh_api_cache(
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')
-        return []
+        return {}
    if not (data := r.json()):
        return []
-    data = r.json()
    data[-1]["timestamp"] = now_ts

View file

@@ -64,7 +64,7 @@ async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
        events[key] = {
            "url": network.build_proxy_url(
                tag=TAG,
-                path=f"{stream_key}/index.m3u8",
+                path=f"{stream_key}720p/index.m3u8",
                query={"stream_name": name},
            ),
            "logo": logo or pic,

View file

@@ -59,8 +59,7 @@ async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
async def refresh_api_cache(
-    client: httpx.AsyncClient,
-    now_ts: float,
+    client: httpx.AsyncClient, ts: float
) -> list[dict[str, Any]]:
    log.info("Refreshing API cache")
@@ -71,13 +70,12 @@ async def refresh_api_cache(
    results = await asyncio.gather(*tasks)
    if not (data := list(chain(*results))):
        return []
-    data = list(chain(*results))
    for ev in data:
        ev["ts"] = ev.pop("timestamp")
-    data[-1]["timestamp"] = now_ts
+    data[-1]["timestamp"] = ts
    return data
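
Besides dropping the redundant `data = list(chain(*results))` (the walrus binding already holds the flattened list), this hunk preserves a bookkeeping pattern: each event's own `timestamp` is renamed to `ts`, which frees the `timestamp` key on the last element to record when the API cache was refreshed. A sketch under those assumptions:

```
from itertools import chain
import time

results = [[{"name": "A", "timestamp": 1_700_000_000}],
           [{"name": "B", "timestamp": 1_700_000_060}]]

data = list(chain(*results))        # flatten the per-source result lists
for ev in data:
    ev["ts"] = ev.pop("timestamp")  # per-event start time moves to "ts"
data[-1]["timestamp"] = time.time() # fetch time rides on the last element
print(data)
```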

View file

@@ -38,7 +38,7 @@ def fix_sport(s: str) -> str:
async def refresh_api_cache(
    client: httpx.AsyncClient,
    url: str,
-    now_ts: float,
+    ts: float,
) -> list[dict[str, Any]]:
    log.info("Refreshing API cache")
@@ -49,12 +49,11 @@ async def refresh_api_cache(
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')
-        return []
+        return {}
    if not (data := r.json()):
        return []
-    data = r.json()
-    data[-1]["timestamp"] = now_ts
+    data[-1]["timestamp"] = ts
    return data
@@ -89,7 +88,7 @@ async def get_events(
        if not (ts := event["date"]):
            continue
-        start_ts = float(f"{ts}"[:-3])
+        start_ts = int(f"{ts}"[:-3])
        event_dt = Time.from_ts(start_ts)
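
The `float(...)` to `int(...)` change rides on a string trick: assuming `event["date"]` is an epoch timestamp in milliseconds, slicing three characters off its string form divides it by 1000, leaving whole seconds for `Time.from_ts`. For example:

```
ts = 1733935200000            # epoch milliseconds
start_ts = int(f"{ts}"[:-3])  # "1733935200000"[:-3] -> "1733935200"
print(start_ts)               # 1733935200, epoch seconds

# Equivalent arithmetic form, without string slicing:
print(ts // 1000)             # 1733935200
```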

View file

@@ -1,185 +0,0 @@
from functools import partial
from typing import Any

import httpx
from playwright.async_api import async_playwright

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}
CACHE_FILE = Cache("timstreams.json", exp=10_800)
API_URL = "https://api.timstreams.site/main"
BASE_MIRRORS = [
    "https://timstreams.site",
    "https://timstreams.space",
    "https://timstreams.top",
]
TAG = "TIM"

sport_genres = {
    1: "Soccer",
    2: "Motorsport",
    3: "MMA",
    4: "Fight",
    5: "Boxing",
    6: "Wrestling",
    7: "Basketball",
    8: "American Football",
    9: "Baseball",
    10: "Tennis",
    11: "Hockey",
    12: "Darts",
    13: "Cricket",
    14: "Cycling",
    15: "Rugby",
    16: "Live Shows",
    17: "Other",
}


async def refresh_api_cache(
    client: httpx.AsyncClient, url: str
) -> list[dict[str, Any]]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')
        return []
    return r.json()


async def get_events(
    client: httpx.AsyncClient,
    url: str,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    api_data = await refresh_api_cache(client, url)
    now = Time.now().timestamp()
    events = []
    for info in api_data:
        if not (category := info.get("category")) or category != "Events":
            continue
        stream_events: list[dict[str, Any]] = info["events"]
        for ev in stream_events:
            name: str = ev["name"]
            logo = ev.get("logo")
            if (genre := ev["genre"]) in {16, 17}:
                continue
            sport = sport_genres.get(genre, "Live Event")
            streams: list[dict[str, str]] = ev["streams"]
            for z, stream in enumerate(streams, start=1):
                key = f"[{sport}] {name} {z} ({TAG})"
                if cached_keys & {key}:
                    continue
                if not (url := stream.get("url")):
                    continue
                events.append(
                    {
                        "key": key,
                        "sport": sport,
                        "event": name,
                        "link": url,
                        "logo": logo,
                        "timestamp": now,
                    }
                )
    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)
    log.info(f"Loaded {cached_count} event(s) from cache")
    if not (base_url := await network.get_base(BASE_MIRRORS)):
        log.warning("No working Timstreams mirrors")
        CACHE_FILE.write(cached_urls)
        return
    log.info(f'Scraping from "{base_url}"')
    events = await get_events(
        client,
        API_URL,
        set(cached_urls.keys()),
    )
    log.info(f"Processing {len(events)} new URL(s)")
    if events:
        async with async_playwright() as p:
            browser, context = await network.browser(p)
            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    log=log,
                )
                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )
                if url:
                    sport, event, logo, ts, link, key = (
                        ev["sport"],
                        ev["event"],
                        ev["logo"],
                        ev["timestamp"],
                        ev["link"],
                        ev["key"],
                    )
                    tvg_id, pic = leagues.get_tvg_info(sport, event)
                    entry = {
                        "url": url,
                        "logo": logo or pic,
                        "base": base_url,
                        "timestamp": ts,
                        "id": tvg_id or "Live.Event.us",
                        "link": link,
                    }
                    urls[key] = cached_urls[key] = entry
            await browser.close()
    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")
    CACHE_FILE.write(cached_urls)
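
One detail worth flagging in the deleted scraper: `if cached_keys & {key}:` allocates a one-element set just to intersect it, where a plain membership test is equivalent and more direct:

```
cached_keys = {"[Soccer] Demo 1 (TIM)"}
key = "[Soccer] Demo 1 (TIM)"

print(bool(cached_keys & {key}))  # set intersection, as in the deleted code
print(key in cached_keys)         # equivalent, simpler membership test
```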

View file

@@ -656,7 +656,7 @@
    {
        "UEFA EUROPA LEAGUE": {
            "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2310.png",
-            "names": ["EUROPA LEAGUE", "EUROPE UEFA CONFERENCE LEAGUE"]
+            "names": ["EUROPA LEAGUE"]
        }
    },
    {
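
The data being trimmed here is an alias table: each league entry carries a canonical name, a logo, and a list of alternate `names` to match against. A hypothetical lookup over entries shaped like this (the real `get_tvg_info` is not shown in this compare):

```
LEAGUES = [
    {
        "UEFA EUROPA LEAGUE": {
            "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2310.png",
            "names": ["EUROPA LEAGUE"],
        }
    },
]

def find_logo(event: str) -> str | None:
    # Match the event text against each canonical name and its aliases.
    text = event.upper()
    for entry in LEAGUES:
        for canonical, info in entry.items():
            if canonical in text or any(n in text for n in info["names"]):
                return info["logo"]
    return None

print(find_logo("Europa League: Roma vs Porto"))
```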

View file

@@ -53,7 +53,7 @@ class Network:
    async def check_status(self, url: str) -> bool:
        try:
-            r = await self.client.get(url, timeout=5)
+            r = await self.client.get(url)
            r.raise_for_status()
            return r.status_code == 200
        except (httpx.HTTPError, httpx.TimeoutException) as e:
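
The line on the left passes a per-request `timeout=5` to `client.get`, overriding whatever default the shared `AsyncClient` was built with; the line on the right falls back to that client-wide default (httpx itself defaults to 5 seconds if nothing is configured). A short sketch of the two levels:

```
import asyncio
import httpx

async def main() -> None:
    # The client-level timeout applies to every request made through it.
    async with httpx.AsyncClient(timeout=httpx.Timeout(10.0)) as client:
        r1 = await client.get("https://example.com")             # 10 s budget
        r2 = await client.get("https://example.com", timeout=5)  # per-request override
        print(r1.status_code, r2.status_code)

asyncio.run(main())
```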

View file

@@ -67,13 +67,12 @@ async def refresh_api_cache(
    results = await asyncio.gather(*tasks)
    if not (data := list(chain(*results))):
        return []
-    data = list(chain(*results))
    for ev in data:
        ev["ts"] = ev.pop("timestamp")
    data[-1]["timestamp"] = Time.now().timestamp()
    return data
@@ -193,7 +192,7 @@ async def get_events(
        if not (ts := event.get("ts")):
            continue
-        start_ts = float(f"{ts}"[:-3])
+        start_ts = int(f"{ts}"[:-3])
        event_dt = Time.from_ts(start_ts)

View file

@@ -1,7 +1,13 @@
-## Base Log @ 2025-12-12 14:39 UTC
+## Base Log @ 2025-12-11 14:46 UTC
-### ✅ Working Streams: 146<br>❌ Dead Streams: 0
+### ✅ Working Streams: 142<br>❌ Dead Streams: 4
+| Channel | Error (Code) | Link |
+| ------- | ------------ | ---- |
+| BBC World News | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/139752` |
+| FDSN Florida | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/46794` |
+| Fox Sports 2 | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/1847` |
+| NESN | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/31637` |
---
#### Base Channels URL
```