Compare commits

...

35 commits

Author SHA1 Message Date
GitHub Actions Bot
c901ed52e8 health log 2026-03-08 08:53:46 +00:00
GitHub Actions Bot
4bf9b9dc3b update M3U8 2026-03-07 23:31:22 -05:00
GitHub Actions Bot
63c3a81edd update EPG 2026-03-08 04:25:47 +00:00
GitHub Actions Bot
b388d178f3 health log 2026-03-08 04:23:04 +00:00
GitHub Actions Bot
bba96ff8e9 update M3U8 2026-03-07 23:02:37 -05:00
GitHub Actions Bot
9bd995ee00 update M3U8 2026-03-07 22:31:17 -05:00
GitHub Actions Bot
459a92d414 update M3U8 2026-03-07 22:03:15 -05:00
GitHub Actions Bot
15f4640929 update M3U8 2026-03-07 21:30:48 -05:00
GitHub Actions Bot
9dd220558a update M3U8 2026-03-07 21:01:56 -05:00
GitHub Actions Bot
d12e50ac1b update M3U8 2026-03-07 20:32:54 -05:00
GitHub Actions Bot
b28693b2b6 update M3U8 2026-03-07 20:03:48 -05:00
GitHub Actions Bot
234bb4c140 update M3U8 2026-03-07 19:32:43 -05:00
GitHub Actions Bot
e03781a23c update M3U8 2026-03-07 19:04:29 -05:00
GitHub Actions Bot
c6b580895d update M3U8 2026-03-07 18:32:18 -05:00
GitHub Actions Bot
25aaa46d13 update M3U8 2026-03-07 18:02:46 -05:00
GitHub Actions Bot
535ad23745 update M3U8 2026-03-07 17:31:57 -05:00
GitHub Actions Bot
103f39f21b update M3U8 2026-03-07 17:02:59 -05:00
GitHub Actions Bot
b465ff0fba update M3U8 2026-03-07 16:33:09 -05:00
GitHub Actions Bot
ea1e6c0fdb update M3U8 2026-03-07 16:04:49 -05:00
GitHub Actions Bot
f959b79f92 health log 2026-03-07 20:47:10 +00:00
GitHub Actions Bot
e616b56994 update M3U8 2026-03-07 15:33:12 -05:00
GitHub Actions Bot
ec1a34a3c5 update M3U8 2026-03-07 15:06:48 -05:00
GitHub Actions Bot
b1580bcfe2 update M3U8 2026-03-07 14:33:39 -05:00
GitHub Actions Bot
06e2e34cfb update M3U8 2026-03-07 14:08:16 -05:00
GitHub Actions Bot
6eae2553f9 update EPG 2026-03-07 19:00:58 +00:00
GitHub Actions Bot
9fbccc5f46 update M3U8 2026-03-07 13:33:15 -05:00
GitHub Actions Bot
aa4fc1d744 update M3U8 2026-03-07 13:09:00 -05:00
GitHub Actions Bot
c3978ca189 update M3U8 2026-03-07 12:12:23 -05:00
doms9
00000d9379 e
- edit domain for watchfooty.py
- misc edits
2026-03-07 11:52:59 -05:00
GitHub Actions Bot
7e924aa3a9 update M3U8 2026-03-07 11:02:21 -05:00
GitHub Actions Bot
b0a574637a update M3U8 2026-03-07 10:04:43 -05:00
GitHub Actions Bot
26ef6b9b5c health log 2026-03-07 14:47:35 +00:00
GitHub Actions Bot
39774d0e65 update M3U8 2026-03-07 09:02:59 -05:00
GitHub Actions Bot
067419591b update M3U8 2026-03-07 08:03:16 -05:00
GitHub Actions Bot
f9d1f11d0f update EPG 2026-03-07 10:55:19 +00:00
8 changed files with 47629 additions and 47130 deletions

File diff suppressed because it is too large Load diff

87138
M3U8/TV.xml

File diff suppressed because one or more lines are too long

View file

@@ -43,7 +43,7 @@ http://212.102.60.231/BET/index.m3u8
http://23.237.104.106:8080/USA_BTN/index.m3u8 http://23.237.104.106:8080/USA_BTN/index.m3u8
#EXTINF:-1 tvg-chno="15" tvg-id="Bloomberg.HD.us2" tvg-name="Bloomberg TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s71799_dark_360w_270h.png" group-title="TV",Bloomberg TV #EXTINF:-1 tvg-chno="15" tvg-id="Bloomberg.HD.us2" tvg-name="Bloomberg TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s71799_dark_360w_270h.png" group-title="TV",Bloomberg TV
http://41.205.93.154/BLOOMBERG/index.m3u8 https://live-manifest.production-public.tubi.io/live/a5aabf5a-9615-4987-8163-18edfa1360c6/playlist.m3u8
#EXTINF:-1 tvg-chno="16" tvg-id="Boomerang.us2" tvg-name="Boomerang" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s21883_dark_360w_270h.png" group-title="TV",Boomerang #EXTINF:-1 tvg-chno="16" tvg-id="Boomerang.us2" tvg-name="Boomerang" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s21883_dark_360w_270h.png" group-title="TV",Boomerang
http://23.237.104.106:8080/USA_BOOMERANG/index.m3u8 http://23.237.104.106:8080/USA_BOOMERANG/index.m3u8

File diff suppressed because it is too large Load diff

View file

@@ -72,7 +72,7 @@ async def main() -> None:
asyncio.create_task(roxie.scrape(hdl_brwsr)), asyncio.create_task(roxie.scrape(hdl_brwsr)),
asyncio.create_task(sportzone.scrape(xtrnl_brwsr)), asyncio.create_task(sportzone.scrape(xtrnl_brwsr)),
asyncio.create_task(streamcenter.scrape(hdl_brwsr)), asyncio.create_task(streamcenter.scrape(hdl_brwsr)),
# asyncio.create_task(streamhub.scrape(xtrnl_brwsr)), asyncio.create_task(streamhub.scrape(xtrnl_brwsr)),
asyncio.create_task(streamsgate.scrape(xtrnl_brwsr)), asyncio.create_task(streamsgate.scrape(xtrnl_brwsr)),
# asyncio.create_task(timstreams.scrape(xtrnl_brwsr)), # asyncio.create_task(timstreams.scrape(xtrnl_brwsr)),
] ]

View file

@@ -17,19 +17,19 @@ CACHE_FILE = Cache(TAG, exp=10_800)
HTML_CACHE = Cache(f"{TAG}-html", exp=19_800) HTML_CACHE = Cache(f"{TAG}-html", exp=19_800)
BASE_URL = "https://livesports4u.net" BASE_URL = "https://streamhub.pro"
SPORT_ENDPOINTS = [ SPORT_ENDPOINTS = [
f"sport_{sport_id}" f"sport_{sport_id}"
for sport_id in [ for sport_id in [
# "68c02a4465113", # American Football # "68c02a4465113", # American Football
"68c02a446582f", # Baseball # "68c02a446582f", # Baseball
"68c02a4466011", # Basketball "68c02a4466011", # Basketball
"68c02a4466f56", # Hockey "68c02a4466f56", # Hockey
"68c02a44674e9", # MMA # "68c02a44674e9", # MMA
"68c02a4467a48", # Racing # "68c02a4467a48", # Racing
"68c02a4464a38", # Soccer "68c02a4464a38", # Soccer
"68c02a4468cf7", # Tennis # "68c02a4468cf7", # Tennis
] ]
] ]
@@ -107,7 +107,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
now.timestamp(), now.timestamp(),
) )
for date in [now.date(), now.delta(days=1).date()] for date in [now.date(), now.delta(days=1).date()]
for sport_id in SPORT_ENDPOINTS.values() for sport_id in SPORT_ENDPOINTS
] ]
results = await asyncio.gather(*tasks) results = await asyncio.gather(*tasks)
@@ -193,6 +193,8 @@ async def scrape(browser: Browser) -> None:
if url: if url:
valid_count += 1 valid_count += 1
entry["url"] = url.split("?")[0]
urls[key] = entry urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)") log.info(f"Collected and cached {valid_count - cached_count} new event(s)")

View file

@@ -19,13 +19,9 @@ CACHE_FILE = Cache(TAG, exp=10_800)
API_FILE = Cache(f"{TAG}-api", exp=19_800) API_FILE = Cache(f"{TAG}-api", exp=19_800)
API_URL = "https://api.watchfooty.st" BASE_DOMAIN = "watchfooty.pw"
BASE_MIRRORS = [ API_URL, BASE_URL = f"https://api.{BASE_DOMAIN}", f"https://www.{BASE_DOMAIN}"
"https://www.watchfooty.top",
"https://www.watchfooty.st",
"https://www.watchfooty.su",
]
VALID_SPORTS = [ VALID_SPORTS = [
# "american-football", # "american-football",
@@ -123,7 +119,7 @@ async def process_event(
embed = re.sub( embed = re.sub(
pattern=r"^.*\/stream", pattern=r"^.*\/stream",
repl="https://spiderembed.top/embed", repl="https://pikachusports.top/embed",
string=stream_url, string=stream_url,
) )
@@ -169,7 +165,7 @@ async def process_event(
page.remove_listener("request", handler) page.remove_listener("request", handler)
async def get_events(base_url: str, cached_keys: list[str]) -> list[dict[str, str]]: async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
now = Time.clean(Time.now()) now = Time.clean(Time.now())
if not (api_data := API_FILE.load(per_entry=False, index=-1)): if not (api_data := API_FILE.load(per_entry=False, index=-1)):
@@ -218,7 +214,7 @@ async def get_events(base_url: str, cached_keys: list[str]) -> list[dict[str, st
{ {
"sport": sport, "sport": sport,
"event": name, "event": name,
"link": urljoin(base_url, f"stream/{match_id}"), "link": urljoin(BASE_URL, f"stream/{match_id}"),
"match-id": match_id, "match-id": match_id,
"logo": logo, "logo": logo,
"timestamp": event_dt.timestamp(), "timestamp": event_dt.timestamp(),
@@ -239,16 +235,9 @@ async def scrape(browser: Browser) -> None:
log.info(f"Loaded {cached_count} event(s) from cache") log.info(f"Loaded {cached_count} event(s) from cache")
if not (base_url := await network.get_base(BASE_MIRRORS)): log.info(f'Scraping from "{BASE_URL}"')
log.warning("No working Watch Footy mirrors")
CACHE_FILE.write(cached_urls) if events := await get_events(cached_urls.keys()):
return
log.info(f'Scraping from "{base_url}"')
if events := await get_events(base_url, cached_urls.keys()):
log.info(f"Processing {len(events)} new URL(s)") log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser, stealth=False) as context: async with network.event_context(browser, stealth=False) as context:

View file

@@ -1,12 +1,12 @@
## Base Log @ 2026-03-07 08:53 UTC ## Base Log @ 2026-03-08 08:53 UTC
### ✅ Working Streams: 158<br>❌ Dead Streams: 3 ### ✅ Working Streams: 158<br>❌ Dead Streams: 3
| Channel | Error (Code) | Link | | Channel | Error (Code) | Link |
| ------- | ------------ | ---- | | ------- | ------------ | ---- |
| Bloomberg TV | HTTP Error (404) | `http://41.205.93.154/BLOOMBERG/index.m3u8` | | MSNBC | HTTP Error (404) | `http://41.205.93.154/MSNBC/index.m3u8` |
| Discovery Science | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/20866.m3u8` | | NBC Sports Bay Area | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/9900.m3u8` |
| Showtime Extreme | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/13220.m3u8` | | Spectrum SportsNet Lakers | HTTP Error (403) | `http://mytvstream.net:8080/live/30550113/30550113/21842.m3u8` |
--- ---
#### Base Channels URL #### Base Channels URL
``` ```