Compare commits

...

36 commits

Author SHA1 Message Date
GitHub Actions Bot
b6abb9371f health log 2026-03-04 09:02:58 +00:00
GitHub Actions Bot
8a797edffa update M3U8 2026-03-03 23:31:21 -05:00
GitHub Actions Bot
3c8e6919c0 update EPG 2026-03-04 04:22:02 +00:00
GitHub Actions Bot
7f51b54c7a health log 2026-03-04 04:19:36 +00:00
GitHub Actions Bot
098cddaff6 update M3U8 2026-03-03 23:01:46 -05:00
GitHub Actions Bot
ce2990298f update M3U8 2026-03-03 22:31:35 -05:00
GitHub Actions Bot
10d19b9904 update M3U8 2026-03-03 22:02:15 -05:00
GitHub Actions Bot
3d848d3185 update M3U8 2026-03-03 21:31:46 -05:00
GitHub Actions Bot
301082e11d update M3U8 2026-03-03 21:03:47 -05:00
GitHub Actions Bot
f14b365aa0 update M3U8 2026-03-03 20:31:41 -05:00
GitHub Actions Bot
cc5e612541 update M3U8 2026-03-03 20:03:45 -05:00
doms9
00000d9895 e 2026-03-03 19:48:15 -05:00
GitHub Actions Bot
530fe3e562 update M3U8 2026-03-03 19:32:48 -05:00
GitHub Actions Bot
f8ba834d40 update M3U8 2026-03-03 19:03:20 -05:00
GitHub Actions Bot
6ec2fe0efc update M3U8 2026-03-03 18:31:18 -05:00
GitHub Actions Bot
7b2478cd13 update M3U8 2026-03-03 18:02:07 -05:00
GitHub Actions Bot
c62ccaecff update M3U8 2026-03-03 17:30:23 -05:00
GitHub Actions Bot
2689c5500b update M3U8 2026-03-03 17:01:53 -05:00
GitHub Actions Bot
a3ae6415d2 health log 2026-03-03 16:59:24 -05:00
doms9
00000d9c59 e
- edit watchfooty.py scraping
2026-03-03 16:59:09 -05:00
GitHub Actions Bot
75f1d95b12 update M3U8 2026-03-03 16:31:34 -05:00
GitHub Actions Bot
84608762af update M3U8 2026-03-03 16:01:28 -05:00
GitHub Actions Bot
d19ffb0540 health log 2026-03-03 20:55:34 +00:00
GitHub Actions Bot
e9273b39fc update M3U8 2026-03-03 15:31:54 -05:00
GitHub Actions Bot
db9cf0cdb6 update M3U8 2026-03-03 15:05:32 -05:00
GitHub Actions Bot
fd56402f2f update M3U8 2026-03-03 14:32:52 -05:00
GitHub Actions Bot
8a0ef3edfa update M3U8 2026-03-03 14:03:06 -05:00
GitHub Actions Bot
e227694838 update M3U8 2026-03-03 13:32:25 -05:00
GitHub Actions Bot
eaceb88585 update M3U8 2026-03-03 13:04:13 -05:00
GitHub Actions Bot
2c24b41c0b update M3U8 2026-03-03 12:03:39 -05:00
GitHub Actions Bot
8976901f3b update M3U8 2026-03-03 11:02:30 -05:00
GitHub Actions Bot
09f5e10a04 health log 2026-03-03 15:12:06 +00:00
GitHub Actions Bot
099e12294f update M3U8 2026-03-03 10:02:08 -05:00
GitHub Actions Bot
1437b45bc6 update M3U8 2026-03-03 09:01:57 -05:00
GitHub Actions Bot
d80399d4a6 update M3U8 2026-03-03 08:02:38 -05:00
GitHub Actions Bot
420afea2cb update EPG 2026-03-03 11:19:01 +00:00
23 changed files with 39340 additions and 34853 deletions

File diff suppressed because it is too large Load diff

68935
M3U8/TV.xml

File diff suppressed because one or more lines are too long

View file

@@ -58,7 +58,7 @@ http://41.205.93.154/BRAVO/index.m3u8
https://buzzrota-web.amagi.tv/playlist.m3u8
#EXTINF:-1 tvg-chno="20" tvg-id="CSPAN.us2" tvg-name="C-SPAN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10161_dark_360w_270h.png" group-title="TV",C-SPAN
http://mytvstream.net:8080/live/bn80NG/909467/136589.m3u8
http://mytvstream.net:8080/live/30550113/30550113/136589.m3u8
#EXTINF:-1 tvg-chno="21" tvg-id="Cartoon.Network.HD.us2" tvg-name="Cartoon Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12131_dark_360w_270h.png" group-title="TV",Cartoon Network
http://23.237.104.106:8080/USA_CARTOON_NETWORK/index.m3u8
@@ -127,7 +127,7 @@ http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129866.ts
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129867.ts
#EXTINF:-1 tvg-chno="43" tvg-id="Disney.Channel.HD.us2" tvg-name="Disney" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10171_dark_360w_270h.png" group-title="TV",Disney Channel
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/257087.ts
http://89.105.221.127/Disney/index.m3u8?token=test
#EXTINF:-1 tvg-chno="44" tvg-id="Disney.XD.HD.us2" tvg-name="Disney XD" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18279_dark_360w_270h.png" group-title="TV",Disney XD
http://23.237.104.106:8080/USA_DISNEY_XD/index.m3u8
@@ -148,40 +148,40 @@ http://23.237.104.106:8080/USA_ESPNU/index.m3u8
http://snowbank.houseinventer.com/6501/index.m3u8?token=M1lDdWljYkdyZGFhZzVxeGc2Mkt5OGJicWNxd2xheDkzWWVieEt5b3lZVERxNEduaVp1UnBxU2VlWmF0ZnRlRGxaMm1zNStDbnJOOXFZMlhtcStybmc9PQ==
#EXTINF:-1 tvg-chno="50" tvg-id="FanDuel.Sports.Network.Detroit.24/7.HDTV.us" tvg-name="FDSN Detroit" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s54286_dark_360w_270h.png" group-title="TV",FDSN Detroit
http://mytvstream.net:8080/live/bn80NG/909467/20930.m3u8
http://mytvstream.net:8080/live/30550113/30550113/20930.m3u8
#EXTINF:-1 tvg-chno="51" tvg-id="FanDuel.Sports.Network.Florida.HDTV.(Out.of.Market).us" tvg-name="FDSN Florida" tvg-logo="https://i.gyazo.com/fad701fbaaafe161b13b23ed9b50179b.png" group-title="TV",FDSN Florida
http://mytvstream.net:8080/live/bn80NG/909467/46794.m3u8
http://mytvstream.net:8080/live/30550113/30550113/46794.m3u8
#EXTINF:-1 tvg-chno="52" tvg-id="FanDuel.Sports.Network.Midwest.24/7.HDTV.us" tvg-name="FDSN Midwest" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11058_dark_360w_270h.png" group-title="TV",FDSN Midwest
http://mytvstream.net:8080/live/bn80NG/909467/66795.m3u8
http://mytvstream.net:8080/live/30550113/30550113/66795.m3u8
#EXTINF:-1 tvg-chno="53" tvg-id="FanDuel.Sports.Network.North.HDTV.us" tvg-name="FDSN North" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10977_dark_360w_270h.png" group-title="TV",FDSN North
http://mytvstream.net:8080/live/bn80NG/909467/58827.m3u8
http://mytvstream.net:8080/live/30550113/30550113/58827.m3u8
#EXTINF:-1 tvg-chno="54" tvg-id="FanDuel.Sports.Network.Ohio.(Cleveland).HDTV.us" tvg-name="FDSN Ohio" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49691_dark_360w_270h.png" group-title="TV",FDSN Ohio
http://mytvstream.net:8080/live/bn80NG/909467/17752.m3u8
http://mytvstream.net:8080/live/30550113/30550113/17752.m3u8
#EXTINF:-1 tvg-chno="55" tvg-id="FanDuel.Sports.Network.Oklahoma.24/7.HDTV.(Tulsa).us" tvg-name="FDSN Oklahoma" tvg-logo="https://i.gyazo.com/80ad6fd142cd67f06eef58d9ce5aa72b.png" group-title="TV",FDSN Oklahoma
http://mytvstream.net:8080/live/bn80NG/909467/20934.m3u8
http://mytvstream.net:8080/live/30550113/30550113/20934.m3u8
#EXTINF:-1 tvg-chno="56" tvg-id="FanDuel.Sports.Network.SoCal.HDTV.us" tvg-name="FDSN SoCal" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16743_dark_360w_270h.png" group-title="TV",FDSN SoCal
http://mytvstream.net:8080/live/bn80NG/909467/221151.m3u8
http://mytvstream.net:8080/live/30550113/30550113/221151.m3u8
#EXTINF:-1 tvg-chno="57" tvg-id="FanDuel.Sports.Network.Southeast.HDTV.(Mont./Birm./Dothan/Mobile.AL).us" tvg-name="FDSN Southeast" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s20789_dark_360w_270h.png" group-title="TV",FDSN Southeast
http://mytvstream.net:8080/live/bn80NG/909467/2213.m3u8
http://mytvstream.net:8080/live/30550113/30550113/2213.m3u8
#EXTINF:-1 tvg-chno="58" tvg-id="FanDuel.Sports.Network.Southwest.HDTV.24/7.(Main).us" tvg-name="FDSN Southwest" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s59629_dark_360w_270h.png" group-title="TV",FDSN Southwest
http://mytvstream.net:8080/live/bn80NG/909467/21843.m3u8
http://mytvstream.net:8080/live/30550113/30550113/21843.m3u8
#EXTINF:-1 tvg-chno="59" tvg-id="FanDuel.Sports.Network.Sun.South.24/7.HDTV.(South.Marlins,.Rays,.Heat).us" tvg-name="FDSN Sun" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s61084_dark_360w_270h.png" group-title="TV",FDSN Sun
http://mytvstream.net:8080/live/bn80NG/909467/104917.m3u8
http://mytvstream.net:8080/live/30550113/30550113/104917.m3u8
#EXTINF:-1 tvg-chno="60" tvg-id="FanDuel.Sports.Network.West.HDTV.us" tvg-name="FDSN West" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s59627_dark_360w_270h.png" group-title="TV",FDSN West
http://mytvstream.net:8080/live/bn80NG/909467/20932.m3u8
http://mytvstream.net:8080/live/30550113/30550113/20932.m3u8
#EXTINF:-1 tvg-chno="61" tvg-id="FanDuel.Sports.Network.Wisconsin.24/7.HDTV.us" tvg-name="FDSN Wisconsin" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16348_dark_360w_270h.png" group-title="TV",FDSN Wisconsin
http://mytvstream.net:8080/live/bn80NG/909467/78599.m3u8
http://mytvstream.net:8080/live/30550113/30550113/78599.m3u8
#EXTINF:-1 tvg-chno="62" tvg-id="plex.tv.FIFA+.plex" tvg-name="FIFA+ TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s136235_dark_360w_270h.png" group-title="TV",FIFA+ TV
https://jmp2.uk/stvp-IN270000230
@@ -253,7 +253,7 @@ http://23.237.104.106:8080/USA_HBO2/index.m3u8
http://23.237.104.106:8080/USA_HBO_COMEDY/index.m3u8
#EXTINF:-1 tvg-chno="85" tvg-id="HBO.Drama.us2" tvg-name="HBO Family" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s34879_dark_360w_270h.png" group-title="TV",HBO Family
http://mytvstream.net:8080/live/bn80NG/909467/17772.m3u8
http://mytvstream.net:8080/live/30550113/30550113/17772.m3u8
#EXTINF:-1 tvg-chno="86" tvg-id="HBO.Zone.HD.us2" tvg-name="HBO Zone" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18431_dark_360w_270h.png" group-title="TV",HBO Zone
http://23.237.104.106:8080/USA_HBO_ZONE/index.m3u8
@@ -307,10 +307,10 @@ http://212.102.60.231/NBA_TV/index.m3u8
http://stream.cammonitorplus.net/1765/index.m3u8?token=MnE3ZWg1YkgxdFdWZlo2c2hLMkltWnJhcFo1OHhxcXVyb2pKazZXaWxZRERxNEduaVp1UnBxU2VlWmF0ZnRlRGxaMm1zNStDbnJOOXFZMlhtcStybmc9PQ==
#EXTINF:-1 tvg-chno="103" tvg-id="NBC.Sports.Bay.Area.HD.us2" tvg-name="NBC Sports Bay Area" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s63138_dark_360w_270h.png" group-title="TV",NBC Sports Bay Area
http://mytvstream.net:8080/live/bn80NG/909467/9900.m3u8
http://mytvstream.net:8080/live/30550113/30550113/9900.m3u8
#EXTINF:-1 tvg-chno="104" tvg-id="NBC.Sports.Boston.HD.us2" tvg-name="NBC Sports Boston" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49198_dark_360w_270h.png" group-title="TV",NBC Sports Boston
http://mytvstream.net:8080/live/bn80NG/909467/20939.m3u8
http://mytvstream.net:8080/live/30550113/30550113/20939.m3u8
#EXTINF:-1 tvg-chno="105" tvg-id="NBC.Sports.California.SAT.us2" tvg-name="NBC Sports California" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s45540_dark_360w_270h.png" group-title="TV",NBC Sports California
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/136474.ts

File diff suppressed because it is too large Load diff

View file

@@ -80,7 +80,7 @@ async def main() -> None:
asyncio.create_task(fawa.scrape()),
asyncio.create_task(istreameast.scrape()),
asyncio.create_task(ovogoal.scrape()),
asyncio.create_task(pawa.scrape()),
# asyncio.create_task(pawa.scrape()),
asyncio.create_task(shark.scrape()),
asyncio.create_task(streambtw.scrape()),
asyncio.create_task(totalsportek.scrape()),

View file

@@ -19,7 +19,7 @@ BASE_URL = "http://www.fawanews.sc/"
async def process_event(url: str, url_num: int) -> str | None:
if not (html_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return

View file

@@ -19,7 +19,7 @@ BASE_URL = "https://istreameast.app"
async def process_event(url: str, url_num: int) -> str | None:
if not (event_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return
@@ -36,7 +36,7 @@ async def process_event(url: str, url_num: int) -> str | None:
return
if not (iframe_src_data := await network.request(iframe_src, log=log)):
log.info(f"URL {url_num}) Failed to load iframe source.")
log.warning(f"URL {url_num}) Failed to load iframe source.")
return

View file

@@ -47,8 +47,6 @@ async def process_event(
page: Page,
) -> str | None:
event_id_pattern = re.compile(r"&c=(\d*)", re.I)
captured: list[str] = []
got_one = asyncio.Event()
@@ -68,8 +66,10 @@ async def process_event(
timeout=10_000,
)
if resp.status != 200:
log.warning(f"URL {url_num}) Status Code: {resp.status}")
if not resp or resp.status != 200:
log.warning(
f"URL {url_num}) Status Code: {resp.status if resp else 'None'}"
)
return
try:
@@ -80,12 +80,7 @@ async def process_event(
log.warning(f"URL {url_num}) No valid sources found.")
return
if (match := event_id_pattern.search(href)) and (
event_id := match[1]
).isalnum():
event_url = f"https://emb.apl392.me/player/live.php?id={event_id}"
else:
event_url = href if href.startswith("http") else f"https:{href}"
event_url = href if href.startswith("http") else f"https:{href}"
await page.goto(
event_url,

View file

@@ -20,7 +20,7 @@ async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]
nones = None, None
if not (html_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return nones
soup = HTMLParser(html_data.content)

View file

@@ -15,12 +15,12 @@ TAG = "PAWA"
CACHE_FILE = Cache(TAG, exp=10_800)
BASE_URL = "https://pawastreams.net/feed"
BASE_URL = "https://pawastreams.net/feed/"
async def process_event(url: str, url_num: int) -> str | None:
if not (event_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return
@@ -37,7 +37,7 @@ async def process_event(url: str, url_num: int) -> str | None:
return
if not (iframe_src_data := await network.request(iframe_src, log=log)):
log.info(f"URL {url_num}) Failed to load iframe source.")
log.warning(f"URL {url_num}) Failed to load iframe source.")
return

View file

@@ -25,8 +25,8 @@ async def get_api_data(page: Page) -> dict[str, list[dict, str, str]]:
timeout=6_000,
)
if resp.status != 200:
log.warning(f"{url} Status Code: {resp.status}")
if not resp or resp.status != 200:
log.warning(f"{url} Status Code: {resp.status if resp else 'None'}")
return {}

View file

@@ -99,8 +99,11 @@ async def process_event(
timeout=6_000,
)
if resp.status != 200:
log.warning(f"URL {url_num}) Status Code: {resp.status}")
if not resp or resp.status != 200:
log.warning(
f"URL {url_num}) Status Code: {resp.status if resp else 'None'}"
)
return
try:

View file

@@ -20,14 +20,14 @@ BASE_URL = "https://sharkstreams.net"
async def process_event(url: str, url_num: int) -> str | None:
if not (r := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return
data: dict[str, list[str]] = r.json()
if not (urls := data.get("urls")):
log.info(f"URL {url_num}) No M3U8 found")
log.warning(f"URL {url_num}) No M3U8 found")
return
@@ -39,8 +39,6 @@ async def process_event(url: str, url_num: int) -> str | None:
async def refresh_html_cache(now_ts: float) -> dict[str, dict[str, str | float]]:
log.info("Refreshing HTML cache")
events = {}
if not (html_data := await network.request(BASE_URL, log=log)):
@@ -92,6 +90,8 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
now = Time.clean(Time.now())
if not (events := HTML_CACHE.load()):
log.info("Refreshing HTML cache")
events = await refresh_html_cache(now.timestamp())
HTML_CACHE.write(events)

View file

@@ -31,7 +31,7 @@ async def process_event(url: str, url_num: int) -> str | None:
valid_m3u8 = re.compile(r'var\s+(\w+)\s*=\s*"([^"]*)"', re.I)
if not (match := valid_m3u8.search(html_data.text)):
log.info(f"URL {url_num}) No M3U8 found")
log.warning(f"URL {url_num}) No M3U8 found")
return
stream_link: str = match[2]

View file

@@ -46,8 +46,6 @@ def get_event(t1: str, t2: str) -> str:
async def refresh_api_cache(now_ts: float) -> list[dict[str, Any]]:
log.info("Refreshing API cache")
tasks = [
network.request(
urljoin(BASE_URL, f"data/{sport}.json"),
@@ -73,6 +71,8 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
now = Time.clean(Time.now())
if not (api_data := API_FILE.load(per_entry=False, index=-1)):
log.info("Refreshing API cache")
api_data = await refresh_api_cache(now.timestamp())
API_FILE.write(api_data)

View file

@@ -26,7 +26,7 @@ def fix_txt(s: str) -> str:
async def process_event(url: str, url_num: int) -> str | None:
if not (event_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return
@@ -43,7 +43,7 @@ async def process_event(url: str, url_num: int) -> str | None:
return
if not (iframe_1_src_data := await network.request(iframe_1_src, log=log)):
log.info(f"URL {url_num}) Failed to load iframe source.")
log.warning(f"URL {url_num}) Failed to load iframe source.")
return
@@ -66,7 +66,7 @@ async def process_event(url: str, url_num: int) -> str | None:
headers={"Referer": iframe_1_src},
)
):
log.info(f"URL {url_num}) Failed to load iframe source.")
log.warning(f"URL {url_num}) Failed to load iframe source.")
return

View file

@@ -18,7 +18,7 @@ BASE_URL = "https://thetvapp.to"
async def process_event(url: str, url_num: int) -> str | None:
if not (html_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return

View file

@@ -256,8 +256,10 @@ class Network:
timeout=6_000,
)
if resp.status != 200:
log.warning(f"URL {url_num}) Status Code: {resp.status}")
if not resp or resp.status != 200:
log.warning(
f"URL {url_num}) Status Code: {resp.status if resp else 'None'}"
)
return

View file

@@ -30,7 +30,7 @@ def fix_event(s: str) -> str:
async def process_event(url: str, url_num: int) -> str | None:
if not (event_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return
@@ -53,7 +53,7 @@ async def process_event(url: str, url_num: int) -> str | None:
log=log,
)
):
log.info(f"URL {url_num}) Failed to load iframe source.")
log.warning(f"URL {url_num}) Failed to load iframe source.")
return

View file

@@ -5,7 +5,7 @@ from itertools import chain
from typing import Any
from urllib.parse import urljoin
from playwright.async_api import Browser, Page, TimeoutError
from playwright.async_api import Browser, Page, Response, TimeoutError
from .utils import Cache, Time, get_logger, leagues, network
@@ -42,8 +42,6 @@ VALID_SPORTS = [
async def refresh_api_cache(now: Time) -> list[dict[str, Any]]:
log.info("Refreshing API cache")
tasks = [
network.request(
urljoin(API_URL, "api/v1/matches/all"),
@@ -66,16 +64,25 @@ async def refresh_api_cache(now: Time) -> list[dict[str, Any]]:
return data
def sift_xhr(resp: Response, match_id: int) -> bool:
resp_url = resp.url
return (
f"/en/stream/{match_id}/" in resp_url
and "_rsc=" not in resp_url
and resp.status == 200
)
async def process_event(
url: str,
match_id: int,
url_num: int,
page: Page,
) -> tuple[str | None, str | None]:
nones = None, None
pattern = re.compile(r"\((\d+)\)")
captured: list[str] = []
got_one = asyncio.Event()
@@ -86,46 +93,30 @@ async def process_event(
got_one=got_one,
)
strm_handler = partial(sift_xhr, match_id=match_id)
page.on("request", handler)
try:
resp = await page.goto(
url,
wait_until="domcontentloaded",
timeout=8_000,
)
if resp.status != 200:
log.warning(f"URL {url_num}) Status Code: {resp.status}")
return
await page.wait_for_timeout(2_000)
try:
header = await page.wait_for_selector("text=/Stream Links/i", timeout=4_000)
async with page.expect_response(strm_handler, timeout=2_500) as strm_resp:
resp = await page.goto(
url,
wait_until="domcontentloaded",
timeout=6_000,
)
text = await header.inner_text()
if not resp or resp.status != 200:
log.warning(
f"URL {url_num}) Status Code: {resp.status if resp else 'None'}"
)
return nones
response = await strm_resp.value
stream_url = response.url
except TimeoutError:
log.warning(f"URL {url_num}) Can't find stream links header.")
return nones
if not (match := pattern.search(text)) or int(match[1]) == 0:
log.warning(f"URL {url_num}) No available stream links.")
return nones
try:
first_available = await page.wait_for_selector(
'a[href*="/stream/"]',
timeout=3_000,
)
except TimeoutError:
log.warning(f"URL {url_num}) No available stream links.")
return nones
if not (href := await first_available.get_attribute("href")):
log.warning(f"URL {url_num}) No available stream links.")
return nones
@@ -133,7 +124,7 @@ async def process_event(
embed = re.sub(
pattern=r"^.*\/stream",
repl="https://spiderembed.top/embed",
string=href,
string=stream_url,
)
await page.goto(
@@ -182,6 +173,8 @@ async def get_events(base_url: str, cached_keys: list[str]) -> list[dict[str, st
now = Time.clean(Time.now())
if not (api_data := API_FILE.load(per_entry=False, index=-1)):
log.info("Refreshing API cache")
api_data = await refresh_api_cache(now)
API_FILE.write(api_data)
@@ -226,6 +219,7 @@ async def get_events(base_url: str, cached_keys: list[str]) -> list[dict[str, st
"sport": sport,
"event": name,
"link": urljoin(base_url, f"stream/{match_id}"),
"match-id": match_id,
"logo": logo,
"timestamp": event_dt.timestamp(),
}
@@ -263,6 +257,7 @@ async def scrape(browser: Browser) -> None:
handler = partial(
process_event,
url=(link := ev["link"]),
match_id=ev["match-id"],
url_num=i,
page=page,
)

View file

@@ -27,7 +27,7 @@ def fix_event(s: str) -> str:
async def process_event(url: str, url_num: int) -> str | None:
if not (event_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return
@@ -50,7 +50,7 @@ async def process_event(url: str, url_num: int) -> str | None:
log=log,
)
):
log.info(f"URL {url_num}) Failed to load iframe source.")
log.warning(f"URL {url_num}) Failed to load iframe source.")
return

View file

@@ -32,7 +32,7 @@ async def process_event(url: str, url_num: int) -> tuple[str | None, str | None]
nones = None, None
if not (html_data := await network.request(url, log=log)):
log.info(f"URL {url_num}) Failed to load url.")
log.warning(f"URL {url_num}) Failed to load url.")
return nones
soup = HTMLParser(html_data.content)

View file

@@ -1,10 +1,11 @@
## Base Log @ 2026-03-03 09:07 UTC
## Base Log @ 2026-03-04 09:02 UTC
### ✅ Working Streams: 160<br>❌ Dead Streams: 1
### ✅ Working Streams: 159<br>❌ Dead Streams: 2
| Channel | Error (Code) | Link |
| ------- | ------------ | ---- |
| Golf Channel | HTTP Error (000) | `http://hardcoremedia.xyz/live/rabdsbmz/3731346838/258721.ts` |
| Altitude Sports | HTTP Error (403) | `http://hardcoremedia.xyz/live/rabdsbmz/3731346838/141447.ts` |
| Space City Home Network | HTTP Error (403) | `http://hardcoremedia.xyz/live/rabdsbmz/3731346838/229934.ts` |
---
#### Base Channels URL
```