Compare commits


No commits in common. "8df77c1f139f20513cf7cdc20fd50f8301386918" and "cdb7d9f94d139f21ca25c6161b179a9014b8a245" have entirely different histories.

30 changed files with 116613 additions and 120393 deletions

File diff suppressed because it is too large

M3U8/TV.xml (231245 changes)

File diff suppressed because one or more lines are too long


@@ -3,7 +3,7 @@
#EXTINF:-1 tvg-chno="1" tvg-id="A.and.E.HD.East.us2" tvg-name="A&E TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s51529_dark_360w_270h.png" group-title="TV",A&E TV
http://41.205.93.154/AandE/index.m3u8
#EXTINF:-1 tvg-chno="2" tvg-id="ABC.National.Feed.us2" tvg-name="ABC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10003_dark_360w_270h.png" group-title="TV",ABC
#EXTINF:-1 tvg-chno="2" tvg-id="KMBC-DT.us_locals1" tvg-name="ABC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10003_dark_360w_270h.png" group-title="TV",ABC
http://stream.cammonitorplus.net/1790/index.m3u8?token=MnE3ZWg1YkgxdFdWZlo2c2hLMkltWnJhcFo1OHhxcXVyb2pKazZXaWxZRERxNEduaVp1UnBxU2VlWmF0ZnRlRGxaMm1zNStDbnJOOXFZMlhtcStybmc9PQ==
#EXTINF:-1 tvg-chno="3" tvg-id="ACC.Network.us2" tvg-name="ACC Network" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s111871_dark_360w_270h.png" group-title="TV",ACC Network
@@ -43,7 +43,7 @@ http://212.102.60.231/BET/index.m3u8
http://23.237.104.106:8080/USA_BTN/index.m3u8
#EXTINF:-1 tvg-chno="15" tvg-id="Bloomberg.HD.us2" tvg-name="Bloomberg TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s71799_dark_360w_270h.png" group-title="TV",Bloomberg TV
http://41.205.93.154/BLOOMBERG/index.m3u8
https://live-manifest.production-public.tubi.io/live/a5aabf5a-9615-4987-8163-18edfa1360c6/playlist.m3u8
#EXTINF:-1 tvg-chno="16" tvg-id="Boomerang.us2" tvg-name="Boomerang" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s21883_dark_360w_270h.png" group-title="TV",Boomerang
http://23.237.104.106:8080/USA_BOOMERANG/index.m3u8
@@ -63,7 +63,7 @@ http://mytvstream.net:8080/live/bn80NG/909467/136589.m3u8
#EXTINF:-1 tvg-chno="21" tvg-id="Cartoon.Network.HD.us2" tvg-name="Cartoon Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12131_dark_360w_270h.png" group-title="TV",Cartoon Network
http://23.237.104.106:8080/USA_CARTOON_NETWORK/index.m3u8
#EXTINF:-1 tvg-chno="22" tvg-id="CBS.Streaming.SD.East.feed.us2" tvg-name="CBS" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10098_dark_360w_270h.png" group-title="TV",CBS
#EXTINF:-1 tvg-chno="22" tvg-id="WCBS-DT.us_locals1" tvg-name="CBS" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10098_dark_360w_270h.png" group-title="TV",CBS
http://stream.cammonitorplus.net/1810/index.m3u8?token=MnE3ZWg1YkgxdFdWZlo2c2hLMkltWnJhcFo1OHhxcXVyb2pKazZXaWxZRERxNEduaVp1UnBxU2VlWmF0ZnRlRGxaMm1zNStDbnJOOXFZMlhtcStybmc9PQ==
#EXTINF:-1 tvg-chno="23" tvg-id="CBS.News.National.Stream.us2" tvg-name="CBS News 24/7" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s104846_dark_360w_270h.png" group-title="TV",CBS News 24/7
@@ -130,7 +130,7 @@ http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129867.ts
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/257087.ts
#EXTINF:-1 tvg-chno="44" tvg-id="Disney.XD.HD.us2" tvg-name="Disney XD" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18279_dark_360w_270h.png" group-title="TV",Disney XD
http://23.237.104.106:8080/USA_DISNEY_XD/index.m3u8
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/130092.ts
#EXTINF:-1 tvg-chno="45" tvg-id="E!.Entertainment.Television.HD.us2" tvg-name="E! Entertainment" tvg-logo="https://i.gyazo.com/f73b80e3eb56cec06df6705d00e2f422.png" group-title="TV",E! Entertainment
http://23.237.104.106:8080/USA_E/index.m3u8
@@ -187,7 +187,7 @@ http://mytvstream.net:8080/live/bn80NG/909467/78599.m3u8
https://jmp2.uk/stvp-IN270000230
#EXTINF:-1 tvg-chno="63" tvg-id="Food.Network.HD.us2" tvg-name="Food Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s44718_dark_360w_270h.png" group-title="TV",Food Network
http://23.237.104.106:8080/USA_FOOD_NETWORK/index.m3u8
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129872.ts
#EXTINF:-1 tvg-chno="64" tvg-id="WJBK-DT.us_locals1" tvg-name="Fox" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s28719_dark_360w_270h.png" group-title="TV",Fox
http://stream.cammonitorplus.net/1772/index.m3u8?token=MnE3ZWg1YkgxdFdWZlo2c2hLMkltWnJhcFo1OHhxcXVyb2pKazZXaWxZRERxNEduaVp1UnBxU2VlWmF0ZnRlRGxaMm1zNStDbnJOOXFZMlhtcStybmc9PQ==
@@ -250,13 +250,13 @@ http://23.237.104.106:8080/USA_HBO/index.m3u8
http://23.237.104.106:8080/USA_HBO2/index.m3u8
#EXTINF:-1 tvg-chno="84" tvg-id="HBO.Comedy.HD.us2" tvg-name="HBO Comedy" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s59839_dark_360w_270h.png" group-title="TV",HBO Comedy
http://23.237.104.106:8080/USA_HBO_COMEDY/index.m3u8
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129990.ts
#EXTINF:-1 tvg-chno="85" tvg-id="HBO.Drama.us2" tvg-name="HBO Family" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s34879_dark_360w_270h.png" group-title="TV",HBO Family
http://mytvstream.net:8080/live/bn80NG/909467/17772.m3u8
#EXTINF:-1 tvg-chno="86" tvg-id="HBO.Zone.HD.us2" tvg-name="HBO Zone" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18431_dark_360w_270h.png" group-title="TV",HBO Zone
http://23.237.104.106:8080/USA_HBO_ZONE/index.m3u8
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129995.ts
#EXTINF:-1 tvg-chno="87" tvg-id="History.HD.us2" tvg-name="History Channel" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s87679_dark_360w_270h.png" group-title="TV",History Channel
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129892.ts
@@ -303,8 +303,8 @@ http://23.237.104.106:8080/USA_NAT_GEO/index.m3u8
#EXTINF:-1 tvg-chno="101" tvg-id="NBA.TV.HD.us2" tvg-name="NBA TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s32281_dark_360w_270h.png" group-title="TV",NBA TV
http://212.102.60.231/NBA_TV/index.m3u8
#EXTINF:-1 tvg-chno="102" tvg-id="NBC.East.Stream.us2" tvg-name="NBC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10991_dark_360w_270h.png" group-title="TV",NBC
http://stream.cammonitorplus.net/1765/index.m3u8?token=MnE3ZWg1YkgxdFdWZlo2c2hLMkltWnJhcFo1OHhxcXVyb2pKazZXaWxZRERxNEduaVp1UnBxU2VlWmF0ZnRlRGxaMm1zNStDbnJOOXFZMlhtcStybmc9PQ==
#EXTINF:-1 tvg-chno="102" tvg-id="WFLA-DT.us_locals1" tvg-name="NBC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10991_dark_360w_270h.png" group-title="TV",NBC
http://stream.cammonitorplus.net/1842/index.m3u8?token=MnE3ZWg1YkgxdFdWZlo2c2hLMkltWnJhcFo1OHhxcXVyb2pKazZXaWxZRERxNEduaVp1UnBxU2VlWmF0ZnRlRGxaMm1zNStDbnJOOXFZMlhtcStybmc9PQ==
#EXTINF:-1 tvg-chno="103" tvg-id="NBC.Sports.Bay.Area.HD.us2" tvg-name="NBC Sports Bay Area" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s63138_dark_360w_270h.png" group-title="TV",NBC Sports Bay Area
http://mytvstream.net:8080/live/bn80NG/909467/9900.m3u8
@@ -322,7 +322,7 @@ https://starshare.st/live/P4B9TB9xR8/humongous2tonight/1001.ts
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/136477.ts
#EXTINF:-1 tvg-chno="108" tvg-id="New.England.Sports.Network.HD.us2" tvg-name="NESN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s35038_dark_360w_270h.png" group-title="TV",NESN
http://23.237.104.106:8080/USA_NESN/index.m3u8
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/141484.ts
#EXTINF:-1 tvg-chno="109" tvg-id="NewsNation.us2" tvg-name="NewsNation" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s91096_dark_360w_270h.png" group-title="TV",NewsNation
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129931.ts
@@ -469,10 +469,10 @@ http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129971.ts
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129973.ts
#EXTINF:-1 tvg-chno="157" tvg-id="VH1.HD.us2" tvg-name="VH1" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11218_dark_360w_270h.png" group-title="TV",VH1
http://23.237.104.106:8080/USA_VH1/index.m3u8
https://starshare.st/live/P4B9TB9xR8/humongous2tonight/3414.ts
#EXTINF:-1 tvg-chno="158" tvg-id="Vice.HD.us2" tvg-name="Vice TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18822_dark_360w_270h.png" group-title="TV",Vice TV
http://23.237.104.106:8080/USA_VICETV/index.m3u8
http://hardcoremedia.xyz/live/rabdsbmz/3731346838/129976.ts
#EXTINF:-1 tvg-chno="159" tvg-id="WE.tv.HD.us2" tvg-name="We Tv" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16409_dark_360w_270h.png" group-title="TV",We Tv
http://23.237.104.106:8080/USA_WE_TV/index.m3u8
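The playlist hunks above follow the extended M3U convention used throughout this file: each `#EXTINF:-1` line carries `key="value"` attributes (`tvg-chno`, `tvg-id`, `tvg-name`, `tvg-logo`, `group-title`) plus a display name after the final comma, and the line that follows is the stream URL. A minimal parsing sketch for one such pair, assuming only that attribute layout (the helper name is hypothetical, not part of the repository):

```python
import re

ATTR_RE = re.compile(r'([\w-]+)="([^"]*)"')

def parse_extinf(extinf_line: str, url_line: str) -> dict[str, str]:
    """Split one #EXTINF/URL pair into its tvg-* attributes, display name, and stream URL."""
    attrs = dict(ATTR_RE.findall(extinf_line))
    name = extinf_line.rsplit(",", 1)[-1].strip()
    return {"name": name, "url": url_line.strip(), **attrs}

entry = parse_extinf(
    '#EXTINF:-1 tvg-chno="1" tvg-id="A.and.E.HD.East.us2" tvg-name="A&E TV" group-title="TV",A&E TV',
    "http://41.205.93.154/AandE/index.m3u8",
)
print(entry["tvg-chno"], entry["name"], entry["url"])  # 1 A&E TV http://41.205.93.154/AandE/index.m3u8
```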

File diff suppressed because it is too large


@@ -73,7 +73,7 @@ async def main() -> None:
asyncio.create_task(streamcenter.scrape(hdl_brwsr)),
# asyncio.create_task(streamhub.scrape(xtrnl_brwsr)),
asyncio.create_task(streamsgate.scrape(xtrnl_brwsr)),
# asyncio.create_task(timstreams.scrape(xtrnl_brwsr)),
asyncio.create_task(timstreams.scrape(xtrnl_brwsr)),
]
httpx_tasks = [
@@ -86,7 +86,7 @@ async def main() -> None:
asyncio.create_task(totalsportek.scrape()),
asyncio.create_task(tvapp.scrape()),
asyncio.create_task(volokit.scrape()),
# asyncio.create_task(xstreameast.scrape()),
asyncio.create_task(xstreameast.scrape()),
asyncio.create_task(webcast.scrape()),
]
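Uncommenting the `timstreams` and `xstreameast` entries above re-enables those scrapers; both lists hold plain `asyncio.Task` objects created up front. How `main()` consumes the lists is not shown in this hunk, so the `asyncio.gather` call below is only an assumed sketch of that step:

```python
import asyncio

async def run_tasks(browser_tasks: list[asyncio.Task], httpx_tasks: list[asyncio.Task]) -> None:
    # return_exceptions=True keeps one failing scraper from cancelling the rest.
    results = await asyncio.gather(*browser_tasks, *httpx_tasks, return_exceptions=True)
    for result in results:
        if isinstance(result, Exception):
            print(f"scraper failed: {result!r}")
```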


@@ -97,7 +97,9 @@ async def scrape(browser: Browser) -> None:
log.info(f'Scraping from "{API_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser) as context:
@@ -140,7 +142,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = cached_urls[key] = entry
log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
if new_count := len(cached_urls) - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -87,7 +87,9 @@ async def scrape(browser: Browser) -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser) as context:
@@ -130,7 +132,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = cached_urls[key] = entry
log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
if new_count := len(cached_urls) - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -99,7 +99,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_hrefs):
events = await get_events(cached_hrefs)
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())
@@ -141,7 +143,8 @@ async def scrape() -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -112,7 +112,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())
@@ -153,7 +155,8 @@ async def scrape() -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -204,7 +204,9 @@ async def scrape(browser: Browser) -> None:
log.info('Scraping from "https://livetv.sx/enx/"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser, ignore_https=True) as context:
@@ -252,7 +254,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -96,7 +96,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())
@@ -137,7 +139,8 @@ async def scrape() -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -99,7 +99,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())
@@ -140,7 +142,8 @@ async def scrape() -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -97,6 +97,6 @@ async def scrape(browser: Browser) -> None:
urls.update(events or {})
log.info(f"Collected and cached {len(urls)} new event(s)")
CACHE_FILE.write(urls)
log.info(f"Collected and cached {len(urls)} new event(s)")


@@ -106,7 +106,9 @@ async def scrape(browser: Browser) -> None:
log.info(f'Scraping from "{api_url}"')
if events := await get_events(api_url, cached_urls.keys()):
events = await get_events(api_url, cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser, stealth=False) as context:
@@ -157,7 +159,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -199,7 +199,9 @@ async def scrape(browser: Browser) -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser) as context:
@@ -245,7 +247,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -124,7 +124,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
for i, ev in enumerate(events, start=1):
@@ -163,7 +165,8 @@ async def scrape() -> None:
urls[key] = cached_urls[key] = entry
log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
if new_count := len(cached_urls) - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -99,7 +99,9 @@ async def scrape(browser: Browser) -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())
@@ -140,7 +142,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = cached_urls[key] = entry
log.info(f"Collected and cached {len(cached_urls) - cached_count} new event(s)")
if new_count := len(cached_urls) - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -103,7 +103,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events():
events = await get_events()
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())


@@ -102,7 +102,9 @@ async def scrape(browser: Browser) -> None:
log.info('Scraping from "https://streamcenter.xyz"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser) as context:
@@ -151,7 +153,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -143,7 +143,9 @@ async def scrape(browser: Browser) -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser, stealth=False) as context:
@@ -192,7 +194,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -133,7 +133,9 @@ async def scrape(browser: Browser) -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser, stealth=False) as context:
@@ -182,7 +184,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -117,7 +117,9 @@ async def scrape(browser: Browser) -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser, stealth=False) as context:
@@ -166,7 +168,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -145,7 +145,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())
@@ -186,7 +188,8 @@ async def scrape() -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -85,7 +85,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events():
events = await get_events()
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())


@@ -115,13 +115,8 @@ class Network:
return
@cache
@staticmethod
def stealth_js() -> str:
return (Path(__file__).parent / "stealth.js").read_text(encoding="utf-8")
@cache
@staticmethod
def blocked_domains() -> list[str]:
return (
(Path(__file__).parent / "easylist.txt")
@@ -177,7 +172,7 @@ class Network:
),
)
await context.add_init_script(script=Network.stealth_js())
await context.add_init_script(path=Path(__file__).parent / "stealth.js")
await context.route("**/*", Network._adblock)
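This hunk removes the cached `stealth_js()` helper and switches to Playwright's `path=` form of `add_init_script`, which reads `stealth.js` itself and injects it into every new page in the context. Both keyword forms exist in Playwright's Python API; a minimal before/after sketch (the surrounding context setup is assumed):

```python
from pathlib import Path

from playwright.async_api import BrowserContext

STEALTH_JS = Path(__file__).parent / "stealth.js"

async def apply_stealth(context: BrowserContext) -> None:
    # Before: read the script manually and pass its text.
    # await context.add_init_script(script=STEALTH_JS.read_text(encoding="utf-8"))

    # After: hand Playwright the path and let it load the file.
    await context.add_init_script(path=STEALTH_JS)
```

The `context.route("**/*", Network._adblock)` line is unchanged; request blocking still goes through the easylist-backed handler.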


@@ -124,7 +124,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())
@@ -165,7 +167,8 @@ async def scrape() -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -250,7 +250,9 @@ async def scrape(browser: Browser) -> None:
log.info(f'Scraping from "{base_url}"')
if events := await get_events(base_url, cached_urls.keys()):
events = await get_events(base_url, cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
async with network.event_context(browser, stealth=False) as context:
@@ -300,7 +302,8 @@ async def scrape(browser: Browser) -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -124,7 +124,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{' & '.join(BASE_URLS.values())}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())
@@ -165,7 +167,8 @@ async def scrape() -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -133,7 +133,9 @@ async def scrape() -> None:
log.info(f'Scraping from "{BASE_URL}"')
if events := await get_events(cached_urls.keys()):
events = await get_events(cached_urls.keys())
if events:
log.info(f"Processing {len(events)} new URL(s)")
now = Time.clean(Time.now())
@@ -174,7 +176,8 @@ async def scrape() -> None:
urls[key] = entry
log.info(f"Collected and cached {valid_count - cached_count} new event(s)")
if new_count := valid_count - cached_count:
log.info(f"Collected and cached {new_count} new event(s)")
else:
log.info("No new events found")


@@ -1,12 +1,14 @@
## Base Log @ 2026-03-02 09:06 UTC
## Base Log @ 2026-03-01 08:53 UTC
### ✅ Working Streams: 158<br>❌ Dead Streams: 3
### ✅ Working Streams: 156<br>❌ Dead Streams: 5
| Channel | Error (Code) | Link |
| ------- | ------------ | ---- |
| CMT | HTTP Error (404) | `http://23.237.104.106:8080/USA_CMT/index.m3u8` |
| Disney XD | HTTP Error (000) | `http://hardcoremedia.xyz/live/rabdsbmz/3731346838/130092.ts` |
| Disney | HTTP Error (000) | `http://hardcoremedia.xyz/live/rabdsbmz/3731346838/257087.ts` |
| FYI TV | HTTP Error (000) | `http://hardcoremedia.xyz/live/rabdsbmz/3731346838/130105.ts` |
| Golf Channel | HTTP Error (000) | `http://hardcoremedia.xyz/live/rabdsbmz/3731346838/258721.ts` |
| NBC Sports California | HTTP Error (403) | `http://hardcoremedia.xyz/live/rabdsbmz/3731346838/136474.ts` |
| NBC Sports NOW | HTTP Error (403) | `https://starshare.st/live/P4B9TB9xR8/humongous2tonight/1001.ts` |
---
#### Base Channels URL
```