update M3U8
parent 7bb23b897e
commit 714a967226
9 changed files with 445 additions and 1965 deletions
1200  M3U8/TV.m3u8
File diff suppressed because it is too large
@@ -364,7 +364,7 @@ http://fl1.moveonjoy.com/SHOWTIME/index.m3u8
 http://fl1.moveonjoy.com/SMITHSONIAN_CHANNEL/index.m3u8
 
 #EXTINF:-1 tvg-chno="122" tvg-id="Sony.Movie.Channel.HD.us2" tvg-name="Sony Movie Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s69130_dark_360w_270h.png" group-title="TV",Sony Movie Channel
-http://nocable.cc:8080/91161088/91161088/2214
+http://fl1.moveonjoy.com/Sony_Movie_Channel/index.m3u8
 
 #EXTINF:-1 tvg-chno="123" tvg-id="Space.City.Home.Network.HD.us2" tvg-name="Space City Home Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s77744_dark_360w_270h.png" group-title="TV",Space City Home Network
 http://nocable.cc:8080/91161088/91161088/213668
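For reference, a minimal sketch of how playlist entries like the ones in this hunk could be parsed. It assumes only the #EXTINF/URL pairing visible above; it is not code from this repository.

import re

# Each channel is an #EXTINF attribute line followed by its stream URL.
EXTINF_ATTR = re.compile(r'(\S+?)="(.*?)"')

def parse_playlist(text: str) -> list[dict[str, str]]:
    channels: list[dict[str, str]] = []
    current: dict[str, str] | None = None
    for raw in text.splitlines():
        line = raw.strip()
        if line.startswith("#EXTINF:"):
            # Pull the key="value" attributes plus the display name after the last comma.
            current = dict(EXTINF_ATTR.findall(line))
            current["name"] = line.rsplit(",", 1)[-1]
        elif line and not line.startswith("#") and current is not None:
            current["url"] = line
            channels.append(current)
            current = None
    return channels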
1198  M3U8/events.m3u8
File diff suppressed because it is too large
@@ -42,7 +42,8 @@ async def process_event(
 
 
 async def get_events(
-    client: httpx.AsyncClient, cached_hrefs: set[str]
+    client: httpx.AsyncClient,
+    cached_hrefs: set[str],
 ) -> list[dict[str, str]]:
     try:
         r = await client.get(BASE_URL)
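The reformatted get_events signature above pairs an httpx.AsyncClient with a set of already-seen hrefs. A self-contained sketch of how such a coroutine might be driven follows; the BASE_URL value, the error handling, and the line-based parsing are illustrative assumptions, not taken from this diff.

import asyncio

import httpx

# Hypothetical stand-in: the real module defines its own BASE_URL and parsing logic.
BASE_URL = "https://example.com/schedule"

async def get_events(
    client: httpx.AsyncClient,
    cached_hrefs: set[str],
) -> list[dict[str, str]]:
    try:
        r = await client.get(BASE_URL)
        r.raise_for_status()
    except httpx.HTTPError as exc:
        print(f"request failed: {exc!r}")
        return []
    # Placeholder parsing step: keep only links not already in the cache.
    hrefs = [line.strip() for line in r.text.splitlines() if line.startswith("http")]
    return [{"href": h} for h in hrefs if h not in cached_hrefs]

async def main() -> None:
    async with httpx.AsyncClient(timeout=10) as client:
        events = await get_events(client, cached_hrefs=set())
        print(len(events))

if __name__ == "__main__":
    asyncio.run(main())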
@@ -101,7 +101,6 @@ async def get_events(
     client: httpx.AsyncClient,
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
-
     now = Time.clean(Time.now())
 
     if not (events := HTML_CACHE.load()):
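The hunk above gates scraping on HTML_CACHE.load() via the walrus operator. A rough sketch of that cache-gate pattern, assuming a simple JSON file store; the real HTML_CACHE and Time helpers are project-specific and not shown in this diff.

import json
from collections.abc import Callable
from pathlib import Path
from typing import Any

class JsonCache:
    """Tiny file-backed cache with the load()/save() shape implied above."""

    def __init__(self, path: Path) -> None:
        self.path = path

    def load(self) -> list[dict[str, Any]]:
        if not self.path.exists():
            return []
        return json.loads(self.path.read_text())

    def save(self, items: list[dict[str, Any]]) -> None:
        self.path.write_text(json.dumps(items, indent=2))

HTML_CACHE = JsonCache(Path("html_cache.json"))

def load_or_refresh(refresh: Callable[[], list[dict[str, Any]]]) -> list[dict[str, Any]]:
    # Mirrors the walrus-operator gate above: rebuild only when the cache is empty.
    if not (events := HTML_CACHE.load()):
        events = refresh()
        HTML_CACHE.save(events)
    return events

print(load_or_refresh(lambda: [{"title": "example event"}]))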
@@ -22,6 +22,7 @@ async def get_html(
     url: str,
+    date: str,
 ) -> bytes:
 
     try:
         r = await client.get(url, params={"date": date})
         r.raise_for_status()
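get_html now takes date explicitly and forwards it as a query parameter. A hedged sketch of a full function body with this shape follows; only the signature lines and the request line appear in the hunk, so the client parameter, error handling, and return path below are assumptions.

import httpx

async def get_html(
    client: httpx.AsyncClient,
    url: str,
    date: str,
) -> bytes:
    # The date is sent as a query-string parameter, e.g. ?date=2024-05-01.
    try:
        r = await client.get(url, params={"date": date})
        r.raise_for_status()
    except httpx.HTTPStatusError as exc:
        # Non-2xx responses land here; a real scraper might retry or log instead.
        print(f"{exc.response.status_code} for {url}")
        return b""
    return r.content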
@@ -41,6 +41,7 @@ async def get_events(
     url: str,
+    cached_keys: set[str],
 ) -> list[dict[str, str]]:
 
     try:
         r = await client.get(url)
         r.raise_for_status()
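This get_events variant takes the source URL plus a set of cached keys. A sketch of how several such per-source calls could share one client via asyncio.gather; the source URLs and the key derivation are placeholders, not values from this repository.

import asyncio

import httpx

SOURCES = [
    "https://example.com/source-a",
    "https://example.com/source-b",
]

async def get_events(
    client: httpx.AsyncClient,
    url: str,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except httpx.HTTPError:
        return []
    # Placeholder: derive one key per line and skip anything already cached.
    return [{"key": k} for k in r.text.splitlines() if k and k not in cached_keys]

async def main() -> None:
    cached_keys: set[str] = set()
    async with httpx.AsyncClient(timeout=10) as client:
        results = await asyncio.gather(
            *(get_events(client, url, cached_keys) for url in SOURCES)
        )
    print(sum(len(r) for r in results))

if __name__ == "__main__":
    asyncio.run(main())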
@@ -31,7 +31,8 @@ def fix_sport(s: str) -> str:
 
 
 async def refresh_api_cache(
-    client: httpx.AsyncClient, url: str
+    client: httpx.AsyncClient,
+    url: str,
 ) -> list[dict[str, Any]]:
     log.info("Refreshing API cache")
 
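refresh_api_cache now has one parameter per line. A minimal sketch of what a function with this signature might do, assuming the endpoint returns a JSON array; the diff only shows the signature and the log line.

import logging
from typing import Any

import httpx

log = logging.getLogger(__name__)

async def refresh_api_cache(
    client: httpx.AsyncClient,
    url: str,
) -> list[dict[str, Any]]:
    log.info("Refreshing API cache")
    r = await client.get(url)
    r.raise_for_status()
    data = r.json()
    # Guard against an unexpected payload shape instead of crashing the caller.
    return data if isinstance(data, list) else []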
@@ -81,6 +81,7 @@ async def process_event(
     url_num: int,
+    context: BrowserContext,
 ) -> str | None:
 
     page = await context.new_page()
 
     captured: list[str] = []
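process_event now receives the Playwright BrowserContext it opens its page from. A hedged sketch of the capture pattern suggested by the visible lines (page creation plus a captured list); the request listener, navigation, and return value below are assumptions.

import asyncio

from playwright.async_api import BrowserContext, async_playwright

async def process_event(
    url: str,
    url_num: int,
    context: BrowserContext,
) -> str | None:
    # url_num is kept only to match the signature shown in the diff.
    page = await context.new_page()
    captured: list[str] = []
    # Record any .m3u8 request the page fires while the event page loads.
    page.on("request", lambda req: captured.append(req.url) if ".m3u8" in req.url else None)
    try:
        await page.goto(url, wait_until="domcontentloaded")
        await page.wait_for_timeout(5000)
    finally:
        await page.close()
    return captured[0] if captured else None

async def main() -> None:
    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)
        context = await browser.new_context()
        stream = await process_event("https://example.com/event/1", 1, context)
        print(stream)
        await browser.close()

if __name__ == "__main__":
    asyncio.run(main())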