Mirror of https://github.com/doms9/iptv.git, synced 2026-01-21 03:59:03 +01:00
fix hanging if no available mirrors

parent 3a1d2742f1
commit 00000d9233

8 changed files with 24 additions and 16 deletions
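The fix combines three small changes: a per-request timeout on Network.check_status(), an early return from scrape() when no mirror passes the check, and empty-payload guards in the refresh_api_cache() helpers so that data[-1] is never indexed on an empty list. A minimal sketch of the overall guard pattern, assuming a hypothetical mirror list and helper name (not the repo's actual identifiers):

    import httpx

    MIRRORS = ["https://mirror-a.example", "https://mirror-b.example"]

    async def pick_mirror(client: httpx.AsyncClient) -> str | None:
        for url in MIRRORS:
            try:
                r = await client.get(url, timeout=5)  # bounded wait per mirror
                r.raise_for_status()
            except (httpx.HTTPError, httpx.TimeoutException):
                continue  # dead mirror, try the next one
            return url
        return None  # no mirror available; the caller must bail out early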
@@ -60,7 +60,7 @@ async def main() -> None:
         asyncio.create_task(streamsgate.scrape(network.client)),
         asyncio.create_task(strmd.scrape(network.client)),
         asyncio.create_task(tvpass.scrape(network.client)),
-        #asyncio.create_task(watchfooty.scrape(network.client)),
+        # asyncio.create_task(watchfooty.scrape(network.client)),
         asyncio.create_task(webcast.scrape(network.client)),
     ]
@@ -25,7 +25,7 @@ def fix_league(s: str) -> str:
 async def refresh_api_cache(
     client: httpx.AsyncClient,
     url: str,
-    ts: float,
+    now_ts: float,
 ) -> dict[str, dict[str, str]]:
     log.info("Refreshing API cache")
@@ -37,9 +37,10 @@ async def refresh_api_cache(
         return {}

-    data = r.json()
+    if not (data := r.json()):
+        return {}

-    data["timestamp"] = ts
+    data["timestamp"] = now_ts

     return data
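The walrus form above fetches and tests the payload in one expression; it is equivalent to:

    data = r.json()
    if not data:
        return {}

Beyond skipping empty payloads, the same guard matters in the list-returning variants below, where data[-1] on an empty list would raise an IndexError.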
@@ -115,6 +115,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
         CACHE_FILE.write(cached_urls)
+        return
+
     log.info(f'Scraping from "{base_url}"')

     events = await get_events(
         client,
         api_url,
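The hunk only shows the added early exit; a plausible reading of the surrounding block, with the condition assumed and the other names taken from the hunk:

    if not base_url:                   # no mirror passed the health check
        CACHE_FILE.write(cached_urls)  # fall back to the previously cached URLs
        return                         # added exit so scrape() stops instead of hanging

    log.info(f'Scraping from "{base_url}"')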
@@ -45,9 +45,10 @@ async def refresh_api_cache(
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')

-        return {}
+        return []

-    data = r.json()
+    if not (data := r.json()):
+        return []

-    data[-1]["timestamp"] = ts
+    data[-1]["timestamp"] = now_ts
@@ -59,7 +59,8 @@ async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any

 async def refresh_api_cache(
-    client: httpx.AsyncClient, ts: float
+    client: httpx.AsyncClient,
+    now_ts: float,
 ) -> list[dict[str, Any]]:
     log.info("Refreshing API cache")
@@ -70,12 +71,13 @@ async def refresh_api_cache(

     results = await asyncio.gather(*tasks)

-    data = list(chain(*results))
+    if not (data := list(chain(*results))):
+        return []

     for ev in data:
         ev["ts"] = ev.pop("timestamp")

-    data[-1]["timestamp"] = ts
+    data[-1]["timestamp"] = now_ts

     return data
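Here list(chain(*results)) flattens the per-task result lists from asyncio.gather() into one event list, and the new guard returns early when every task came back empty instead of letting data[-1] fail later. For example:

    from itertools import chain

    results = [[{"id": 1}], [], [{"id": 2}]]
    assert list(chain(*results)) == [{"id": 1}, {"id": 2}]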
@@ -38,7 +38,7 @@ def fix_sport(s: str) -> str:
 async def refresh_api_cache(
     client: httpx.AsyncClient,
     url: str,
-    ts: float,
+    now_ts: float,
 ) -> list[dict[str, Any]]:
     log.info("Refreshing API cache")
@@ -49,11 +49,12 @@ async def refresh_api_cache(
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')

-        return {}
+        return []

-    data = r.json()
+    if not (data := r.json()):
+        return []

-    data[-1]["timestamp"] = ts
+    data[-1]["timestamp"] = now_ts

     return data
@@ -53,7 +53,7 @@ class Network:

     async def check_status(self, url: str) -> bool:
         try:
-            r = await self.client.get(url)
+            r = await self.client.get(url, timeout=5)
             r.raise_for_status()
             return r.status_code == 200
         except (httpx.HTTPError, httpx.TimeoutException) as e:
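httpx clients default to a 5-second timeout, so an indefinite hang suggests this client was constructed with timeout=None; the per-call timeout=5 restores a bound for the health check regardless of the client default. Note also that httpx.TimeoutException subclasses httpx.HTTPError, so the second entry in the except tuple is redundant but harmless. If every request should be bounded, the same effect can be had once at construction:

    # equivalent client-wide default (a sketch, not the repo's actual setup)
    client = httpx.AsyncClient(timeout=httpx.Timeout(5.0))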
@@ -67,12 +67,13 @@ async def refresh_api_cache(

     results = await asyncio.gather(*tasks)

-    data = list(chain(*results))
+    if not (data := list(chain(*results))):
+        return []

     for ev in data:
         ev["ts"] = ev.pop("timestamp")

     data[-1]["timestamp"] = Time.now().timestamp()

     return data