This commit is contained in:
parent 308d607660
commit 00000d9a4d
3 changed files with 29 additions and 23 deletions
@@ -1,8 +1,9 @@
+import json
 import re
 
 import httpx
 from playwright.async_api import async_playwright
 
-from .utils import Cache, Time, get_logger, leagues
+from .utils import Cache, Time, get_logger, leagues, network
 
 log = get_logger(__name__)
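The `network` helper imported here is new in this commit; it lives in `.utils` and its implementation is not part of this diff. Judging from the call site in the next hunk (`browser, context = await network.browser(p)`), a minimal sketch of what it might look like; only the name and the `(browser, context)` return shape come from the diff, the launch options are illustrative assumptions:

```python
# Hypothetical sketch of the .utils network helper this diff assumes;
# the headless flag and lack of context options are guesses.
from playwright.async_api import Browser, BrowserContext, Playwright


async def browser(p: Playwright) -> tuple[Browser, BrowserContext]:
    # Headless Chromium plus a fresh context; the caller is responsible
    # for closing the browser (refresh_api_cache does so in its finally).
    instance = await p.chromium.launch(headless=True)
    context = await instance.new_context()
    return instance, context
```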
@@ -16,36 +17,45 @@ BASE_URL = "https://pixelsport.tv/backend/livetv/events"
 
 
 async def refresh_api_cache(
-    client: httpx.AsyncClient,
     url: str,
     ts: float,
 ) -> dict[str, list[dict, str, str]]:
     log.info("Refreshing API cache")
 
-    try:
-        r = await client.get(url)
-        r.raise_for_status()
-    except Exception as e:
-        log.error(f'Failed to fetch "{url}": {e}')
-
-        return {}
-
-    data = r.json()
+    async with async_playwright() as p:
+        try:
+            browser, context = await network.browser(p)
+
+            page = await context.new_page()
+
+            await page.goto(
+                url,
+                wait_until="domcontentloaded",
+                timeout=10_000,
+            )
+
+            raw_json = await page.locator("pre").inner_text()
+
+        except Exception as e:
+            log.error(f'Failed to fetch "{url}": {e}')
+
+            return {}
+
+        finally:
+            await browser.close()
+
+    data = json.loads(raw_json)
 
     data["timestamp"] = ts
 
     return data
 
 
-async def get_events(
-    client: httpx.AsyncClient,
-    cached_keys: set[str],
-) -> dict[str, str | float]:
+async def get_events(cached_keys: set[str]) -> dict[str, str | float]:
     now = Time.clean(Time.now())
 
     if not (api_data := API_FILE.load(per_entry=False)):
         api_data = await refresh_api_cache(
-            client,
             BASE_URL,
             now.timestamp(),
         )
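The fetch now goes through a real browser instead of a bare httpx GET: Chromium renders a raw JSON response as a document whose body is wrapped in a single `<pre>` element, so `page.locator("pre").inner_text()` recovers the verbatim payload, which `json.loads` then parses. A self-contained sketch of the same technique; the URL and launch options are placeholders, not from the commit:

```python
# Standalone sketch of the fetch-JSON-through-a-browser technique used
# in refresh_api_cache. The URL below is a placeholder.
import asyncio
import json

from playwright.async_api import async_playwright


async def fetch_json(url: str) -> dict:
    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)
        try:
            page = await browser.new_page()
            await page.goto(url, wait_until="domcontentloaded", timeout=10_000)
            # Chromium wraps a raw JSON (or text/plain) response body in a
            # single <pre> element, so its inner text is the full payload.
            raw = await page.locator("pre").inner_text()
        finally:
            await browser.close()
    return json.loads(raw)


if __name__ == "__main__":
    data = asyncio.run(fetch_json("https://example.com/api.json"))  # placeholder URL
    print(type(data))
```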
@@ -100,7 +110,7 @@ async def get_events(
     return events
 
 
-async def scrape(client: httpx.AsyncClient) -> None:
+async def scrape() -> None:
     cached_urls = CACHE_FILE.load()
     cached_count = len(cached_urls)
     urls.update(cached_urls)
@@ -109,7 +119,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(client, set(cached_urls.keys()))
+    events = await get_events(set(cached_urls.keys()))
 
     if events:
         for d in (urls, cached_urls):
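With `client` dropped from `scrape`, `get_events`, and `refresh_api_cache`, callers no longer construct or thread through an `httpx.AsyncClient`; the browser is created and torn down entirely inside `refresh_api_cache`. A minimal driver sketch of the new signatures; the import path is a placeholder, only the names `refresh_api_cache` and `BASE_URL` appear in the diff:

```python
# Sketch of calling the client-free API after this commit. The module
# path "scraper" is hypothetical; the real package name isn't shown.
import asyncio
import time

from scraper import BASE_URL, refresh_api_cache  # hypothetical import path


async def main() -> None:
    # No httpx.AsyncClient to construct or pass any more; Playwright is
    # managed entirely inside refresh_api_cache.
    data = await refresh_api_cache(BASE_URL, time.time())
    print(data.get("timestamp"))


asyncio.run(main())
```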