diff --git a/M3U8/fetch.py b/M3U8/fetch.py
index ab8e551..50a0d49 100644
--- a/M3U8/fetch.py
+++ b/M3U8/fetch.py
@@ -5,6 +5,7 @@ from pathlib import Path
 
 from scrapers import (
     fstv,
+    lotus,
     pixel,
     ppv,
     roxie,
@@ -43,6 +44,7 @@ async def main() -> None:
 
     tasks = [
         asyncio.create_task(fstv.scrape(network.client)),
+        asyncio.create_task(lotus.scrape(network.client)),
         asyncio.create_task(pixel.scrape(network.client)),
         asyncio.create_task(ppv.scrape(network.client)),
         asyncio.create_task(roxie.scrape(network.client)),
@@ -58,6 +60,7 @@ async def main() -> None:
 
     additions = (
         fstv.urls
+        | lotus.urls
         | pixel.urls
         | ppv.urls
         | roxie.urls
diff --git a/M3U8/scrapers/old/lotus.py b/M3U8/scrapers/lotus.py
similarity index 89%
rename from M3U8/scrapers/old/lotus.py
rename to M3U8/scrapers/lotus.py
index 6553509..9ddfd6e 100644
--- a/M3U8/scrapers/old/lotus.py
+++ b/M3U8/scrapers/lotus.py
@@ -10,7 +10,7 @@ log = get_logger(__name__)
 
 urls: dict[str, dict[str, str | float]] = {}
 
-CACHE_FILE = Cache(Path(__file__).parent / "caches" / "lotus.json", exp=10_800)
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "lotus.json", exp=3_600)
 
 API_CACHE = Cache(Path(__file__).parent / "caches" / "lotus_api.json", exp=28_800)
 
@@ -60,9 +60,6 @@ async def get_events(
 
     events: list[dict[str, str]] = []
 
-    start_dt = now.delta(minutes=-30).timestamp()
-    end_dt = now.delta(minutes=30).timestamp()
-
     for info in api_data["days"]:
         day = Time.from_str(info["day_et"], "%Y-%m-%d")
 
@@ -75,16 +72,13 @@
             if event_league == "channel tv":
                 continue
 
-            sport = fix_league(event_league)
-            event_name = event["title"]
-            event_ts: int = event["ts_et"]
             event_streams: list[dict] = event["streams"]
 
             if not (event_link := event_streams[0].get("link")):
                 continue
 
-            if not start_dt <= event_ts <= end_dt:
-                continue
+            sport = fix_league(event_league)
+            event_name = event["title"]
 
             key = f"[{sport}] {event_name} (LOTUS)"
 
@@ -96,7 +90,6 @@
                     "sport": sport,
                     "event": event_name,
                     "link": event_link,
-                    "timestamp": event_ts,
                 }
             )
 
@@ -121,6 +114,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
     if events:
+        now = Time.now().timestamp()
+
         async with async_playwright() as p:
             browser, context = await network.browser(p, browser="brave")
 
@@ -140,7 +135,7 @@
             )
 
             if url:
-                sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
+                sport, event = ev["sport"], ev["event"]
 
                 tvg_id, logo = leagues.get_tvg_info(sport, event)
 
@@ -149,8 +144,8 @@
                 entry = {
                     "url": url,
                     "logo": logo,
-                    "base": "",
-                    "timestamp": ts,
+                    "base": "https://vividmosaica.com/",
+                    "timestamp": now,
                     "id": tvg_id or "Live.Event.us",
                 }