parent fbee979f85
commit 00000d9fe7
2 changed files with 11 additions and 13 deletions
(first changed file: the entry-point module; its filename is not preserved in this view)

@@ -5,6 +5,7 @@ from pathlib import Path
 from scrapers import (
     fstv,
+    lotus,
     pixel,
     ppv,
     roxie,
@@ -43,6 +44,7 @@ async def main() -> None:
     tasks = [
         asyncio.create_task(fstv.scrape(network.client)),
+        asyncio.create_task(lotus.scrape(network.client)),
         asyncio.create_task(pixel.scrape(network.client)),
         asyncio.create_task(ppv.scrape(network.client)),
         asyncio.create_task(roxie.scrape(network.client)),
@@ -58,6 +60,7 @@ async def main() -> None:
     additions = (
         fstv.urls
+        | lotus.urls
         | pixel.urls
         | ppv.urls
         | roxie.urls
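For context on the | operator in the hunk above: since Python 3.9 (PEP 584), | on two dicts builds a new dict containing both, with the right-hand operand winning on duplicate keys. A self-contained sketch with hypothetical values (the real keys carry a per-scraper suffix such as "(LOTUS)", so cross-scraper collisions are avoided by construction):

    # Hypothetical entries; the real values hold url/logo/base/timestamp/id fields.
    fstv_urls = {"[NBA] Lakers vs Celtics (FSTV)": {"url": "https://example.com/a"}}
    lotus_urls = {"[NBA] Lakers vs Celtics (LOTUS)": {"url": "https://example.com/b"}}

    # PEP 584 dict union: a new dict; later operands override earlier ones on clashes.
    merged = fstv_urls | lotus_urls
    assert len(merged) == 2  # keys differ by their "(FSTV)"/"(LOTUS)" suffix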
(second changed file: the lotus scraper, scrapers/lotus.py judging from the import and the cache paths)

@@ -10,7 +10,7 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
 
-CACHE_FILE = Cache(Path(__file__).parent / "caches" / "lotus.json", exp=10_800)
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "lotus.json", exp=3_600)
 
 API_CACHE = Cache(Path(__file__).parent / "caches" / "lotus_api.json", exp=28_800)
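The Cache helper itself is not part of this diff; judging from the call sites, exp reads as a time-to-live in seconds, so this change drops the scrape cache from 3 hours (10_800) to 1 hour (3_600) while the API cache stays at 8 hours (28_800). A minimal sketch of that assumed interface, not the project's actual implementation:

    import json
    import time
    from pathlib import Path

    class Cache:
        # Assumed shape only: a JSON file considered stale once it is
        # older than exp seconds.
        def __init__(self, path: Path, exp: int) -> None:
            self.path = path
            self.exp = exp

        def fresh(self) -> bool:
            # Stale if the file is missing or past its TTL.
            try:
                return time.time() - self.path.stat().st_mtime < self.exp
            except FileNotFoundError:
                return False

        def load(self) -> dict:
            return json.loads(self.path.read_text())

        def save(self, data: dict) -> None:
            self.path.parent.mkdir(parents=True, exist_ok=True)
            self.path.write_text(json.dumps(data))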
@@ -60,9 +60,6 @@ async def get_events(
     events: list[dict[str, str]] = []
 
-    start_dt = now.delta(minutes=-30).timestamp()
-    end_dt = now.delta(minutes=30).timestamp()
-
     for info in api_data["days"]:
         day = Time.from_str(info["day_et"], "%Y-%m-%d")
@@ -75,16 +72,13 @@ async def get_events(
            if event_league == "channel tv":
                continue
 
-           sport = fix_league(event_league)
-           event_name = event["title"]
-           event_ts: int = event["ts_et"]
            event_streams: list[dict] = event["streams"]
 
            if not (event_link := event_streams[0].get("link")):
                continue
 
-           if not start_dt <= event_ts <= end_dt:
-               continue
+           sport = fix_league(event_league)
+           event_name = event["title"]
 
            key = f"[{sport}] {event_name} (LOTUS)"
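Net effect of the two get_events hunks: the ±30 minute window filter (start_dt/end_dt, and with it the event_ts bookkeeping) is gone, the cheap stream-link check now runs first, and fix_league/title are only evaluated for events that actually have a link. After the change the loop body reads, in order:

    event_streams: list[dict] = event["streams"]

    # Skip events with no usable stream link before doing any other work.
    if not (event_link := event_streams[0].get("link")):
        continue

    sport = fix_league(event_league)
    event_name = event["title"]

    key = f"[{sport}] {event_name} (LOTUS)"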
@@ -96,7 +90,6 @@ async def get_events(
                    "sport": sport,
                    "event": event_name,
                    "link": event_link,
-                   "timestamp": event_ts,
                }
            )
@@ -121,6 +114,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
     if events:
+        now = Time.now().timestamp()
+
         async with async_playwright() as p:
             browser, context = await network.browser(p, browser="brave")
@@ -140,7 +135,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
            )
 
            if url:
-               sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
+               sport, event = ev["sport"], ev["event"]
 
                tvg_id, logo = leagues.get_tvg_info(sport, event)
@@ -149,8 +144,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
                entry = {
                    "url": url,
                    "logo": logo,
-                   "base": "",
-                   "timestamp": ts,
+                   "base": "https://vividmosaica.com/",
+                   "timestamp": now,
                    "id": tvg_id or "Live.Event.us",
                }
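Two behavioral changes land in scrape: base is now the hard-coded https://vividmosaica.com/ rather than an empty string, and timestamp records when the URL was scraped (Time.now().timestamp(), taken once before the playwright loop) instead of the event's listed start time (the ts_et field dropped in the earlier hunks). A hypothetical downstream consequence, assuming entries are pruned by their timestamp field:

    # With scrape-time stamps, pruning now means "drop entries not re-scraped
    # recently", not "drop events whose listed start time has passed".
    cutoff = Time.now().timestamp() - 3_600
    live = {k: v for k, v in urls.items() if v["timestamp"] > cutoff}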