parent d7c64fdeae
commit 00000d94fc
8 changed files with 38 additions and 33 deletions

@@ -60,7 +60,7 @@ async def get_events(
     events = []

-    for info in api_data["days"]:
+    for info in api_data.get("days", []):
         day = Time.from_str(info["day_et"])

         if now.date() != day.date():

@@ -55,13 +55,13 @@ async def get_events(
     start_dt = now.delta(minutes=-30)
     end_dt = now.delta(minutes=30)

-    for stream_group in api_data["streams"]:
+    for stream_group in api_data.get("streams", []):
         sport = stream_group["category"]

         if sport == "24/7 Streams":
             continue

-        for event in stream_group["streams"]:
+        for event in stream_group.get("streams", []):
             name = event.get("name")
             start_ts = event.get("starts_at")
             logo = event.get("poster")

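Both hunks above swap hard dict indexing for .get() with an empty default, so an API payload that omits "days" or "streams" produces zero iterations instead of a KeyError. A minimal sketch of the pattern; the api_data value below is made up for illustration, not a real response from these scrapers:

api_data = {"streams": [{"category": "Soccer", "streams": [{"name": "Match"}]}]}

# "days" is absent: api_data["days"] would raise KeyError, .get() just yields nothing.
for info in api_data.get("days", []):
    print(info["day_et"])

# "streams" is present, so iteration proceeds exactly as before.
for stream_group in api_data.get("streams", []):
    for event in stream_group.get("streams", []):
        print(event.get("name"))  # -> Match
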
@@ -16,7 +16,7 @@ CACHE_FILE = Cache("roxie.json", exp=10_800)
 HTML_CACHE = Cache("roxie-html.json", exp=19_800)

-MIRRORS = ["https://roxiestreams.live", "https://roxiestreams.cc"]
+BASE_URL = "https://roxiestreams.live"


 async def process_event(

@@ -143,15 +143,10 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")

-    if not (base_url := await network.get_base(MIRRORS)):
-        log.warning("No working Roxie mirrors")
-        CACHE_FILE.write(cached_urls)
-        return
-
-    log.info(f'Scraping from "{base_url}"')
+    log.info(f'Scraping from "{BASE_URL}"')

     sport_urls = {
-        sport: urljoin(base_url, sport.lower())
+        sport: urljoin(BASE_URL, sport.lower())
         for sport in ["Soccer", "MLB", "NBA", "NFL", "Fighting", "Motorsports"]
     }

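For context, network.get_base is the project's own helper; judging only from the removed branch above, it appears to probe each mirror and return the first one that responds, or something falsy when none do. A rough, hypothetical sketch of that idea with httpx — not the project's actual implementation:

import httpx

async def get_base(mirrors: list[str]) -> str | None:
    """Return the first mirror that answers without a 4xx/5xx status, else None."""
    async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client:
        for url in mirrors:
            try:
                resp = await client.get(url)
            except httpx.HTTPError:
                continue
            if resp.status_code < 400:
                return url
    return None

# e.g. asyncio.run(get_base(["https://roxiestreams.live", "https://roxiestreams.cc"]))
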
@@ -188,7 +183,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
         entry = {
             "url": url,
             "logo": logo,
-            "base": base_url,
+            "base": BASE_URL,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
         }

@@ -114,7 +114,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")

     if not (base_url := await network.get_base(MIRRORS)):
-        log.warning("No working StreamEast mirrors")
+        log.warning("No working Streameast mirrors")
         CACHE_FILE.write(cached_urls)
         return

@@ -58,7 +58,7 @@ async def get_events(
     start_dt = now.delta(hours=-1)
     end_dt = now.delta(minutes=10)

-    for category, streams in api_data["streams"].items():
+    for category, streams in api_data.get("streams", {}).items():
         if not streams:
             continue

@@ -20,7 +20,7 @@ API_FILE = Cache("strmd-api.json", exp=28_800)
 MIRRORS = ["https://streamed.pk", "https://streami.su", "https://streamed.st"]


-def validate_category(s: str) -> str:
+def fix_sport(s: str) -> str:
     if "-" in s:
         return " ".join(i.capitalize() for i in s.split("-"))

@@ -138,9 +138,7 @@ async def get_events(
     pattern = re.compile(r"[\n\r]+|\s{2,}")

     for event in api_data:
-        category = event["category"]
-
-        if category == "other":
+        if (category := event.get("category")) == "other":
             continue

         if not (ts := event["date"]):

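Collapsing the lookup and the comparison with an assignment expression keeps category in scope for the later fix_sport(category) call while also tolerating a missing key: a missing "category" yields None, which is simply not equal to "other", so the event falls through to the remaining checks rather than raising. A small sketch of the pattern, with made-up event dicts:

events = [
    {"category": "other", "title": "skip me"},
    {"category": "basketball", "title": "keep me"},
    {"title": "no category at all"},  # .get() returns None, which != "other", so it falls through
]

for event in events:
    if (category := event.get("category")) == "other":
        continue
    print(category, event["title"])
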
@@ -153,7 +151,7 @@ async def get_events(
         if not start_dt <= event_dt <= end_dt:
             continue

-        sport = validate_category(category)
+        sport = fix_sport(category)

         parts = pattern.split(event["title"].strip())
         name = " | ".join(p.strip() for p in parts if p.strip())

@@ -198,7 +196,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")

     if not (base_url := await network.get_base(MIRRORS)):
-        log.warning("No working PPV mirrors")
+        log.warning("No working STRMD mirrors")
         CACHE_FILE.write(cached_urls)
         return

@@ -16,7 +16,7 @@ CACHE_FILE = Cache("volo.json", exp=10_800)
 HTML_CACHE = Cache("volo-html.json", exp=86_400)

-BASE_URL = "http://volokit2.com/sport/"
+BASE_URL = "http://volokit2.com"

 valid_sports = {
     "boxing": "Boxing",

@@ -131,15 +131,26 @@ async def refresh_html_cache(
     events = {}

     for card in soup.css("#events .table .vevent.theevent"):
-        name = card.css_first(".teamtd.event").text(strip=True)
-        time = card.css_first(".time").text(strip=True)
-
         if not (href := card.css_first("a").attributes.get("href")):
             continue

+        name_node = card.css_first(".teamtd.event")
+        time_node = card.css_first(".time")
+
+        if not (name_node and time_node):
+            continue
+
+        name = name_node.text(strip=True)
+        time = time_node.text(strip=True)
+
         event_sport = valid_sports[sport]
         event_name = fix_event(name)
-        event_dt = Time.from_only_time(time, now.date(), "UTC")
+
+        event_dt = Time.from_only_time(
+            time,
+            now.date(),
+            timezone="UTC",
+        )

         key = f"[{event_sport}] {event_name} (VOLO)"

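css_first returns None when a selector matches nothing, so calling .text() directly on the result is exactly the crash the new guard avoids; the href check two lines up already followed the same pattern. A standalone sketch of the idea, assuming selectolax (which is what the .css_first / .text(strip=True) / .attributes calls in the hunk suggest, though the imports are not shown in this diff):

from selectolax.parser import HTMLParser

html = '<div class="card"><a href="/x">Game</a><span class="time">19:00</span></div>'
card = HTMLParser(html).css_first(".card")

name_node = card.css_first(".teamtd.event")  # no such node in this snippet -> None
time_node = card.css_first(".time")

if not (name_node and time_node):
    print("skipping card with missing fields")
else:
    print(name_node.text(strip=True), time_node.text(strip=True))
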
@@ -204,9 +215,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f'Scraping from "{BASE_URL}"')

-    sport_urls = {
-        sport: urljoin(BASE_URL, sport.lower()) for sport in valid_sports.keys()
-    }
+    sport_urls = {sport: urljoin(BASE_URL, f"sport/{sport}") for sport in valid_sports}

     events = await get_events(
         client,

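urljoin's handling of the base path is why the sport prefix moved into the relative part: the old BASE_URL already carried "/sport/", so a bare sport name joined onto it; now that BASE_URL is host-only, "sport/" has to travel with the sport name or it is lost. A quick check of the behavior:

from urllib.parse import urljoin

# Old constants: base already carried the /sport/ path.
print(urljoin("http://volokit2.com/sport/", "boxing"))   # http://volokit2.com/sport/boxing

# New constants: host-only base, path supplied in the relative part.
print(urljoin("http://volokit2.com", "sport/boxing"))    # http://volokit2.com/sport/boxing

# Why the prefix matters: a bare name against a host-only base would drop /sport/.
print(urljoin("http://volokit2.com", "boxing"))          # http://volokit2.com/boxing

The dropped .lower() call is harmless here since the valid_sports keys are already lowercase.
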
@@ -244,7 +253,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
         entry = {
             "url": url,
             "logo": logo,
-            "base": "http://volokit2.com",
+            "base": BASE_URL,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
         }

@@ -177,9 +177,12 @@ async def get_events(
     pattern = re.compile(r"\-+|\(")

     for event in api_data:
-        match_id = event["matchId"]
-        name = event["title"]
-        league = event["league"]
+        match_id = event.get("matchId")
+        name = event.get("title")
+        league = event.get("league")
+
+        if not (match_id and name and league):
+            continue

         if not (ts := event.get("ts")):
             continue

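Note that the new guard relies on truthiness rather than an explicit None check, so an event with an empty-string title is dropped along with genuinely missing fields, which seems to be the intent for this payload. A compact illustration with made-up event dicts:

events = [
    {"matchId": "a1", "title": "Team A vs Team B", "league": "NHL", "ts": 1700000000},
    {"matchId": "a2", "title": "", "league": "NHL", "ts": 1700000000},  # empty title -> skipped
    {"title": "No id", "league": "NHL", "ts": 1700000000},              # missing matchId -> skipped
]

for event in events:
    match_id = event.get("matchId")
    name = event.get("title")
    league = event.get("league")

    if not (match_id and name and league):
        continue

    print(match_id, name, league)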