commit 00000d94fc (parent d7c64fdeae)
8 changed files with 38 additions and 33 deletions
@@ -60,7 +60,7 @@ async def get_events(
 
     events = []
 
-    for info in api_data["days"]:
+    for info in api_data.get("days", []):
         day = Time.from_str(info["day_et"])
 
         if now.date() != day.date():
@@ -55,13 +55,13 @@ async def get_events(
     start_dt = now.delta(minutes=-30)
     end_dt = now.delta(minutes=30)
 
-    for stream_group in api_data["streams"]:
+    for stream_group in api_data.get("streams", []):
         sport = stream_group["category"]
 
         if sport == "24/7 Streams":
             continue
 
-        for event in stream_group["streams"]:
+        for event in stream_group.get("streams", []):
             name = event.get("name")
             start_ts = event.get("starts_at")
             logo = event.get("poster")
@@ -16,7 +16,7 @@ CACHE_FILE = Cache("roxie.json", exp=10_800)
 
 HTML_CACHE = Cache("roxie-html.json", exp=19_800)
 
-MIRRORS = ["https://roxiestreams.live", "https://roxiestreams.cc"]
+BASE_URL = "https://roxiestreams.live"
 
 
 async def process_event(
@@ -143,15 +143,10 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
     log.info(f"Loaded {cached_count} event(s) from cache")
 
-    if not (base_url := await network.get_base(MIRRORS)):
-        log.warning("No working Roxie mirrors")
-        CACHE_FILE.write(cached_urls)
-        return
-
-    log.info(f'Scraping from "{base_url}"')
-
+    log.info(f'Scraping from "{BASE_URL}"')
+
     sport_urls = {
-        sport: urljoin(base_url, sport.lower())
+        sport: urljoin(BASE_URL, sport.lower())
         for sport in ["Soccer", "MLB", "NBA", "NFL", "Fighting", "Motorsports"]
     }
 
@@ -188,7 +183,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
         entry = {
             "url": url,
             "logo": logo,
-            "base": base_url,
+            "base": BASE_URL,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
         }
@@ -114,7 +114,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")
 
     if not (base_url := await network.get_base(MIRRORS)):
-        log.warning("No working StreamEast mirrors")
+        log.warning("No working Streameast mirrors")
         CACHE_FILE.write(cached_urls)
         return
 
@@ -58,7 +58,7 @@ async def get_events(
     start_dt = now.delta(hours=-1)
     end_dt = now.delta(minutes=10)
 
-    for category, streams in api_data["streams"].items():
+    for category, streams in api_data.get("streams", {}).items():
         if not streams:
             continue
 
@@ -20,7 +20,7 @@ API_FILE = Cache("strmd-api.json", exp=28_800)
 MIRRORS = ["https://streamed.pk", "https://streami.su", "https://streamed.st"]
 
 
-def validate_category(s: str) -> str:
+def fix_sport(s: str) -> str:
     if "-" in s:
         return " ".join(i.capitalize() for i in s.split("-"))
 
@@ -138,9 +138,7 @@ async def get_events(
     pattern = re.compile(r"[\n\r]+|\s{2,}")
 
     for event in api_data:
-        category = event["category"]
-
-        if category == "other":
+        if (category := event.get("category")) == "other":
             continue
 
         if not (ts := event["date"]):
@@ -153,7 +151,7 @@ async def get_events(
         if not start_dt <= event_dt <= end_dt:
             continue
 
-        sport = validate_category(category)
+        sport = fix_sport(category)
 
         parts = pattern.split(event["title"].strip())
         name = " | ".join(p.strip() for p in parts if p.strip())
@@ -198,7 +196,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")
 
     if not (base_url := await network.get_base(MIRRORS)):
-        log.warning("No working PPV mirrors")
+        log.warning("No working STRMD mirrors")
         CACHE_FILE.write(cached_urls)
         return
 
@@ -16,7 +16,7 @@ CACHE_FILE = Cache("volo.json", exp=10_800)
 
 HTML_CACHE = Cache("volo-html.json", exp=86_400)
 
-BASE_URL = "http://volokit2.com/sport/"
+BASE_URL = "http://volokit2.com"
 
 valid_sports = {
     "boxing": "Boxing",
@@ -131,15 +131,26 @@ async def refresh_html_cache(
     events = {}
 
     for card in soup.css("#events .table .vevent.theevent"):
-        name = card.css_first(".teamtd.event").text(strip=True)
-        time = card.css_first(".time").text(strip=True)
 
         if not (href := card.css_first("a").attributes.get("href")):
             continue
 
+        name_node = card.css_first(".teamtd.event")
+        time_node = card.css_first(".time")
+
+        if not (name_node and time_node):
+            continue
+
+        name = name_node.text(strip=True)
+        time = time_node.text(strip=True)
+
         event_sport = valid_sports[sport]
         event_name = fix_event(name)
-        event_dt = Time.from_only_time(time, now.date(), "UTC")
+        event_dt = Time.from_only_time(
+            time,
+            now.date(),
+            timezone="UTC",
+        )
 
         key = f"[{event_sport}] {event_name} (VOLO)"
 
@@ -204,9 +215,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
 
     log.info(f'Scraping from "{BASE_URL}"')
 
-    sport_urls = {
-        sport: urljoin(BASE_URL, sport.lower()) for sport in valid_sports.keys()
-    }
+    sport_urls = {sport: urljoin(BASE_URL, f"sport/{sport}") for sport in valid_sports}
 
     events = await get_events(
         client,
@@ -244,7 +253,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
         entry = {
             "url": url,
             "logo": logo,
-            "base": "http://volokit2.com",
+            "base": BASE_URL,
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",
         }
@@ -177,9 +177,12 @@ async def get_events(
     pattern = re.compile(r"\-+|\(")
 
     for event in api_data:
-        match_id = event["matchId"]
-        name = event["title"]
-        league = event["league"]
+        match_id = event.get("matchId")
+        name = event.get("title")
+        league = event.get("league")
+
+        if not (match_id and name and league):
+            continue
 
         if not (ts := event.get("ts")):
             continue
@@ -222,7 +225,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")
 
     if not (base_url := await network.get_base(MIRRORS)):
-        log.warning("No working WatchFooty mirrors")
+        log.warning("No working Watch Footy mirrors")
         CACHE_FILE.write(cached_urls)
         return
 