commit 00000d9050 (parent a180e18935)
5 changed files with 295 additions and 189 deletions
@@ -5,11 +5,11 @@ from pathlib import Path
 from scrapers import (
     fstv,
+    pixel,
     ppv,
     roxie,
     streambtw,
     streameast,
-    streamed,
     strmd,
     strmfree,
     tvpass,
@@ -43,11 +43,11 @@ async def main() -> None:
     tasks = [
         asyncio.create_task(fstv.scrape(network.client)),
+        asyncio.create_task(pixel.scrape(network.client)),
        asyncio.create_task(ppv.scrape(network.client)),
         asyncio.create_task(roxie.scrape(network.client)),
         asyncio.create_task(streambtw.scrape(network.client)),
         asyncio.create_task(streameast.scrape(network.client)),
-        #asyncio.create_task(streamed.scrape(network.client)),
         asyncio.create_task(strmd.scrape(network.client)),
         asyncio.create_task(strmfree.scrape(network.client)),
         asyncio.create_task(tvpass.scrape(network.client)),
@@ -58,11 +58,11 @@ async def main() -> None:
     additions = (
         fstv.urls
+        | pixel.urls
         | ppv.urls
         | roxie.urls
         | streambtw.urls
         | streameast.urls
-        | streamed.urls
         | strmd.urls
         | strmfree.urls
         | tvpass.urls
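Note: a minimal sketch of the fan-out/merge pattern these three hunks wire up, assuming (as the diff suggests) that every scraper module exposes an async scrape() plus a module-level urls dict; run_scrapers itself is illustrative, not part of the commit.

import asyncio

async def run_scrapers(client, modules) -> dict:
    # Schedule every scraper concurrently, exactly like the tasks list above.
    tasks = [asyncio.create_task(m.scrape(client)) for m in modules]
    await asyncio.gather(*tasks)

    # Merge each module's collected URLs with the dict-union operator,
    # mirroring the `additions = (fstv.urls | pixel.urls | ...)` hunk.
    merged: dict = {}
    for m in modules:
        merged |= m.urls
    return merged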
@@ -1,10 +1,8 @@
-import re
 from functools import partial
 from pathlib import Path
 
 import httpx
 from playwright.async_api import async_playwright
-from selectolax.parser import HTMLParser
 
 from .utils import Cache, Time, get_logger, leagues, network
 
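Note: Cache comes from the project's own .utils and its implementation is not shown in this diff; judging only from the call sites below (load(per_entry=False), write(), exp in seconds, a "timestamp" key stamped into the payload), its interface looks roughly like this assumed sketch:

import json
import time
from pathlib import Path

class Cache:
    """Assumed interface: a JSON file cache with an expiry window."""

    def __init__(self, path: Path, exp: int) -> None:
        self.path = path  # cache file location
        self.exp = exp    # max age in seconds

    def load(self, per_entry: bool = True) -> dict:
        # Return {} when the file is missing or stale.
        if not self.path.exists():
            return {}
        data = json.loads(self.path.read_text())
        if not per_entry:
            # Whole payload expires at once via its "timestamp" key.
            ts = data.get("timestamp", 0)
            return data if time.time() - ts < self.exp else {}
        # Otherwise each entry carries its own "timestamp".
        return {
            k: v
            for k, v in data.items()
            if time.time() - v.get("timestamp", 0) < self.exp
        }

    def write(self, data: dict) -> None:
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.path.write_text(json.dumps(data))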
@@ -12,96 +10,97 @@ log = get_logger(__name__)
 urls: dict[str, dict[str, str | float]] = {}
 
-CACHE_FILE = Cache(Path(__file__).parent / "caches" / "streamed.json", exp=10_800)
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "lotus.json", exp=10_800)
 
-HTML_CACHE = Cache(Path(__file__).parent / "caches" / "streamed_php.json", exp=28_800)
+API_CACHE = Cache(Path(__file__).parent / "caches" / "lotus_api.json", exp=28_800)
 
-BASE_URL = "https://streamed.site/webmaster.php"
+BASE_URL = "https://lotusgamehd.xyz/api-event.php"
 
 
-async def refresh_html_cache(
-    client: httpx.AsyncClient, url: str
-) -> dict[str, str | float]:
+def fix_league(s: str) -> str:
+    return " ".join(x.capitalize() for x in s.split()) if len(s) > 5 else s.upper()
+
+
+async def refresh_api_cache(
+    client: httpx.AsyncClient,
+    url: str,
+    ts: float,
+) -> dict[str, dict[str, str]]:
+    log.info("Refreshing API cache")
+
     try:
         r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
         log.error(f'Failed to fetch "{url}": {e}')
 
         return {}
 
-    soup = HTMLParser(r.text)
-    events = {}
-    now = Time.now()
-
-    for row in soup.css("div.wrap div.row"):
-        if not (date := row.css_first("div.date")):
-            continue
-
-        event_date = date.text(strip=True).replace("\t", " ")
-        event_dt = Time.from_str(event_date)
-
-        if event_dt.date() != now.date():
-            continue
-
-        league = row.css_first("div.league")
-        title = row.css_first("div.title")
-        hds_a = row.css_first("div.hds a")
-
-        if not (league and title and hds_a):
-            continue
-
-        sport, event = league.text(strip=True), title.text(strip=True)
-        sport = sport.split("—")[-1].strip()
-
-        onclick = hds_a.attributes.get("onclick", "")
-
-        if not (m := re.search(r"openPlayerPopup\(\s*(\d+)\s*\)", onclick)):
-            continue
-
-        key = f"[{sport}] {event} (STRMED)"
-
-        events[key] = {
-            "sport": sport,
-            "event": event,
-            "link": f"https://streamed.site/set.php?{m[1]}",
-            "timestamp": now.timestamp(),
-            "event_ts": event_dt.timestamp(),
-        }
-
-    return events
+    data = r.json()
+
+    data["timestamp"] = ts
+
+    return data
 
 
 async def get_events(
     client: httpx.AsyncClient,
-    url: str,
+    event_link: str,
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
 
-    if not (events := HTML_CACHE.load()):
-        events = await refresh_html_cache(client, url)
-        HTML_CACHE.write(events)
-
-    live = []
-
     now = Time.clean(Time.now())
 
-    start_ts = now.delta(minutes=-30).timestamp()
-    end_ts = now.delta(minutes=30).timestamp()
-
-    for k, v in events.items():
-        if cached_keys & {k}:
-            continue
-
-        if not start_ts <= v["event_ts"] <= end_ts:
-            continue
-
-        live.append({**v})
-
-    return live
+    if not (api_data := API_CACHE.load(per_entry=False)):
+        api_data = await refresh_api_cache(
+            client,
+            event_link,
+            now.timestamp(),
+        )
+
+        API_CACHE.write(api_data)
+
+    events: list[dict[str, str]] = []
+
+    start_dt = now.delta(minutes=-30).timestamp()
+    end_dt = now.delta(minutes=30).timestamp()
+
+    for info in api_data["days"]:
+        day = Time.from_str(info["day_et"], "%Y-%m-%d")
+
+        if now.date() != day.date():
+            continue
+
+        for event in info["items"]:
+            event_league = event["league"]
+
+            if event_league == "channel tv":
+                continue
+
+            sport = fix_league(event_league)
+            event_name = event["title"]
+            event_ts: int = event["ts_et"]
+            event_streams: list[dict] = event["streams"]
+
+            if not (event_link := event_streams[0].get("link")):
+                continue
+
+            if not start_dt <= event_ts <= end_dt:
+                continue
+
+            key = f"[{sport}] {event_name} (LOTUS)"
+
+            if cached_keys & {key}:
+                continue
+
+            events.append(
+                {
+                    "sport": sport,
+                    "event": event_name,
+                    "link": event_link,
+                    "timestamp": event_ts,
+                }
+            )
+
+    return events
 
 
 async def scrape(client: httpx.AsyncClient) -> None:
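Note: the new fix_league helper added above title-cases multi-word league names and upper-cases short ones, on the assumption that short strings are abbreviations; a quick self-contained check:

def fix_league(s: str) -> str:
    # Longer than 5 chars: treat as words; otherwise assume an abbreviation.
    return " ".join(x.capitalize() for x in s.split()) if len(s) > 5 else s.upper()

assert fix_league("premier league") == "Premier League"
assert fix_league("nba") == "NBA"
assert fix_league("serie a") == "Serie A"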
@@ -141,16 +140,16 @@ async def scrape(client: httpx.AsyncClient) -> None:
         )
 
         if url:
-            sport, event, ts = ev["sport"], ev["event"], ev["event_ts"]
+            sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
 
             tvg_id, logo = leagues.get_tvg_info(sport, event)
 
-            key = f"[{sport}] {event} (STRMED)"
+            key = f"[{sport}] {event} (LOTUS)"
 
             entry = {
                 "url": url,
                 "logo": logo,
-                "base": "https://embedsports.top/",
+                "base": "",
                 "timestamp": ts,
                 "id": tvg_id or "Live.Event.us",
             }
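Note: a recurring idiom in these scrapers is `if cached_keys & {key}: continue` — intersecting with a one-element set, which is truthy exactly when the key is already cached. Plain `in` membership is equivalent:

cached_keys = {"[NBA] Lakers vs Celtics (LOTUS)"}
key = "[NBA] Lakers vs Celtics (LOTUS)"

# Intersection with a singleton set is truthy iff the key is present...
assert bool(cached_keys & {key})
# ...which is what `in` expresses directly, without building a new set.
assert key in cached_keys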
M3U8/scrapers/pixel.py (new file, +123)
@@ -0,0 +1,123 @@
+import re
+from pathlib import Path
+
+import httpx
+
+from .utils import Cache, Time, get_logger, leagues
+
+log = get_logger(__name__)
+
+urls: dict[str, dict[str, str | float]] = {}
+
+API_FILE = Cache(Path(__file__).parent / "caches" / "pixel_api.json", exp=28_800)
+
+CACHE_FILE = Cache(Path(__file__).parent / "caches" / "pixel.json", exp=10_800)
+
+BASE_URL = "https://pixelsport.tv/backend/livetv/events"
+
+
+async def refresh_api_cache(
+    client: httpx.AsyncClient,
+    url: str,
+    ts: float,
+) -> dict[str, list[dict, str, str]]:
+    log.info("Refreshing API cache")
+
+    try:
+        r = await client.get(url)
+        r.raise_for_status()
+    except Exception as e:
+        log.error(f'Failed to fetch "{url}": {e}')
+        return {}
+
+    data = r.json()
+
+    data["timestamp"] = ts
+
+    return data
+
+
+async def get_events(
+    client: httpx.AsyncClient,
+    cached_keys: set[str],
+) -> dict[str, str | float]:
+    now = Time.clean(Time.now())
+
+    if not (api_data := API_FILE.load(per_entry=False)):
+        api_data = await refresh_api_cache(
+            client,
+            BASE_URL,
+            now.timestamp(),
+        )
+
+        API_FILE.write(api_data)
+
+    events = {}
+
+    pattern = re.compile(
+        r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?",
+        re.IGNORECASE,
+    )
+
+    start_dt = now.delta(minutes=-30)
+    end_dt = now.delta(minutes=30)
+
+    for event in api_data["events"]:
+        event_dt = Time.from_str(f'{event["date"]} UTC', "%Y-%m-%dT%H:%M:%S.%fZ")
+
+        if now.date() != event_dt.date():
+            continue
+
+        if not start_dt <= event_dt <= end_dt:
+            continue
+
+        event_name = event["match_name"]
+        channel_info: dict[str, str] = event["channel"]
+        category: dict[str, str] = channel_info["TVCategory"]
+
+        sport = category["name"]
+
+        stream_urls = [(i, f"server{i}URL") for i in range(1, 4)]
+
+        for z, stream_url in stream_urls:
+            if stream_link := channel_info.get(stream_url):
+                if pattern.search(stream_link):
+                    key = f"[{sport}] {event_name} (PIXEL - {z})"
+
+                    if cached_keys & {key}:
+                        continue
+
+                    tvg_id, logo = leagues.get_tvg_info(sport, event_name)
+
+                    events[key] = {
+                        "url": stream_link,
+                        "logo": logo,
+                        "base": "https://pixelsport.tv/",
+                        "timestamp": event_dt.timestamp(),
+                        "id": tvg_id or "Live.Event.us",
+                    }
+
+    return events
+
+
+async def scrape(client: httpx.AsyncClient) -> None:
+    cached_urls = CACHE_FILE.load()
+    cached_count = len(cached_urls)
+    urls.update(cached_urls)
+
+    log.info(f"Loaded {cached_count} event(s) from cache")
+
+    log.info(f'Scraping from "{BASE_URL}"')
+
+    events = await get_events(client, set(cached_urls.keys()))
+
+    if events:
+        for d in (urls, cached_urls):
+            d |= events
+
+    if new_count := len(cached_urls) - cached_count:
+        log.info(f"Collected and cached {new_count} new event(s)")
+    else:
+        log.info("No new events found")
+
+    CACHE_FILE.write(cached_urls)
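Note: the .m3u8 pattern in pixel.py is plain re, so it can be sanity-checked in isolation; the URLs below are made up for illustration:

import re

pattern = re.compile(
    r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?",
    re.IGNORECASE,
)

# Matches bare playlists and ones with query strings, case-insensitively.
assert pattern.search("https://cdn.example.com/live/stream.m3u8")
assert pattern.search("http://cdn.example.com/live/stream.M3U8?token=abc")
# Non-HLS links fall through to the next server slot.
assert not pattern.search("https://cdn.example.com/live/stream.mp4")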
@@ -15,11 +15,6 @@ API_FILE = Cache(Path(__file__).parent / "caches" / "ppv_api.json", exp=28_800)
 CACHE_FILE = Cache(Path(__file__).parent / "caches" / "ppv.json", exp=10_800)
 
-# MIRRORS = [
-#     "https://ppv.to",
-#     "https://ppvs.su",
-# ]
-
 BASE_URL = "https://ppv.to"
 
 
@@ -41,15 +36,13 @@ async def refresh_api_cache(
 async def get_events(
     client: httpx.AsyncClient,
-    base_url: str,
     cached_keys: set[str],
 ) -> list[dict[str, str]]:
 
     if not (api_data := API_FILE.load(per_entry=False)):
         api_data = await refresh_api_cache(
             client,
             urljoin(
-                base_url,
+                BASE_URL,
                 "api/streams",
             ),
         )
@@ -107,18 +100,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Loaded {cached_count} event(s) from cache")
 
-    # if not (base_url := await network.get_base(MIRRORS)):
-    #     log.warning("No working PPV mirrors")
-    #     CACHE_FILE.write(cached_urls)
-    #     return
-
     log.info(f'Scraping from "{BASE_URL}"')
 
-    events = await get_events(
-        client,
-        BASE_URL,
-        set(cached_urls.keys()),
-    )
+    events = await get_events(client, set(cached_urls.keys()))
 
     log.info(f"Processing {len(events)} new URL(s)")
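Note: with the mirror rotation gone, the endpoint is always urljoin(BASE_URL, "api/streams"); standard-library urljoin resolves it like so:

from urllib.parse import urljoin

BASE_URL = "https://ppv.to"

# Relative path resolved against the bare origin.
assert urljoin(BASE_URL, "api/streams") == "https://ppv.to/api/streams"

# Gotcha worth remembering: a base with a non-slash-terminated path
# gets its last segment replaced, not extended.
assert urljoin("https://ppv.to/v1", "api/streams") == "https://ppv.to/api/streams"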
@@ -135,7 +135,7 @@ class Leagues:
             return any(t in self.teams(league) for t in (t1.strip(), t2.strip()))
 
-        return event.lower() in ["nfl redzone", "college gameday"]
+        return event.lower() in {"nfl redzone", "college gameday"}
 
     def get_tvg_info(
         self,
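Note: swapping the list literal for a set literal keeps behavior identical while making the membership test a hash lookup (CPython also folds a constant set used with `in` into a frozenset at compile time):

event = "NFL RedZone"

# Same truth value either way; the set form is O(1) per lookup.
assert (event.lower() in ["nfl redzone", "college gameday"]) == (
    event.lower() in {"nfl redzone", "college gameday"}
)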