mirror of https://github.com/doms9/iptv.git (synced 2026-03-07 11:18:25 +01:00)
parent 2ad26ef050, commit 00000d9247
8 changed files with 34 additions and 20 deletions
@@ -1,4 +1,5 @@
 from functools import partial
+from urllib.parse import urljoin

 from playwright.async_api import Browser

@@ -14,7 +15,7 @@ CACHE_FILE = Cache(TAG, exp=10_800)

 API_FILE = Cache(f"{TAG}-api", exp=19_800)

-API_URL = "https://api.cdn-live.tv/api/v1/events/sports"
+API_URL = "https://api.cdn-live.tv"


 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
@@ -26,7 +27,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     log.info("Refreshing API cache")

     if r := await network.request(
-        API_URL,
+        urljoin(API_URL, "api/v1/events/sports"),
         log=log,
         params={"user": "cdnlivetv", "plan": "free"},
     ):
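Note: the recurring pattern in this commit is to keep only the origin in the module-level constant and join the endpoint path at the call site with urllib.parse.urljoin. For reference, this is standard-library behavior (the example.com lines are illustrative):

    from urllib.parse import urljoin

    urljoin("https://api.cdn-live.tv", "api/v1/events/sports")
    # 'https://api.cdn-live.tv/api/v1/events/sports'

    # The usual urljoin caveat: when the base already carries a path, the
    # last segment is replaced unless the base ends with a slash.
    urljoin("https://example.com/feed", "bar")   # 'https://example.com/bar'
    urljoin("https://example.com/feed/", "bar")  # 'https://example.com/feed/bar'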
@@ -1,4 +1,5 @@
 from functools import partial
+from urllib.parse import urljoin

 from playwright.async_api import Browser

@@ -14,7 +15,7 @@ CACHE_FILE = Cache(TAG, exp=5_400)

 API_CACHE = Cache(f"{TAG}-api", exp=28_800)

-BASE_URL = "https://embedhd.org/api-event.php"
+BASE_URL = "https://embedhd.org"


 def fix_league(s: str) -> str:
@@ -29,7 +30,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:

     api_data = {"timestamp": now.timestamp()}

-    if r := await network.request(BASE_URL, log=log):
+    if r := await network.request(urljoin(BASE_URL, "api-event.php"), log=log):
         api_data: dict = r.json()

         api_data["timestamp"] = now.timestamp()
@@ -15,7 +15,7 @@ TAG = "PAWA"

 CACHE_FILE = Cache(TAG, exp=10_800)

-BASE_URL = "https://pawastreams.net/feed/"
+BASE_URL = "https://pawastreams.net/feed"


 async def process_event(url: str, url_num: int) -> str | None:
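Note: the PAWA hunk above (and the SPORT9 and TOTALSPRTK hunks below) only drops the trailing slash from the base URL; no urljoin import is added to those files. Assuming their paths are composed with f-strings elsewhere in the files (not shown in this diff), the normalized constant avoids a doubled slash:

    BASE_URL = "https://pawastreams.net/feed"

    # hypothetical composition, not part of this commit:
    f"{BASE_URL}/some-event"  # 'https://pawastreams.net/feed/some-event', no '//'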
@@ -1,5 +1,6 @@
 import json
 from functools import partial
+from urllib.parse import urljoin

 from playwright.async_api import Browser, Page

@@ -13,20 +14,20 @@ TAG = "PIXEL"

 CACHE_FILE = Cache(TAG, exp=19_800)

-BASE_URL = "https://pixelsport.tv/backend/livetv/events"
+BASE_URL = "https://pixelsport.tv"


 async def get_api_data(page: Page) -> dict[str, list[dict, str, str]]:
     try:
         await page.goto(
-            BASE_URL,
+            url := urljoin(BASE_URL, "backend/livetv/events"),
             wait_until="domcontentloaded",
             timeout=10_000,
         )

         raw_json = await page.locator("pre").inner_text(timeout=5_000)
     except Exception as e:
-        log.error(f'Failed to fetch "{BASE_URL}": {e}')
+        log.error(f'Failed to fetch "{url}": {e}')

         return {}

@@ -65,7 +66,7 @@ async def get_events(page: Page) -> dict[str, dict[str, str | float]]:
         events[key] = {
             "url": stream_link,
             "logo": logo,
-            "base": "https://pixelsport.tv",
+            "base": BASE_URL,
             "timestamp": now.timestamp(),
             "id": tvg_id or "Live.Event.us",
         }
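Note: page.goto(url := urljoin(...)) uses an assignment expression so the joined URL is bound when the argument is evaluated, before the call runs; the except handler can therefore log the exact URL even though it was built inline. A minimal illustration of the binding behavior (fetch is a made-up stand-in for page.goto):

    from urllib.parse import urljoin

    BASE_URL = "https://pixelsport.tv"

    def fetch(u: str) -> None:
        raise TimeoutError("timed out")  # always fails, to reach the handler

    try:
        fetch(url := urljoin(BASE_URL, "backend/livetv/events"))
    except Exception as e:
        # `url` was bound when the argument was evaluated, before fetch() ran
        print(f'Failed to fetch "{url}": {e}')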
@@ -1,4 +1,5 @@
 from functools import partial
+from urllib.parse import urljoin

 from playwright.async_api import Browser

@@ -15,9 +16,9 @@ CACHE_FILE = Cache(TAG, exp=10_800)
 API_FILE = Cache(f"{TAG}-api", exp=19_800)

 MIRRORS = [
-    "https://old.ppv.to/api/streams",
-    "https://api.ppvs.su/api/streams",
-    "https://api.ppv.to/api/streams",
+    "https://old.ppv.to",
+    "https://api.ppvs.su",
+    "https://api.ppv.to",
 ]


@@ -29,7 +30,7 @@ async def get_events(url: str, cached_keys: list[str]) -> list[dict[str, str]]:

     api_data = {"timestamp": now.timestamp()}

-    if r := await network.request(url, log=log):
+    if r := await network.request(urljoin(url, "api/streams"), log=log):
         api_data: dict = r.json()

         API_FILE.write(api_data)
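Note: get_events here already took a probed mirror as url; the change just moves the shared /api/streams path out of the mirror strings. The prober is the repo's network.get_base (it appears in the STREAMHUB hunks below), whose implementation is not part of this diff. A minimal sketch of what such a helper might do, assuming httpx; names and timeouts are illustrative:

    import httpx

    async def get_base(mirrors: list[str]) -> str | None:
        """Return the first mirror that responds, or None if none do."""
        async with httpx.AsyncClient(timeout=5, follow_redirects=True) as client:
            for base in mirrors:
                try:
                    r = await client.get(base)
                except httpx.HTTPError:
                    continue  # unreachable mirror, try the next one
                if r.status_code < 500:
                    return base
        return None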
@@ -15,7 +15,7 @@ TAG = "SPORT9"

 CACHE_FILE = Cache(TAG, exp=5_400)

-BASE_URL = "https://sport9.ru/"
+BASE_URL = "https://sport9.ru"


 async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
@@ -17,7 +17,7 @@ CACHE_FILE = Cache(TAG, exp=10_800)

 HTML_CACHE = Cache(f"{TAG}-html", exp=28_800)

-BASE_URL = "https://streamhub.pro/"
+MIRRORS = ["https://streamhub.pro", "https://livesports4u.net"]

 CATEGORIES = {
     "Soccer": "sport_68c02a4464a38",
@@ -35,15 +35,17 @@ CATEGORIES = {


 async def refresh_html_cache(
+    url: str,
     date: str,
     sport_id: str,
     ts: float,
 ) -> dict[str, dict[str, str | float]]:

     events = {}

     if not (
         html_data := await network.request(
-            urljoin(BASE_URL, f"events/{date}"),
+            urljoin(url, f"events/{date}"),
             log=log,
             params={"sport_id": sport_id},
         )
@@ -93,7 +95,7 @@ async def refresh_html_cache(
     return events


-async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
+async def get_events(url: str, cached_keys: list[str]) -> list[dict[str, str]]:
     now = Time.clean(Time.now())

     if not (events := HTML_CACHE.load()):
@@ -101,6 +103,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:

     tasks = [
         refresh_html_cache(
+            url,
             date,
             sport_id,
             now.timestamp(),
@@ -143,9 +146,16 @@ async def scrape(browser: Browser) -> None:

     log.info(f"Loaded {cached_count} event(s) from cache")

-    log.info(f'Scraping from "{BASE_URL}"')
+    if not (base_url := await network.get_base(MIRRORS)):
+        log.warning("No working PPV mirrors")

-    events = await get_events(cached_urls.keys())
+        CACHE_FILE.write(cached_urls)
+
+        return
+
+    log.info(f'Scraping from "{base_url}"')
+
+    events = await get_events(base_url, cached_urls.keys())

     log.info(f"Processing {len(events)} new URL(s)")
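Note: with MIRRORS replacing the single BASE_URL, whichever base network.get_base returns is threaded from scrape() through get_events() into refresh_html_cache(), and the same relative endpoint resolves against either mirror (the date below is illustrative):

    from urllib.parse import urljoin

    for base in ("https://streamhub.pro", "https://livesports4u.net"):
        print(urljoin(base, "events/2026-03-07"))
    # https://streamhub.pro/events/2026-03-07
    # https://livesports4u.net/events/2026-03-07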
@@ -14,7 +14,7 @@ TAG = "TOTALSPRTK"

 CACHE_FILE = Cache(TAG, exp=28_800)

-BASE_URL = "https://live3.totalsportek777.com/"
+BASE_URL = "https://live3.totalsportek777.com"


 def fix_txt(s: str) -> str: