mirror of https://github.com/doms9/iptv.git, synced 2026-04-21 19:46:59 +02:00
- remove pixel.py - misc edits
parent b52a847b57
commit 00000d9224

3 changed files with 3 additions and 117 deletions
@@ -12,7 +12,6 @@ from scrapers import (
     livetvsx,
     ovogoal,
     pawa,
-    pixel,
     ppv,
     roxie,
     shark,
@@ -66,7 +65,6 @@ async def main() -> None:
     pw_tasks = [
         asyncio.create_task(cdnlivetv.scrape(hdl_brwsr)),
         asyncio.create_task(embedhd.scrape(hdl_brwsr)),
-        # asyncio.create_task(pixel.scrape(hdl_brwsr)),
         asyncio.create_task(ppv.scrape(xtrnl_brwsr)),
         asyncio.create_task(roxie.scrape(hdl_brwsr)),
         asyncio.create_task(sportzone.scrape(xtrnl_brwsr)),
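
For context on the hunk above: asyncio.create_task schedules each scraper coroutine on the running event loop immediately, so the scrapers run concurrently. The real browser handles (hdl_brwsr, xtrnl_brwsr) and the point where the tasks are awaited are not visible in this diff, so the following self-contained sketch uses stand-ins and assumes a gather at the end:

import asyncio

async def scrape(name: str, browser: str) -> str:
    # stand-in for a scraper module's scrape(browser) coroutine
    await asyncio.sleep(0.1)
    return f"{name} via {browser}"

async def main() -> None:
    # create_task starts each coroutine right away, as in the hunk above
    pw_tasks = [
        asyncio.create_task(scrape("cdnlivetv", "hdl_brwsr")),
        asyncio.create_task(scrape("ppv", "xtrnl_brwsr")),
    ]
    print(await asyncio.gather(*pw_tasks))

asyncio.run(main())
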
@@ -111,7 +109,6 @@ async def main() -> None:
         | livetvsx.urls
         | ovogoal.urls
         | pawa.urls
-        | pixel.urls
         | ppv.urls
         | roxie.urls
         | shark.urls
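
For context: the surrounding code (only partially visible in this hunk) merges each scraper module's urls dict with Python's | dict-union operator (3.9+), and this commit drops pixel.urls from that chain. A minimal sketch of the pattern, with stand-in dicts so it runs on its own:

# Each scraper module exposes a `urls` dict; `|` merges them left to right,
# with the right-hand operand winning on duplicate keys.
livetvsx_urls = {"[NBA] Lakers vs Celtics (LIVETVSX)": {"url": "https://example.com/1"}}
shark_urls = {"[NFL] Jets vs Bills (SHARK)": {"url": "https://example.com/2"}}

all_urls = {} | livetvsx_urls | shark_urls
print(len(all_urls))  # 2
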
@@ -1,107 +0,0 @@
-import json
-from functools import partial
-from urllib.parse import urljoin
-
-from playwright.async_api import Browser, Page
-
-from .utils import Cache, Time, get_logger, leagues, network
-
-log = get_logger(__name__)
-
-urls: dict[str, dict[str, str | float]] = {}
-
-TAG = "PIXEL"
-
-CACHE_FILE = Cache(TAG, exp=19_800)
-
-BASE_URL = "https://pixelsport.tv"
-
-
-async def get_api_data(page: Page) -> dict[str, list[dict, str, str]]:
-    try:
-        resp = await page.goto(
-            url := urljoin(BASE_URL, "backend/livetv/events"),
-            wait_until="domcontentloaded",
-            timeout=6_000,
-        )
-
-        if not resp or resp.status != 200:
-            log.warning(f"{url} Status Code: {resp.status if resp else 'None'}")
-
-            return {}
-
-        raw_json = await page.locator("pre").inner_text(timeout=5_000)
-    except Exception as e:
-        log.error(f'Failed to fetch "{url}": {e}')
-
-        return {}
-
-    return json.loads(raw_json)
-
-
-async def get_events(page: Page) -> dict[str, dict[str, str | float]]:
-    now = Time.clean(Time.now())
-
-    api_data = await get_api_data(page)
-
-    events = {}
-
-    for event in api_data.get("events", []):
-        event_dt = Time.from_str(event["date"], timezone="UTC")
-
-        if event_dt.date() != now.date():
-            continue
-
-        event_name = event["match_name"]
-
-        channel_info: dict[str, str] = event["channel"]
-
-        category: dict[str, str] = channel_info["TVCategory"]
-
-        sport = category["name"]
-
-        stream_urls = [(i, f"server{i}URL") for i in range(1, 4)]
-
-        for z, stream_url in stream_urls:
-            if (stream_link := channel_info.get(stream_url)) and stream_link != "null":
-                key = f"[{sport}] {event_name} {z} ({TAG})"
-
-                tvg_id, logo = leagues.get_tvg_info(sport, event_name)
-
-                events[key] = {
-                    "url": stream_link,
-                    "logo": logo,
-                    "base": BASE_URL,
-                    "timestamp": now.timestamp(),
-                    "id": tvg_id or "Live.Event.us",
-                }
-
-    return events
-
-
-async def scrape(browser: Browser) -> None:
-    if cached := CACHE_FILE.load():
-        urls.update(cached)
-
-        log.info(f"Loaded {len(urls)} event(s) from cache")
-
-        return
-
-    log.info(f'Scraping from "{BASE_URL}"')
-
-    async with network.event_context(browser) as context:
-        async with network.event_page(context) as page:
-            handler = partial(get_events, page=page)
-
-            events = await network.safe_process(
-                handler,
-                url_num=1,
-                semaphore=network.PW_S,
-                log=log,
-            )
-
-            urls.update(events or {})
-
-    log.info(f"Collected and cached {len(urls)} new event(s)")
-
-    CACHE_FILE.write(urls)
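
One technique worth noting in the deleted module is the stream-URL probe in get_events: it checks server1URL through server3URL on each channel dict and skips values that are absent or the literal string "null". A self-contained sketch of just that lookup, with made-up data:

# Channels expose server1URL..server3URL; absent streams are either missing
# from the dict or stored as the literal string "null".
channel_info = {"server1URL": "https://cdn.example.com/a.m3u8", "server2URL": "null"}

for i in range(1, 4):
    # the walrus operator binds the lookup so one expression fetches and tests it
    if (stream_link := channel_info.get(f"server{i}URL")) and stream_link != "null":
        print(i, stream_link)  # only server 1 passes
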
@@ -59,8 +59,6 @@ async def refresh_html_cache(
         sport = sport_node.text(strip=True)
 
-        logo = section.css_first(".league-icon img").attributes.get("src")
-
         for event in section.css(".section-event"):
             event_name = "Live Event"
@@ -86,7 +84,6 @@ async def refresh_html_cache(
                 "sport": sport,
                 "event": event_name,
                 "link": href,
-                "logo": logo,
                 "event_ts": event_dt.timestamp(),
                 "timestamp": ts,
             }
@@ -168,20 +165,19 @@ async def scrape(browser: Browser) -> None:
             log=log,
         )
 
-        sport, event, logo, ts = (
+        sport, event, ts = (
             ev["sport"],
             ev["event"],
-            ev["logo"],
             ev["event_ts"],
         )
 
         key = f"[{sport}] {event} ({TAG})"
 
-        tvg_id, pic = leagues.get_tvg_info(sport, event)
+        tvg_id, logo = leagues.get_tvg_info(sport, event)
 
         entry = {
             "url": url,
-            "logo": logo or pic,
+            "logo": logo,
             "base": "https://storytrench.net/",
             "timestamp": ts,
             "id": tvg_id or "Live.Event.us",