Mirror of https://github.com/doms9/iptv.git, synced 2026-01-21 03:59:03 +01:00

Compare commits e0081a68c1...13e9afae40 (37 commits)

| SHA1 |
|---|
| 13e9afae40 |
| 5ff97f9f9e |
| 5fc5966a8e |
| c8da651e96 |
| 82c6dcaba6 |
| daa608b252 |
| 00000d94e3 |
| 8ac6ca8360 |
| 1011010f7f |
| 7ad4ff9b92 |
| 00000d91c7 |
| b4f5824bb5 |
| f367691e89 |
| 7f49e9455b |
| 26f94e1955 |
| 7299b8b515 |
| 7c64ae5fa0 |
| 50618864f6 |
| 00db2b8a09 |
| a208fd261a |
| 5510f35660 |
| b8c0e21b76 |
| 89147547e5 |
| 8e7d441639 |
| cdfc6f869f |
| 8109c87cac |
| 724ef867d9 |
| bbb40ab59a |
| 649b9b0a57 |
| a12e319702 |
| 2024e31a55 |
| 48637fdd30 |
| dc0263367e |
| db1ac43db7 |
| 4864cc0ad7 |
| dce0e886d6 |
| 13cb387338 |

7 changed files with 88874 additions and 91938 deletions
EPG/TV.xml (175288 changes)
File diff suppressed because one or more lines are too long

M3U8/TV.m3u8 (2724 changes)
File diff suppressed because it is too large

M3U8/events.m3u8 (2724 changes)
File diff suppressed because it is too large

@@ -57,7 +57,7 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
     if not (html_data := await network.request(BASE_URL, log=log)):
         return events
 
-    pattern = re.compile(r"^(?:LIVE|\d+\s+(minutes?)\b)", re.IGNORECASE)
+    pattern = re.compile(r"^(?:LIVE|(?:[1-9]|[12]\d|30)\s+minutes?\b)", re.IGNORECASE)
 
     soup = HTMLParser(html_data.content)
 
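The event-time regex is tightened here: the old pattern accepted any minute count (and needlessly captured the unit), while the new one only matches "LIVE" or a countdown of 1 to 30 minutes. A quick standalone check of the difference; the sample strings are invented for illustration, only the two patterns are copied from the hunk above:

```python
import re

# Old and new patterns exactly as they appear in the hunk above.
old_pattern = re.compile(r"^(?:LIVE|\d+\s+(minutes?)\b)", re.IGNORECASE)
new_pattern = re.compile(r"^(?:LIVE|(?:[1-9]|[12]\d|30)\s+minutes?\b)", re.IGNORECASE)

for text in ["LIVE", "5 minutes", "30 minutes", "45 minutes", "0 minutes"]:
    print(
        f"{text!r}: old={bool(old_pattern.search(text))} "
        f"new={bool(new_pattern.search(text))}"
    )

# The old regex also matches "45 minutes" and "0 minutes"; the new one
# accepts only "LIVE" and counts from 1 through 30.
```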
@@ -67,6 +67,14 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
         if not (rank_elem := li_item.css_first(".f1-podium--rank")):
             continue
 
+        if not (time_elem := li_item.css_first(".SaatZamanBilgisi")):
+            continue
+
+        time_text = time_elem.text(strip=True)
+
+        if not pattern.search(time_text):
+            continue
+
         sport = rank_elem.text(strip=True)
 
         if not (driver_elem := li_item.css_first(".f1-podium--driver")):
@@ -83,14 +91,6 @@ async def get_events(cached_keys: list[str]) -> list[dict[str, str]]:
             if not (href := link.attributes.get("href")):
                 continue
 
-            if not (time_elem := li_item.css_first(".SaatZamanBilgisi")):
-                continue
-
-            time_text = time_elem.text(strip=True)
-
-            if not pattern.search(time_text):
-                continue
-
             events.append(
                 {
                     "sport": sport,
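Together with the previous hunk, this moves the kickoff-time filter ahead of the per-link work, so events outside the LIVE/30-minute window are skipped before any stream links are collected. A self-contained sketch of that flow using selectolax, as the diff does; the HTML fragment and the loop over `<a>` tags are invented for illustration, only the CSS class names and the filter logic come from the diff:

```python
import re

from selectolax.parser import HTMLParser

pattern = re.compile(r"^(?:LIVE|(?:[1-9]|[12]\d|30)\s+minutes?\b)", re.IGNORECASE)

# Invented fragment standing in for the scraped schedule page.
html = """
<ul>
  <li><span class="f1-podium--rank">Soccer</span>
      <span class="SaatZamanBilgisi">12 minutes</span>
      <a href="/stream/1">watch</a></li>
  <li><span class="f1-podium--rank">Basketball</span>
      <span class="SaatZamanBilgisi">3 hours</span>
      <a href="/stream/2">watch</a></li>
</ul>
"""

events: list[dict[str, str]] = []

for li_item in HTMLParser(html).css("li"):
    # Skip entries that lack a sport label or a start-time label.
    if not (rank_elem := li_item.css_first(".f1-podium--rank")):
        continue

    if not (time_elem := li_item.css_first(".SaatZamanBilgisi")):
        continue

    # Keep only events that are live or starting within 30 minutes.
    if not pattern.search(time_elem.text(strip=True)):
        continue

    sport = rank_elem.text(strip=True)

    for link in li_item.css("a"):
        if not (href := link.attributes.get("href")):
            continue

        events.append({"sport": sport, "href": href})

print(events)  # only the "Soccer" entry passes the time filter
```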
@@ -260,6 +260,19 @@
         ]
       }
     },
+    {
+      "COPA DEL REY": {
+        "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/80.png",
+        "names": [
+          "KING'S CUP",
+          "KINGS CUP",
+          "LA COPA",
+          "SPAIN COPA DEL REY",
+          "SPANISH COPA DEL REY",
+          "SPANISH CUP"
+        ]
+      }
+    },
     {
       "COPA LIBERTADORES": {
         "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/58.png",
@@ -283,6 +296,12 @@
         "names": ["ITALIAN CUP"]
       }
     },
+    {
+      "COUPE DE FRANCE": {
+        "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/182.png",
+        "names": ["FRANCE CUP", "FRENCH CUP"]
+      }
+    },
     {
       "EFL": {
         "logo": "https://i.gyazo.com/c8842fbcb2eeb6a53bc69fa6055b8b5d.png",
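Both additions to this JSON file follow the same shape: a list of single-key objects mapping a canonical competition name to a logo URL and a list of broadcast aliases. A hedged sketch of how entries like these can be flattened into an alias lookup; the file name and loading code are assumptions, only the two new entries are reproduced from the diff:

```python
import json

LEAGUES_JSON = """
[
  {
    "COPA DEL REY": {
      "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/80.png",
      "names": ["KING'S CUP", "KINGS CUP", "LA COPA", "SPAIN COPA DEL REY",
                "SPANISH COPA DEL REY", "SPANISH CUP"]
    }
  },
  {
    "COUPE DE FRANCE": {
      "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/182.png",
      "names": ["FRANCE CUP", "FRENCH CUP"]
    }
  }
]
"""

alias_to_league: dict[str, tuple[str, str]] = {}

for entry in json.loads(LEAGUES_JSON):
    for league, info in entry.items():
        # The canonical name and every alias resolve to the same league/logo pair.
        for name in (league, *info["names"]):
            alias_to_league[name.upper()] = (league, info["logo"])

print(alias_to_league["KINGS CUP"][0])   # COPA DEL REY
print(alias_to_league["FRENCH CUP"][0])  # COUPE DE FRANCE
```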
@@ -17,7 +17,7 @@ TAG = "WATCHFTY"
 CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=10_800)
 
-API_FILE = Cache(f"{TAG.lower()}-api.json", exp=28_800)
+API_FILE = Cache(f"{TAG.lower()}-api.json", exp=19_800)
 
 API_URL = "https://api.watchfooty.st"
 
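Assuming `exp` is a time-to-live in seconds (the `Cache` class itself is not part of this diff), the API cache window shrinks from 8 hours to 5.5 hours. A minimal sketch of what such an expiry check could look like; the real implementation in the repo may differ:

```python
import time
from pathlib import Path

OLD_EXP, NEW_EXP = 28_800, 19_800
print(OLD_EXP / 3600, NEW_EXP / 3600)  # 8.0 5.5


def is_expired(path: Path, exp: int) -> bool:
    """One plausible expiry check: stale once the file is older than `exp` seconds."""
    return not path.exists() or (time.time() - path.stat().st_mtime) > exp
```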
@@ -76,6 +76,8 @@ async def process_event(
     context: BrowserContext,
 ) -> str | None:
 
+    pattern = re.compile(r"\((\d+)\)")
+
     page = await context.new_page()
 
     captured: list[str] = []
@@ -111,18 +113,31 @@ async def process_event(
 
         return
 
-    match = re.search(r"\((\d+)\)", text)
-
-    if not match or int(match[1]) == 0:
+    if not (match := pattern.search(text)) or int(match[1]) == 0:
         log.warning(f"URL {url_num}) No available stream links.")
 
         return
 
-    first_available = await page.wait_for_selector(
-        'a[href*="/stream/"]', timeout=3_000
-    )
+    try:
+        first_available = await page.wait_for_selector(
+            'a[href*="/stream/"]',
+            timeout=3_000,
+        )
+    except TimeoutError:
+        log.warning(f"URL {url_num}) No available stream links.")
 
-    await first_available.click()
+        return
+
+    if not (href := await first_available.get_attribute("href")):
+        log.warning(f"URL {url_num}) No available stream links.")
+
+        return
+
+    await page.goto(
+        href,
+        wait_until="domcontentloaded",
+        timeout=5_000,
+    )
 
     wait_task = asyncio.create_task(got_one.wait())
 
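The rewrite compiles the stream-count pattern once, wraps `wait_for_selector` in a `try/except` so a missing link is logged instead of raising, and navigates to the link's `href` directly rather than clicking it. A self-contained sketch of that wait-then-navigate pattern with Playwright's async API; the event URL, function name, and browser setup are placeholders, only the selector and timeouts mirror the diff:

```python
import asyncio

from playwright.async_api import TimeoutError as PlaywrightTimeoutError
from playwright.async_api import async_playwright


async def first_stream_href(event_url: str, url_num: int = 1) -> str | None:
    async with async_playwright() as pw:
        browser = await pw.chromium.launch(headless=True)
        page = await browser.new_page()

        await page.goto(event_url, wait_until="domcontentloaded", timeout=5_000)

        try:
            # Wait briefly for any stream link; give up quietly on timeout.
            first_available = await page.wait_for_selector(
                'a[href*="/stream/"]',
                timeout=3_000,
            )
        except PlaywrightTimeoutError:
            print(f"URL {url_num}) No available stream links.")
            return None

        # Navigating to the href directly avoids relying on click handlers.
        if not (href := await first_available.get_attribute("href")):
            print(f"URL {url_num}) No available stream links.")
            return None

        await page.goto(href, wait_until="domcontentloaded", timeout=5_000)

        await browser.close()
        return href


if __name__ == "__main__":
    asyncio.run(first_stream_href("https://example.com/event"))
```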
@@ -1,11 +1,7 @@
-## Base Log @ 2025-12-18 20:40 UTC
+## Base Log @ 2025-12-19 20:41 UTC
 
-### ✅ Working Streams: 144<br>❌ Dead Streams: 2
+### ✅ Working Streams: 146<br>❌ Dead Streams: 0
 
-| Channel | Error (Code) | Link |
-| ------- | ------------ | ---- |
-| ESPN U | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/10255` |
-| ION TV | HTTP Error (403) | `http://cord-cutter.net:8080/30550113/30550113/9297` |
 ---
 #### Base Channels URL
 ```