Mirror of https://github.com/doms9/iptv.git (synced 2025-12-09 11:59:03 +01:00)

Commit 00000d99f4: 38 changed files with 395070 additions and 0 deletions
48  .github/workflows/epg.yml  vendored  Normal file

@@ -0,0 +1,48 @@
name: fetch epg

on:
  schedule:
    - cron: "45 2,10,18 * * *"
  workflow_dispatch:

jobs:
  epg-fetcher:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cache venv
        uses: actions/cache@v3
        with:
          path: .venv
          key: shared-venv-${{ runner.os }}-${{ hashFiles('uv.lock') }}
          restore-keys: |
            shared-venv-${{ runner.os }}-

      - name: Install uv
        uses: astral-sh/setup-uv@v6
        with:
          version: "latest"
          enable-cache: true
          ignore-nothing-to-cache: true
          cache-dependency-glob: "uv.lock"

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version-file: "pyproject.toml"

      - name: Fetch EPG
        run: uv run EPG/fetch.py

      - name: Push changes
        uses: stefanzweifel/git-auto-commit-action@v6
        with:
          commit_message: "update EPG"
          file_pattern: "EPG/TV.xml"
          commit_author: "GitHub Actions Bot <actions@github.com>"
          commit_user_name: "GitHub Actions Bot"
          commit_user_email: "actions@github.com"
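The schedule above fires at 02:45, 10:45, and 18:45 UTC each day. A minimal sketch (not part of the repo) for checking the next run locally, assuming the third-party croniter package is installed:

    from datetime import datetime, timezone

    from croniter import croniter  # assumed extra dependency, used only for this illustration

    # Same cron expression as the workflow's schedule trigger.
    next_run = croniter("45 2,10,18 * * *", datetime.now(timezone.utc)).get_next(datetime)
    print(f"next EPG fetch at {next_run:%Y-%m-%d %H:%M} UTC")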
27  .github/workflows/health.yml  vendored  Normal file

@@ -0,0 +1,27 @@
name: health check

on:
  schedule:
    - cron: "30 2,8,14,20 * * *"
  workflow_dispatch:

jobs:
  health-check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Run health.sh
        run: bash health.sh

      - name: Update log
        uses: stefanzweifel/git-auto-commit-action@v6
        with:
          commit_message: "health log"
          file_pattern: "readme.md"
          commit_author: "GitHub Actions Bot <actions@github.com>"
          commit_user_name: "GitHub Actions Bot"
          commit_user_email: "actions@github.com"
85  .github/workflows/m3u8.yml  vendored  Normal file

@@ -0,0 +1,85 @@
name: fetch m3u8

on:
  schedule:
    - cron: "0 * * * *"
  workflow_dispatch:

jobs:
  m3u8-fetcher:
    runs-on: ubuntu-latest
    steps:
      - name: Check time
        id: check_time
        run: |
          hour=$(TZ=America/New_York date +%-H)

          (( hour >= 8 && hour <= 23 )) && echo "run=true" >> $GITHUB_OUTPUT || echo "run=false" >> $GITHUB_OUTPUT

      - name: Checkout
        if: steps.check_time.outputs.run == 'true'
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cache venv
        if: steps.check_time.outputs.run == 'true'
        uses: actions/cache@v3
        with:
          path: .venv
          key: shared-venv-${{ runner.os }}-${{ hashFiles('uv.lock') }}
          restore-keys: |
            shared-venv-${{ runner.os }}-

      - name: Cache cert
        uses: actions/cache@v3
        with:
          path: M3U8/scrape/utils/cached-ca.pem
          key: cert-cache-${{ runner.os }}-${{ hashFiles('M3U8/scrape/utils/cached-ca.pem') }}
          restore-keys: |
            cert-cache-${{ runner.os }}-

      - name: Install uv
        if: steps.check_time.outputs.run == 'true'
        uses: astral-sh/setup-uv@v6
        with:
          version: "latest"
          enable-cache: true
          ignore-nothing-to-cache: true
          cache-dependency-glob: "uv.lock"

      - name: Setup Python
        if: steps.check_time.outputs.run == 'true'
        uses: actions/setup-python@v5
        with:
          python-version-file: "pyproject.toml"

      - name: Cache Playwright browsers
        id: cache-pw
        if: steps.check_time.outputs.run == 'true'
        uses: actions/cache@v3
        with:
          path: ~/.cache/ms-playwright
          key: ${{ runner.os }}-playwright
          restore-keys: |
            ${{ runner.os }}-playwright

      - name: Install Playwright browsers
        if: steps.check_time.outputs.run == 'true' && steps.cache-pw.outputs.cache-hit != 'true'
        run: |
          uv run playwright install
          uv run playwright install-deps

      - name: Fetch M3U8
        if: steps.check_time.outputs.run == 'true'
        run: uv run M3U8/fetch.py

      - name: Push changes
        if: steps.check_time.outputs.run == 'true'
        uses: stefanzweifel/git-auto-commit-action@v6
        with:
          commit_message: "update M3U8"
          file_pattern: "M3U8/TV.m3u8"
          commit_author: "GitHub Actions Bot <actions@github.com>"
          commit_user_name: "GitHub Actions Bot"
          commit_user_email: "actions@github.com"
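The Check time step gates every other step: the hourly cron always triggers, but work only proceeds between 08:00 and 23:59 America/New_York. A minimal sketch (not part of the repo) of the same gate in Python, assuming a 3.10+ interpreter with the stdlib zoneinfo module:

    from datetime import datetime
    from zoneinfo import ZoneInfo  # stdlib since Python 3.9

    def should_run(now: datetime | None = None) -> bool:
        """Mirror the workflow's shell check: run only from 08:00 to 23:59 ET."""
        now = now or datetime.now(ZoneInfo("America/New_York"))
        return 8 <= now.hour <= 23

    print("run=true" if should_run() else "run=false")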
15  .gitignore  vendored  Normal file

@@ -0,0 +1,15 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info

# Virtual environments
.venv

# Misc
.python-version
stuff/
M3U8/scrapers/caches/*
386668  EPG/TV.xml  Normal file

File diff suppressed because one or more lines are too long
195  EPG/fetch.py  Normal file

@@ -0,0 +1,195 @@
#!/usr/bin/env python3
import asyncio
import gzip
import re
from pathlib import Path
from xml.etree import ElementTree as ET

import httpx

epg_file = Path(__file__).parent / "TV.xml"

epg_urls = [
    "https://epgshare01.online/epgshare01/epg_ripper_CA2.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_DUMMY_CHANNELS.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_FANDUEL1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_MY1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_PLEX1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_UK1.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_US2.xml.gz",
    "https://epgshare01.online/epgshare01/epg_ripper_US_LOCALS1.xml.gz",
    "https://i.mjh.nz/Roku/all.xml.gz",
]

client = httpx.AsyncClient(
    timeout=5,
    follow_redirects=True,
    http2=True,
    headers={
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
    },
)

live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"

dummies = {
    "Basketball.Dummy.us": live_img,
    "Golf.Dummy.us": live_img,
    "Live.Event.us": live_img,
    "MLB.Baseball.Dummy.us": None,
    "NBA.Basketball.Dummy.us": None,
    "NFL.Dummy.us": None,
    "NHL.Hockey.Dummy.us": None,
    "PPV.EVENTS.Dummy.us": live_img,
    "Racing.Dummy.us": live_img,
    "Soccer.Dummy.us": live_img,
    "Tennis.Dummy.us": live_img,
    "WNBA.dummy.us": None,
}

replace_ids = {
    "NCAA Sports": {"old": "Sports.Dummy.us", "new": "NCAA.Sports.Dummy.us"},
    "UFC": {"old": "UFC.247.Dummy.us", "new": "UFC.Dummy.us"},
}


def get_tvg_ids() -> dict[str, str]:
    base_m3u8 = (
        (Path(__file__).parent.parent / "M3U8" / "base.m3u8")
        .read_text(encoding="utf-8")
        .splitlines()
    )

    tvg = {}

    for line in base_m3u8:
        if line.startswith("#EXTINF"):
            tvg_id = re.search(r'tvg-id="([^"]*)"', line)[1]
            tvg_logo = re.search(r'tvg-logo="([^"]*)"', line)[1]

            tvg[tvg_id] = tvg_logo

    return tvg


async def fetch_xml(url: str) -> ET.Element | None:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        print(f'Failed to fetch "{url}": {e}')
        return

    try:
        decompressed_data = gzip.decompress(r.content)

        return ET.fromstring(decompressed_data)

    except Exception as e:
        print(f'Failed to decompress and parse XML from "{url}": {e}')


def hijack_id(
    old: str,
    new: str,
    text: str,
    root: ET.Element,
) -> None:

    og_channel = root.find(f"./channel[@id='{old}']")

    if og_channel is not None:
        new_channel = ET.Element(og_channel.tag, {**og_channel.attrib, "id": new})

        display_name = og_channel.find("display-name")

        if display_name is not None:
            new_channel.append(ET.Element("display-name", display_name.attrib))
            new_channel[-1].text = text

        for child in og_channel:
            if child.tag == "display-name":
                continue

            new_child = ET.Element(child.tag, child.attrib)
            new_child.text = child.text
            new_channel.append(new_child)  # carry the remaining children (icon, url, ...) over to the new channel

        root.remove(og_channel)

        root.append(new_channel)

    for program in root.findall(f"./programme[@channel='{old}']"):
        new_program = ET.Element(program.tag, {**program.attrib, "channel": new})

        for child in program:
            new_child = ET.Element(child.tag, child.attrib)
            new_child.text = child.text
            new_program.append(new_child)

        for tag_name in ["title", "desc", "sub-title"]:
            tag = new_program.find(tag_name)

            if tag is not None:
                tag.text = text

        root.remove(program)

        root.append(new_program)


async def main() -> None:
    tvg_ids = get_tvg_ids()

    tvg_ids |= dummies | {v["old"]: live_img for v in replace_ids.values()}

    root = ET.Element("tv")

    tasks = [fetch_xml(url) for url in epg_urls]

    results = await asyncio.gather(*tasks)

    for epg_data in results:
        if epg_data is None:
            continue

        for channel in epg_data.findall("channel"):
            if (channel_id := channel.get("id")) in tvg_ids:
                for icon_tag in channel.findall("icon"):
                    if logo := tvg_ids.get(channel_id):
                        icon_tag.set("src", logo)

                if (url_tag := channel.find("url")) is not None:
                    channel.remove(url_tag)

                root.append(channel)

        for program in epg_data.findall("programme"):
            if program.get("channel") in tvg_ids:
                title_text = program.find("title").text
                subtitle = program.find("sub-title")

                if (
                    title_text in ["NHL Hockey", "Live: NFL Football"]
                    and subtitle is not None
                ):
                    program.find("title").text = f"{title_text} {subtitle.text}"

                root.append(program)

    for k, v in replace_ids.items():
        hijack_id(**v, text=k, root=root)

    tree = ET.ElementTree(root)

    tree.write(epg_file, encoding="utf-8", xml_declaration=True)

    print(f"EPG saved to {epg_file.resolve()}")


if __name__ == "__main__":
    asyncio.run(main())

    try:
        asyncio.run(client.aclose())
    except Exception:
        pass
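fetch.py keys everything off the tvg-id attributes in M3U8/base.m3u8: get_tvg_ids() builds an id-to-logo map, the dummy and replacement ids are merged in, and only channels and programmes whose ids appear in that map are kept. A minimal sketch of the same attribute extraction on a single #EXTINF line (the logo URL here is a made-up placeholder):

    import re

    # Hypothetical sample entry in the style of M3U8/base.m3u8.
    line = (
        '#EXTINF:-1 tvg-chno="1" tvg-id="A.and.E.HD.East.us2" '
        'tvg-logo="https://example.com/ae.png" group-title="TV",A&E TV'
    )

    tvg_id = re.search(r'tvg-id="([^"]*)"', line)[1]
    tvg_logo = re.search(r'tvg-logo="([^"]*)"', line)[1]

    print(tvg_id, tvg_logo)  # A.and.E.HD.East.us2 https://example.com/ae.png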
24  LICENSE  Normal file

@@ -0,0 +1,24 @@
This is free and unencumbered software released into the public domain.

Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.

In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

For more information, please refer to <http://unlicense.org/>
1480  M3U8/TV.m3u8  Normal file

File diff suppressed because it is too large
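M3U8/TV.m3u8 is generated output rather than a hand-maintained list: M3U8/fetch.py (further down) reads base.m3u8 and adds the scraped event streams into a combined playlist. A rough sketch of that idea, not the repo's actual logic; scraped_event_lines is a hypothetical list of #EXTINF/URL pairs:

    from pathlib import Path

    def combine(base: Path, scraped_event_lines: list[str], out: Path) -> None:
        # Start from the curated base playlist, then append the event entries.
        lines = base.read_text(encoding="utf-8").splitlines()
        lines += [""] + scraped_event_lines
        out.write_text("\n".join(lines) + "\n", encoding="utf-8")

    # combine(Path("M3U8/base.m3u8"), scraped_event_lines, Path("M3U8/TV.m3u8"))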
442  M3U8/base.m3u8  Normal file

@@ -0,0 +1,442 @@
#EXTM3U url-tvg="https://raw.githubusercontent.com/doms9/iptv/refs/heads/default/EPG/TV.xml"
|
||||
|
||||
#EXTINF:-1 tvg-chno="1" tvg-id="A.and.E.HD.East.us2" tvg-name="A&E TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s51529_dark_360w_270h.png" group-title="TV",A&E TV
|
||||
http://fl1.moveonjoy.com/ANE/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="2" tvg-id="WFTS-DT.us_locals1" tvg-name="ABC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10003_dark_360w_270h.png" group-title="TV",ABC
|
||||
https://fl1.moveonjoy.com/FL_Tampa_ABC/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="3" tvg-id="ACC.Network.us2" tvg-name="ACC Network" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s111871_dark_360w_270h.png" group-title="TV",ACC Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/9273
|
||||
|
||||
#EXTINF:-1 tvg-chno="4" tvg-id="AdultSwim.com.Cartoon.Network.us2" tvg-name="Adult Swim" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16496_dark_360w_270h.png" group-title="TV",Adult Swim
|
||||
https://turnerlive.warnermediacdn.com/hls/live/2023183/aseast/noslate/VIDEO_1_5128000.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="5" tvg-id="Altitude.Sports.us2" tvg-name="Altitude Sports" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s44263_dark_360w_270h.png" group-title="TV",Altitude Sports
|
||||
http://cord-cutter.net:8080/30550113/30550113/79545
|
||||
|
||||
#EXTINF:-1 tvg-chno="6" tvg-id="AMC.HD.us2" tvg-name="AMC" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10021_dark_360w_270h.png" group-title="TV",AMC
|
||||
http://cord-cutter.net:8080/30550113/30550113/18925
|
||||
|
||||
#EXTINF:-1 tvg-chno="7" tvg-id="Animal.Planet.HD.us2" tvg-name="Animal Planet" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16331_dark_360w_270h.png" group-title="TV",Animal Planet
|
||||
http://fl1.moveonjoy.com/Animal_Planet/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="8" tvg-id="ASPiRE.HD.us2" tvg-name="Aspire" tvg-logo="https://i.gyazo.com/0dec42cc5ef48c489cc10db906dc5b9b.png" group-title="TV",Aspire
|
||||
http://fl1.moveonjoy.com/Aspire/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="9" tvg-id="Astro.Premier.League.my" tvg-name="Astro Premier League" tvg-logo="https://1000logos.net/wp-content/uploads/2020/08/Astro-Logo-500x281.png" group-title="TV",Astro Premier League
|
||||
http://stalker.klma2023.net/play/live.php?mac=00:1B:79:F8:59:0E&stream=1163984&extension=ts
|
||||
|
||||
#EXTINF:-1 tvg-chno="10" tvg-id="BBC.America.HD.us2" tvg-name="BBC America" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s64492_dark_360w_270h.png" group-title="TV",BBC America
|
||||
http://cord-cutter.net:8080/30550113/30550113/20194
|
||||
|
||||
#EXTINF:-1 tvg-chno="11" tvg-id="BBC.News.(North.America).HD.us2" tvg-name="BBC World News" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s89542_dark_360w_270h.png" group-title="TV",BBC World News
|
||||
http://cord-cutter.net:8080/30550113/30550113/139752
|
||||
|
||||
#EXTINF:-1 tvg-chno="12" tvg-id="BET.HD.us2" tvg-name="BET" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10051_dark_360w_270h.png" group-title="TV",BET
|
||||
http://fl1.moveonjoy.com/BET_EAST/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="13" tvg-id="Big.Ten.Network.HD.us2" tvg-name="Big Ten Network" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s56783_dark_360w_270h.png" group-title="TV",Big Ten Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/9828
|
||||
|
||||
#EXTINF:-1 tvg-chno="14" tvg-id="Bloomberg.HD.us2" tvg-name="Bloomberg TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s71799_dark_360w_270h.png" group-title="TV",Bloomberg TV
|
||||
http://cord-cutter.net:8080/30550113/30550113/15158
|
||||
|
||||
#EXTINF:-1 tvg-chno="15" tvg-id="Boomerang.us2" tvg-name="Boomerang" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s21883_dark_360w_270h.png" group-title="TV",Boomerang
|
||||
http://cord-cutter.net:8080/30550113/30550113/14741
|
||||
|
||||
#EXTINF:-1 tvg-chno="16" tvg-id="Bounce.TV.us2" tvg-name="Bounce TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s73067_dark_360w_270h.png" group-title="TV",Bounce TV
|
||||
http://cord-cutter.net:8080/30550113/30550113/48323
|
||||
|
||||
#EXTINF:-1 tvg-chno="17" tvg-id="Bravo.HD.us2" tvg-name="Bravo TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10057_dark_360w_270h.png" group-title="TV",Bravo TV
|
||||
http://fl1.moveonjoy.com/BRAVO/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="18" tvg-id="BUZZR.Stream.us2" tvg-name="Buzzr" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s93430_dark_360w_270h.png" group-title="TV",Buzzr
|
||||
https://buzzrota-web.amagi.tv/playlist.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="19" tvg-id="CSPAN.us2" tvg-name="C-SPAN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10161_dark_360w_270h.png" group-title="TV",C-SPAN
|
||||
http://fl1.moveonjoy.com/C-SPAN/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="20" tvg-id="Cartoon.Network.HD.us2" tvg-name="Cartoon Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12131_dark_360w_270h.png" group-title="TV",Cartoon Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/46708
|
||||
|
||||
#EXTINF:-1 tvg-chno="21" tvg-id="WCBS-DT.us_locals1" tvg-name="CBS" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10098_dark_360w_270h.png" group-title="TV",CBS
|
||||
http://cord-cutter.net:8080/30550113/30550113/120749
|
||||
|
||||
#EXTINF:-1 tvg-chno="22" tvg-id="plex.tv.CBS.Sports.Golazo.Network.plex" tvg-name="CBS Sports Golazo Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s133691_dark_360w_270h.png" group-title="TV",CBS Sports Golazo Network
|
||||
https://dai.google.com/linear/hls/event/GxrCGmwST0ixsrc_QgB6qw/master.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="23" tvg-id="CBS.Sports.Network.HD.us2" tvg-name="CBS Sports Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16365_dark_360w_270h.png" group-title="TV",CBS Sports Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/10454
|
||||
|
||||
#EXTINF:-1 tvg-chno="24" tvg-id="CMT.HD.us2" tvg-name="CMT" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10138_dark_360w_270h.png" group-title="TV",CMT
|
||||
https://fl1.moveonjoy.com/CMT/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="25" tvg-id="CNBC.HD.us2" tvg-name="CNBC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10139_dark_360w_270h.png" group-title="TV",CNBC
|
||||
https://fl1.moveonjoy.com/CNBC/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="26" tvg-id="CNN.HD.us2" tvg-name="CNN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58646_dark_360w_270h.png" group-title="TV",CNN
|
||||
https://turnerlive.warnermediacdn.com/hls/live/586495/cnngo/cnn_slate/VIDEO_0_3564000.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="27" tvg-id="Comedy.Central.HD.us2" tvg-name="Comedy Central" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10149_dark_360w_270h.png" group-title="TV",Comedy Central
|
||||
http://cord-cutter.net:8080/30550113/30550113/7466
|
||||
|
||||
#EXTINF:-1 tvg-chno="28" tvg-id="Comedy.TV.HD.us2" tvg-name="Comedy TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s82470_dark_360w_270h.png" group-title="TV",Comedy TV
|
||||
https://fl1.moveonjoy.com/Comedy_TV/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="29" tvg-id="Comet.us2" tvg-name="Comet TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s97051_dark_360w_270h.png" group-title="TV",Comet TV
|
||||
http://cord-cutter.net:8080/30550113/30550113/125831
|
||||
|
||||
#EXTINF:-1 tvg-chno="30" tvg-id="Cooking.Channel.HD.us2" tvg-name="Cooking Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s30156_dark_360w_270h.png" group-title="TV",Cooking Channel
|
||||
https://fl1.moveonjoy.com/COOKING_CHANNEL/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="31" tvg-id="Court.TV.us2" tvg-name="Court TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s117160_dark_360w_270h.png" group-title="TV",Court TV
|
||||
http://cord-cutter.net:8080/30550113/30550113/21092
|
||||
|
||||
#EXTINF:-1 tvg-chno="32" tvg-id="COZI.TV.us2" tvg-name="Cozi TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s78851_dark_360w_270h.png" group-title="TV",Cozi TV
|
||||
http://cord-cutter.net:8080/30550113/30550113/11868
|
||||
|
||||
#EXTINF:-1 tvg-chno="33" tvg-id="Crime.and.Investigation.Network.HD.us2" tvg-name="Crime & Investigation Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s61469_dark_360w_270h.png" group-title="TV",Crime & Investigation Network
|
||||
https://fl1.moveonjoy.com/Crime_and_Investigation_Network/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="34" tvg-id="WKCF-DT.us_locals1" tvg-name="CW" tvg-logo="https://i.gyazo.com/afd5b481b327d204087dfde6a7741f9d.png" group-title="TV",CW
|
||||
http://cord-cutter.net:8080/30550113/30550113/120893
|
||||
|
||||
#EXTINF:-1 tvg-chno="35" tvg-id="Discovery.Channel.HD.us2" tvg-name="Discovery Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11150_dark_360w_270h.png" group-title="TV",Discovery Channel
|
||||
http://cord-cutter.net:8080/30550113/30550113/46720
|
||||
|
||||
#EXTINF:-1 tvg-chno="36" tvg-id="Discovery.Family.Channel.HD.us2" tvg-name="Discovery Family Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16618_dark_360w_270h.png" group-title="TV",Discovery Family Channel
|
||||
http://cord-cutter.net:8080/30550113/30550113/10538
|
||||
|
||||
#EXTINF:-1 tvg-chno="37" tvg-id="Discovery.Life.Channel.us2" tvg-name="Discovery Life" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16125_dark_360w_270h.png" group-title="TV",Discovery Life
|
||||
https://fl1.moveonjoy.com/DISCOVERY_LIFE/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="38" tvg-id="Science.Channel.HD.us2" tvg-name="Discovery Science" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s24282_dark_360w_270h.png" group-title="TV",Discovery Science
|
||||
https://fl1.moveonjoy.com/Discovery_Science/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="39" tvg-id="Disney.Channel.HD.us2" tvg-name="Disney" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10171_dark_360w_270h.png" group-title="TV",Disney
|
||||
http://cord-cutter.net:8080/30550113/30550113/2206
|
||||
|
||||
#EXTINF:-1 tvg-chno="40" tvg-id="Disney.XD.HD.us2" tvg-name="Disney XD" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18279_dark_360w_270h.png" group-title="TV",Disney XD
|
||||
http://cord-cutter.net:8080/30550113/30550113/75621
|
||||
|
||||
#EXTINF:-1 tvg-chno="41" tvg-id="E!.Entertainment.Television.HD.us2" tvg-name="E! Entertainment" tvg-logo="https://i.gyazo.com/f73b80e3eb56cec06df6705d00e2f422.png" group-title="TV",E! Entertainment
|
||||
http://fl1.moveonjoy.com/E_ENTERTAINMENT_TELEVISION/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="42" tvg-id="ESPN.HD.us2" tvg-name="ESPN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10179_dark_360w_270h.png" group-title="TV",ESPN
|
||||
http://cord-cutter.net:8080/30550113/30550113/14197
|
||||
|
||||
#EXTINF:-1 tvg-chno="43" tvg-id="ESPNEWS.HD.us2" tvg-name="ESPN News" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16485_dark_360w_270h.png" group-title="TV",ESPN News
|
||||
http://cord-cutter.net:8080/30550113/30550113/17707
|
||||
|
||||
#EXTINF:-1 tvg-chno="44" tvg-id="ESPNU.HD.us2" tvg-name="ESPN U" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s45654_dark_360w_270h.png" group-title="TV",ESPN U
|
||||
http://cord-cutter.net:8080/30550113/30550113/10255
|
||||
|
||||
#EXTINF:-1 tvg-chno="45" tvg-id="ESPN2.HD.us2" tvg-name="ESPN2" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12444_dark_360w_270h.png" group-title="TV",ESPN2
|
||||
http://cord-cutter.net:8080/30550113/30550113/2210
|
||||
|
||||
#EXTINF:-1 tvg-chno="46" tvg-id="FanDuel.Sports.Network.Detroit.24/7.HDTV.us" tvg-name="FDSN Detroit" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s54286_dark_360w_270h.png" group-title="TV",FDSN Detroit
|
||||
http://cord-cutter.net:8080/30550113/30550113/20936
|
||||
|
||||
#EXTINF:-1 tvg-chno="47" tvg-id="FanDuel.Sports.Network.Florida.HDTV.(Out.of.Market).us" tvg-name="FDSN Florida" tvg-logo="https://i.gyazo.com/fad701fbaaafe161b13b23ed9b50179b.png" group-title="TV",FDSN Florida
|
||||
http://cord-cutter.net:8080/30550113/30550113/46794
|
||||
|
||||
#EXTINF:-1 tvg-chno="48" tvg-id="FanDuel.Sports.Network.Midwest.24/7.HDTV.us" tvg-name="FDSN Midwest" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11058_dark_360w_270h.png" group-title="TV",FDSN Midwest
|
||||
http://cord-cutter.net:8080/30550113/30550113/66795
|
||||
|
||||
#EXTINF:-1 tvg-chno="49" tvg-id="FanDuel.Sports.Network.North.HDTV.us" tvg-name="FDSN North" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10977_dark_360w_270h.png" group-title="TV",FDSN North
|
||||
http://cord-cutter.net:8080/30550113/30550113/58827
|
||||
|
||||
#EXTINF:-1 tvg-chno="50" tvg-id="FanDuel.Sports.Network.Ohio.(Cleveland).HDTV.us" tvg-name="FDSN Ohio" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49691_dark_360w_270h.png" group-title="TV",FDSN Ohio
|
||||
http://cord-cutter.net:8080/30550113/30550113/17752
|
||||
|
||||
#EXTINF:-1 tvg-chno="51" tvg-id="FanDuel.Sports.Network.Oklahoma.24/7.HDTV.(Tulsa).us" tvg-name="FDSN Oklahoma" tvg-logo="https://i.gyazo.com/80ad6fd142cd67f06eef58d9ce5aa72b.png" group-title="TV",FDSN Oklahoma
|
||||
http://cord-cutter.net:8080/30550113/30550113/20934
|
||||
|
||||
#EXTINF:-1 tvg-chno="52" tvg-id="FanDuel.Sports.Network.SoCal.HDTV.us" tvg-name="FDSN SoCal" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16743_dark_360w_270h.png" group-title="TV",FDSN SoCal
|
||||
http://cord-cutter.net:8080/30550113/30550113/221151
|
||||
|
||||
#EXTINF:-1 tvg-chno="53" tvg-id="FanDuel.Sports.Network.Southeast.HDTV.(Mont./Birm./Dothan/Mobile.AL).us" tvg-name="FDSN Southeast" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s20789_dark_360w_270h.png" group-title="TV",FDSN Southeast
|
||||
http://cord-cutter.net:8080/30550113/30550113/81111
|
||||
|
||||
#EXTINF:-1 tvg-chno="54" tvg-id="FanDuel.Sports.Network.Southwest.HDTV.24/7.(Main).us" tvg-name="FDSN Southwest" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s59629_dark_360w_270h.png" group-title="TV",FDSN Southwest
|
||||
http://cord-cutter.net:8080/30550113/30550113/21843
|
||||
|
||||
#EXTINF:-1 tvg-chno="55" tvg-id="FanDuel.Sports.Network.Sun.South.24/7.HDTV.(South.Marlins,.Rays,.Heat).us" tvg-name="FDSN Sun" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s61084_dark_360w_270h.png" group-title="TV",FDSN Sun
|
||||
http://cord-cutter.net:8080/30550113/30550113/104917
|
||||
|
||||
#EXTINF:-1 tvg-chno="56" tvg-id="FanDuel.Sports.Network.West.HDTV.us" tvg-name="FDSN West" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s59627_dark_360w_270h.png" group-title="TV",FDSN West
|
||||
http://cord-cutter.net:8080/30550113/30550113/20932
|
||||
|
||||
#EXTINF:-1 tvg-chno="57" tvg-id="FanDuel.Sports.Network.Wisconsin.24/7.HDTV.us" tvg-name="FDSN Wisconsin" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16348_dark_360w_270h.png" group-title="TV",FDSN Wisconsin
|
||||
http://cord-cutter.net:8080/30550113/30550113/78599
|
||||
|
||||
#EXTINF:-1 tvg-chno="58" tvg-id="plex.tv.FIFA+.plex" tvg-name="FIFA+ TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s136235_dark_360w_270h.png" group-title="TV",FIFA+ TV
|
||||
https://jmp2.uk/stvp-IN270000230
|
||||
|
||||
#EXTINF:-1 tvg-chno="59" tvg-id="Food.Network.HD.us2" tvg-name="Food Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s44718_dark_360w_270h.png" group-title="TV",Food Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/7323
|
||||
|
||||
#EXTINF:-1 tvg-chno="60" tvg-id="WFLX-DT.us_locals1" tvg-name="Fox" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s28719_dark_360w_270h.png" group-title="TV",Fox
|
||||
http://cord-cutter.net:8080/30550113/30550113/121595
|
||||
|
||||
#EXTINF:-1 tvg-chno="61" tvg-id="Fox.Business.HD.us2" tvg-name="Fox Business" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58649_dark_360w_270h.png" group-title="TV",Fox Business
|
||||
http://cord-cutter.net:8080/30550113/30550113/17639
|
||||
|
||||
#EXTINF:-1 tvg-chno="62" tvg-id="Fox.News.Channel.HD.us2" tvg-name="Fox News" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16374_dark_360w_270h.png" group-title="TV",Fox News
|
||||
http://cord-cutter.net:8080/30550113/30550113/1818
|
||||
|
||||
#EXTINF:-1 tvg-chno="63" tvg-id="FS1.Fox.Sports.1.HD.us2" tvg-name="Fox Sports 1" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s82541_dark_360w_270h.png" group-title="TV",Fox Sports 1
|
||||
http://cord-cutter.net:8080/30550113/30550113/1846
|
||||
|
||||
#EXTINF:-1 tvg-chno="64" tvg-id="FS2.Fox.Sports.2.HD.us2" tvg-name="Fox Sports 2" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s33178_dark_360w_270h.png" group-title="TV",Fox Sports 2
|
||||
http://cord-cutter.net:8080/30550113/30550113/1847
|
||||
|
||||
#EXTINF:-1 tvg-chno="65" tvg-id="Freeform.HD.us2" tvg-name="Freeform TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10093_dark_360w_270h.png" group-title="TV",Freeform TV
|
||||
http://cord-cutter.net:8080/30550113/30550113/13370
|
||||
|
||||
#EXTINF:-1 tvg-chno="66" tvg-id="Fuse.HD.us2" tvg-name="FUSE" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14929_dark_360w_270h.png" group-title="TV",FUSE
|
||||
http://fl1.moveonjoy.com/FUSE/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="67" tvg-id="FX.HD.us2" tvg-name="FX" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14321_dark_360w_270h.png" group-title="TV",FX
|
||||
http://cord-cutter.net:8080/30550113/30550113/46690
|
||||
|
||||
#EXTINF:-1 tvg-chno="68" tvg-id="FX.Movie.Channel.HD.us2" tvg-name="FX Movie Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s70253_dark_360w_270h.png" group-title="TV",FX Movie Channel
|
||||
http://fl1.moveonjoy.com/FX_MOVIE/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="69" tvg-id="FXX.HD.us2" tvg-name="FXX" tvg-logo="https://raw.githubusercontent.com/tv-logo/tv-logos/refs/heads/main/countries/united-states/fxx-us.png" group-title="TV",FXX
|
||||
http://cord-cutter.net:8080/30550113/30550113/46699
|
||||
|
||||
#EXTINF:-1 tvg-chno="70" tvg-id="FYI.Channel.HD.us2" tvg-name="FYI TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16834_dark_360w_270h.png" group-title="TV",FYI TV
|
||||
http://fl1.moveonjoy.com/FYI/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="71" tvg-id="Game.Show.Network.HD.us2" tvg-name="Game Show Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14909_dark_360w_270h.png" group-title="TV",Game Show Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/120633
|
||||
|
||||
#EXTINF:-1 tvg-chno="72" tvg-id="get.us2" tvg-name="getTV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s82563_dark_360w_270h.png" group-title="TV",getTV
|
||||
http://cord-cutter.net:8080/30550113/30550113/18366
|
||||
|
||||
#EXTINF:-1 tvg-chno="73" tvg-id="Golf.Channel.HD.us2" tvg-name="Golf Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14899_dark_360w_270h.png" group-title="TV",Golf Channel
|
||||
https://fl1.moveonjoy.com/GOLF/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="74" tvg-id="Grit.us2" tvg-name="Grit TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s89922_dark_360w_270h.png" group-title="TV",Grit TV
|
||||
http://fl1.moveonjoy.com/GRIT_TV/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="75" tvg-id="Hallmark.Channel.HD.us2" tvg-name="Hallmark Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11221_dark_360w_270h.png" group-title="TV",Hallmark Channel
|
||||
http://fl1.moveonjoy.com/HALLMARK_CHANNEL/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="76" tvg-id="Hallmark.Family.us2" tvg-name="Hallmark Family" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s105723_dark_360w_270h.png" group-title="TV",Hallmark Family
|
||||
https://fl1.moveonjoy.com/HALLMARK_DRAMA/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="77" tvg-id="Hallmark.Mystery.HD.us2" tvg-name="Hallmark Mystery" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s61522_dark_360w_270h.png" group-title="TV",Hallmark Mystery
|
||||
https://fl1.moveonjoy.com/HALLMARK_MOVIES_MYSTERIES/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="78" tvg-id="HBO.East.us2" tvg-name="HBO" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10240_dark_360w_270h.png" group-title="TV",HBO
|
||||
http://cord-cutter.net:8080/30550113/30550113/46713
|
||||
|
||||
#EXTINF:-1 tvg-chno="79" tvg-id="HBO2.HD.us2" tvg-name="HBO 2" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s68140_dark_360w_270h.png" group-title="TV",HBO 2
|
||||
http://fl1.moveonjoy.com/HBO_2/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="80" tvg-id="HBO.Comedy.HD.us2" tvg-name="HBO Comedy" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s59839_dark_360w_270h.png" group-title="TV",HBO Comedy
|
||||
http://fl1.moveonjoy.com/HBO_COMEDY/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="81" tvg-id="HBO.Drama.us2" tvg-name="HBO Family" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s34879_dark_360w_270h.png" group-title="TV",HBO Family
|
||||
https://fl1.moveonjoy.com/HBO_FAMILY/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="82" tvg-id="HBO.Zone.HD.us2" tvg-name="HBO Zone" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18431_dark_360w_270h.png" group-title="TV",HBO Zone
|
||||
https://fl1.moveonjoy.com/HBO_ZONE/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="83" tvg-id="History.HD.us2" tvg-name="History Channel" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s87679_dark_360w_270h.png" group-title="TV",History Channel
|
||||
http://cord-cutter.net:8080/30550113/30550113/15017
|
||||
|
||||
#EXTINF:-1 tvg-chno="84" tvg-id="HLN.HD.us2" tvg-name="HLN TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10145_dark_360w_270h.png" group-title="TV",HLN TV
|
||||
https://turnerlive.warnermediacdn.com/hls/live/586496/cnngo/hln/VIDEO_0_3564000.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="85" tvg-id="Investigation.Discovery.HD.us2" tvg-name="Investigation Discovery" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16615_dark_360w_270h.png" group-title="TV",Investigation Discovery
|
||||
https://fl1.moveonjoy.com/INVESTIGATION_DISCOVERY/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="86" tvg-id="ION.Television.HD.us2" tvg-name="ION TV" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18633_dark_360w_270h.png" group-title="TV",ION TV
|
||||
http://cord-cutter.net:8080/30550113/30550113/9297
|
||||
|
||||
#EXTINF:-1 tvg-chno="87" tvg-id="Lifetime.HD.us2" tvg-name="Lifetime" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10918_dark_360w_270h.png" group-title="TV",Lifetime
|
||||
http://fl1.moveonjoy.com/LIFETIME/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="88" tvg-id="LMN.HD.us2" tvg-name="Lifetime Movie Network" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18480_dark_360w_270h.png" group-title="TV",Lifetime Movie Network
|
||||
https://fl1.moveonjoy.com/LIFETIME_MOVIE_NETWORK/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="89" tvg-id="Marquee.Sports.Network.HD.us2" tvg-name="Marquee Sports Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s113768_dark_360w_270h.png" group-title="TV",Marquee Sports Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/13379
|
||||
|
||||
#EXTINF:-1 tvg-chno="90" tvg-id="MLB.Network.HD.us2" tvg-name="MLB Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s62081_dark_360w_270h.png" group-title="TV",MLB Network
|
||||
https://fl1.moveonjoy.com/MLB_NETWORK/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="91" tvg-id="MOTORTREND.HD.us2" tvg-name="MotorTrend TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s31046_dark_360w_270h.png" group-title="TV",MotorTrend TV
|
||||
http://cord-cutter.net:8080/30550113/30550113/10399
|
||||
|
||||
#EXTINF:-1 tvg-chno="92" tvg-id="MSG.National.us2" tvg-name="MSG" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10979_dark_360w_270h.png" group-title="TV",MSG
|
||||
http://cord-cutter.net:8080/30550113/30550113/21090
|
||||
|
||||
#EXTINF:-1 tvg-chno="93" tvg-id="MSNBC.HD.us2" tvg-name="MSNBC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16300_dark_360w_270h.png" group-title="TV",MSNBC
|
||||
https://fl1.moveonjoy.com/MSNBC/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="94" tvg-id="MTV.-.Music.Television.HD.us2" tvg-name="MTV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10986_dark_360w_270h.png" group-title="TV",MTV
|
||||
http://fl1.moveonjoy.com/MTV/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="95" tvg-id="National.Geographic.HD.us2" tvg-name="National Geographic" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49438_dark_360w_270h.png" group-title="TV",National Geographic
|
||||
http://fl1.moveonjoy.com/National_Geographic/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="96" tvg-id="NBA.TV.HD.us2" tvg-name="NBA TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s32281_dark_360w_270h.png" group-title="TV",NBA TV
|
||||
http://fl1.moveonjoy.com/NBA_TV/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="97" tvg-id="WFLA-DT.us_locals1" tvg-name="NBC" tvg-logo="https://i.gyazo.com/39d42952500205d04e448247b7889256.png" group-title="TV",NBC
|
||||
https://fl1.moveonjoy.com/FL_Tampa_NBC/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="98" tvg-id="NBC.Sports.Bay.Area.HD.us2" tvg-name="NBC Sports Bay Area" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s63138_dark_360w_270h.png" group-title="TV",NBC Sports Bay Area
|
||||
http://cord-cutter.net:8080/30550113/30550113/9900
|
||||
|
||||
#EXTINF:-1 tvg-chno="99" tvg-id="NBC.Sports.Boston.HD.us2" tvg-name="NBC Sports Boston" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49198_dark_360w_270h.png" group-title="TV",NBC Sports Boston
|
||||
http://cord-cutter.net:8080/30550113/30550113/20939
|
||||
|
||||
#EXTINF:-1 tvg-chno="100" tvg-id="NBC.Sports.California.SAT.us2" tvg-name="NBC Sports California" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s45540_dark_360w_270h.png" group-title="TV",NBC Sports California
|
||||
http://cord-cutter.net:8080/30550113/30550113/20940
|
||||
|
||||
#EXTINF:-1 tvg-chno="101" tvg-id="a90a91570ce0536cbb22b591ad7e0da2" tvg-name="NBC Sports NOW" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s114140_dark_360w_270h.png" group-title="TV",NBC Sports NOW
|
||||
https://jmp2.uk/plu-6549306c83595c000815a696.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="102" tvg-id="NBC.Sports.Philadelphia.HD.us2" tvg-name="NBC Sports Philadelphia" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s32571_dark_360w_270h.png" group-title="TV",NBC Sports Philadelphia
|
||||
http://cord-cutter.net:8080/30550113/30550113/20943
|
||||
|
||||
#EXTINF:-1 tvg-chno="103" tvg-id="New.England.Sports.Network.HD.us2" tvg-name="NESN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s35038_dark_360w_270h.png" group-title="TV",NESN
|
||||
http://cord-cutter.net:8080/30550113/30550113/31637
|
||||
|
||||
#EXTINF:-1 tvg-chno="104" tvg-id="NewsNation.us2" tvg-name="NewsNation" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s91096_dark_360w_270h.png" group-title="TV",NewsNation
|
||||
http://cord-cutter.net:8080/30550113/30550113/161450
|
||||
|
||||
#EXTINF:-1 tvg-chno="105" tvg-id="NFL.Network.HD.us2" tvg-name="NFL Network" tvg-logo="https://raw.githubusercontent.com/tv-logo/tv-logos/refs/heads/main/countries/united-states/nfl-network-hz-us.png" group-title="TV",NFL Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/159117
|
||||
|
||||
#EXTINF:-1 tvg-chno="106" tvg-id="NFL.RedZone.HD.us2" tvg-name="NFL RedZone" tvg-logo="https://raw.githubusercontent.com/tv-logo/tv-logos/refs/heads/main/countries/united-states/nfl-red-zone-hz-us.png" group-title="TV",NFL RedZone
|
||||
http://cord-cutter.net:8080/30550113/30550113/208830
|
||||
|
||||
#EXTINF:-1 tvg-chno="107" tvg-id="NHL.Network.HD.us2" tvg-name="NHL Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58570_dark_360w_270h.png" group-title="TV",NHL Network
|
||||
http://23.237.104.106:8080/USA_NHL_NETWORK/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="108" tvg-id="Nickelodeon.HD.us2" tvg-name="Nickelodeon" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11006_dark_360w_270h.png" group-title="TV",Nickelodeon
|
||||
http://cord-cutter.net:8080/30550113/30550113/38
|
||||
|
||||
#EXTINF:-1 tvg-chno="109" tvg-id="Nicktoons.us2" tvg-name="Nicktoons" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s30420_dark_360w_270h.png" group-title="TV",Nicktoons
|
||||
http://cord-cutter.net:8080/30550113/30550113/36
|
||||
|
||||
#EXTINF:-1 tvg-chno="110" tvg-id="Outdoor.Channel.HD.us2" tvg-name="Outdoor Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14776_dark_360w_270h.png" group-title="TV",Outdoor Channel
|
||||
http://fl1.moveonjoy.com/OUTDOOR_CHANNEL/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="111" tvg-id="Ovation.HD.us2" tvg-name="Ovation" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s15807_dark_360w_270h.png" group-title="TV",Ovation
|
||||
https://fl1.moveonjoy.com/Ovation/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="112" tvg-id="Oxygen.True.Crime.HD.us2" tvg-name="Oxygen" tvg-logo="https://i.gyazo.com/68db5ec4dba40181592406e7c3128fc7.png" group-title="TV",Oxygen
|
||||
https://fl1.moveonjoy.com/OXYGEN/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="113" tvg-id="Paramount.Network.HD.us2" tvg-name="Paramount Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11163_dark_360w_270h.png" group-title="TV",Paramount Network
|
||||
https://fl1.moveonjoy.com/PARAMOUNT_NETWORK/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="114" tvg-id="POP.HD.us2" tvg-name="Pop TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16715_dark_360w_270h.png" group-title="TV",Pop TV
|
||||
https://fl1.moveonjoy.com/Pop_TV/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="115" tvg-id="Premier.Sports.1.HD.uk" tvg-name="Premier Sports 1" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s79027_dark_360w_270h.png" group-title="TV",Premier Sports 1
|
||||
http://c3921155.edmonst.net/iptv/ZQHGFQ9PRYN859UHYGWY674B/2160/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="116" tvg-id="ReelzChannel.HD.us2" tvg-name="Reelz Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s68385_dark_360w_270h.png" group-title="TV",Reelz Channel
|
||||
http://cord-cutter.net:8080/30550113/30550113/10526
|
||||
|
||||
#EXTINF:-1 tvg-chno="117" tvg-id="ROOT.Sports.Northwest.HD.us2" tvg-name="Root Sports" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11062_dark_360w_270h.png" group-title="TV",Root Sports
|
||||
http://cord-cutter.net:8080/30550113/30550113/85232
|
||||
|
||||
#EXTINF:-1 tvg-chno="118" tvg-id="SEC.Network.HD.us2" tvg-name="SEC Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s89535_dark_360w_270h.png" group-title="TV",SEC Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/17608
|
||||
|
||||
#EXTINF:-1 tvg-chno="119" tvg-id="Paramount+.with.Showtime.HD.us2" tvg-name="Showtime" tvg-logo="https://raw.githubusercontent.com/tv-logo/tv-logos/refs/heads/main/countries/united-states/showtime-us.png" group-title="TV",Showtime
|
||||
http://fl1.moveonjoy.com/SHOWTIME/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="120" tvg-id="Smithsonian.HD.Network.us2" tvg-name="Smithsonian Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58532_dark_360w_270h.png" group-title="TV",Smithsonian Channel
|
||||
http://fl1.moveonjoy.com/SMITHSONIAN_CHANNEL/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="121" tvg-id="Sony.Movie.Channel.HD.us2" tvg-name="Sony Movie Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s69130_dark_360w_270h.png" group-title="TV",Sony Movie Channel
|
||||
http://fl1.moveonjoy.com/Sony_Movie_Channel/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="122" tvg-id="Space.City.Home.Network.HD.us2" tvg-name="Space City Home Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s77744_dark_360w_270h.png" group-title="TV",Space City Home Network
|
||||
http://cord-cutter.net:8080/30550113/30550113/213668
|
||||
|
||||
#EXTINF:-1 tvg-chno="123" tvg-id="Spectrum.SportsNet.LA.Dodgers.HD.us2" tvg-name="Spectrum SportsNet LA Dodgers" tvg-logo="https://i.gyazo.com/765cce528ddda366695bb178d9dee6da.png" group-title="TV",Spectrum SportsNet LA Dodgers
|
||||
http://cord-cutter.net:8080/30550113/30550113/31636
|
||||
|
||||
#EXTINF:-1 tvg-chno="124" tvg-id="Spectrum.SportsNet.Lakers.HD.us2" tvg-name="Spectrum SportsNet Lakers" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s77422_dark_360w_270h.png" group-title="TV",Spectrum SportsNet Lakers
|
||||
http://cord-cutter.net:8080/30550113/30550113/20946
|
||||
|
||||
#EXTINF:-1 tvg-chno="125" tvg-id="Sportsnet.360.HD.ca2" tvg-name="Sportsnet 360" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49952_dark_360w_270h.png" group-title="TV",Sportsnet 360
|
||||
http://cord-cutter.net:8080/30550113/30550113/57299
|
||||
|
||||
#EXTINF:-1 tvg-chno="126" tvg-id="Sportsnet.East.ca2" tvg-name="Sportsnet East" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s62109_dark_360w_270h.png" group-title="TV",Sportsnet East
|
||||
http://cord-cutter.net:8080/30550113/30550113/57298
|
||||
|
||||
#EXTINF:-1 tvg-chno="127" tvg-id="SNY.SportsNet.New.York.HD.us2" tvg-name="SportsNet New York" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s50038_dark_360w_270h.png" group-title="TV",SportsNet New York
|
||||
http://cord-cutter.net:8080/30550113/30550113/20938
|
||||
|
||||
#EXTINF:-1 tvg-chno="128" tvg-id="Sportsnet.One.ca2" tvg-name="Sportsnet One" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s68859_dark_360w_270h.png" group-title="TV",Sportsnet One
|
||||
http://cord-cutter.net:8080/30550113/30550113/10247
|
||||
|
||||
#EXTINF:-1 tvg-chno="129" tvg-id="Sportsnet.Ontario.HD.ca2" tvg-name="Sportsnet Ontario" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s62111_dark_360w_270h.png" group-title="TV",Sportsnet Ontario
|
||||
http://cord-cutter.net:8080/30550113/30550113/11649
|
||||
|
||||
#EXTINF:-1 tvg-chno="130" tvg-id="SportsNet.Pittsburgh.HD.us2" tvg-name="SportsNet Pittsburgh" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s26028_dark_360w_270h.png" group-title="TV",SportsNet Pittsburgh
|
||||
http://cord-cutter.net:8080/30550113/30550113/108178
|
||||
|
||||
#EXTINF:-1 tvg-chno="131" tvg-id="Starz.HD.us2" tvg-name="Starz" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12719_dark_360w_270h.png" group-title="TV",Starz
|
||||
http://cord-cutter.net:8080/30550113/30550113/9299
|
||||
|
||||
#EXTINF:-1 tvg-chno="132" tvg-id="Syfy.HD.us2" tvg-name="Syfy" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11097_dark_360w_270h.png" group-title="TV",Syfy
|
||||
http://cord-cutter.net:8080/30550113/30550113/46685
|
||||
|
||||
#EXTINF:-1 tvg-chno="133" tvg-id="TBS.HD.us2" tvg-name="TBS" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11867_dark_360w_270h.png" group-title="TV",TBS
|
||||
https://turnerlive.warnermediacdn.com/hls/live/2023172/tbseast/slate/VIDEO_0_3564000.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="134" tvg-id="Tennis.Channel.HD.us2" tvg-name="Tennis Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s60316_dark_360w_270h.png" group-title="TV",Tennis Channel
|
||||
https://fl1.moveonjoy.com/TENNIS_CHANNEL/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="135" tvg-id="The.Weather.Channel.HD.us2" tvg-name="The Weather Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58812_dark_360w_270h.png" group-title="TV",The Weather Channel
|
||||
http://cord-cutter.net:8080/30550113/30550113/18926
|
||||
|
||||
#EXTINF:-1 tvg-chno="136" tvg-id="TLC.HD.(US).us2" tvg-name="TLC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11158_dark_360w_270h.png" group-title="TV",TLC
|
||||
http://cord-cutter.net:8080/30550113/30550113/12734
|
||||
|
||||
#EXTINF:-1 tvg-chno="137" tvg-id="TNT.HD.us2" tvg-name="TNT" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11164_dark_360w_270h.png" group-title="TV",TNT
|
||||
https://turnerlive.warnermediacdn.com/hls/live/2023168/tnteast/slate/VIDEO_0_3564000.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="138" tvg-id="truTV.HD.us2" tvg-name="truTV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10153_dark_360w_270h.png" group-title="TV",truTV
|
||||
https://turnerlive.warnermediacdn.com/hls/live/2023176/trueast/slate/VIDEO_0_3564000.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="139" tvg-id="TSN.1.ca2" tvg-name="TSN1" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11182_dark_360w_270h.png" group-title="TV",TSN1
|
||||
http://cord-cutter.net:8080/30550113/30550113/57292
|
||||
|
||||
#EXTINF:-1 tvg-chno="140" tvg-id="TSN.2.ca2" tvg-name="TSN2" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s61474_dark_360w_270h.png" group-title="TV",TSN2
|
||||
http://cord-cutter.net:8080/30550113/30550113/47442
|
||||
|
||||
#EXTINF:-1 tvg-chno="141" tvg-id="Turner.Classic.Movies.HD.us2" tvg-name="Turner Classic Movies" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12852_dark_360w_270h.png" group-title="TV",Turner Classic Movies
|
||||
https://turnerlive.warnermediacdn.com/hls/live/2023186/tcmeast/noslate/VIDEO_1_5128000.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="142" tvg-id="TV.Land.HD.us2" tvg-name="TV Land" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16123_dark_360w_270h.png" group-title="TV",TV Land
|
||||
https://fl1.moveonjoy.com/TV_LAND/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="143" tvg-id="TV.ONE.HD.us2" tvg-name="TV One" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s35513_dark_360w_270h.png" group-title="TV",TV One
|
||||
https://fl1.moveonjoy.com/TV_ONE/index.m3u8
|
||||
|
||||
#EXTINF:-1 tvg-chno="144" tvg-id="USA.Network.HD.us2" tvg-name="USA East" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11207_dark_360w_270h.png" group-title="TV",USA East
|
||||
http://cord-cutter.net:8080/30550113/30550113/10252
|
||||
|
||||
#EXTINF:-1 tvg-chno="145" tvg-id="Vice.HD.us2" tvg-name="Vice TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18822_dark_360w_270h.png" group-title="TV",Vice TV
|
||||
http://cord-cutter.net:8080/30550113/30550113/46697
|
||||
|
||||
#EXTINF:-1 tvg-chno="146" tvg-id="Willow.Cricket.HD.us2" tvg-name="Willow Cricket" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s68605_dark_360w_270h.png" group-title="TV",Willow Cricket
|
||||
http://cord-cutter.net:8080/30550113/30550113/41979
|
||||
|
||||
#EXTINF:-1 tvg-chno="147" tvg-id="Yes.Network.us2" tvg-name="YES Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s30017_dark_360w_270h.png" group-title="TV",YES Network
|
||||
https://fl1.moveonjoy.com/YES_NETWORK/index.m3u8
|
||||
1039
M3U8/events.m3u8
Normal file
1039
M3U8/events.m3u8
Normal file
File diff suppressed because it is too large
Load diff
138
M3U8/fetch.py
Normal file
138
M3U8/fetch.py
Normal file
|
|
@ -0,0 +1,138 @@
|
|||
#!/usr/bin/env python3
|
||||
import asyncio
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
from scrapers import (
|
||||
fawa,
|
||||
lotus,
|
||||
pixel,
|
||||
ppv,
|
||||
roxie,
|
||||
shark,
|
||||
sport9,
|
||||
streambtw,
|
||||
streamcenter,
|
||||
streamfree,
|
||||
streamsgate,
|
||||
strmd,
|
||||
tvpass,
|
||||
watchfooty,
|
||||
webcast,
|
||||
)
|
||||
from scrapers.utils import get_logger, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
BASE_FILE = Path(__file__).parent / "base.m3u8"
|
||||
|
||||
EVENTS_FILE = Path(__file__).parent / "events.m3u8"
|
||||
|
||||
COMBINED_FILE = Path(__file__).parent / "TV.m3u8"
|
||||
|
||||
|
||||
def load_base() -> tuple[list[str], int]:
|
||||
log.info("Fetching base M3U8")
|
||||
|
||||
data = BASE_FILE.read_text(encoding="utf-8")
|
||||
|
||||
pattern = re.compile(r'tvg-chno="(\d+)"')
|
||||
|
||||
last_chnl_num = max(map(int, pattern.findall(data)), default=0)
|
||||
|
||||
return data.splitlines(), last_chnl_num
|
||||
|
||||
|
||||
async def main() -> None:
|
||||
base_m3u8, tvg_chno = load_base()
|
||||
|
||||
tasks = [
|
||||
asyncio.create_task(fawa.scrape(network.client)),
|
||||
asyncio.create_task(lotus.scrape(network.client)),
|
||||
asyncio.create_task(pixel.scrape()),
|
||||
asyncio.create_task(ppv.scrape(network.client)),
|
||||
asyncio.create_task(roxie.scrape(network.client)),
|
||||
asyncio.create_task(shark.scrape(network.client)),
|
||||
asyncio.create_task(sport9.scrape(network.client)),
|
||||
asyncio.create_task(streambtw.scrape(network.client)),
|
||||
asyncio.create_task(streamcenter.scrape(network.client)),
|
||||
asyncio.create_task(streamfree.scrape(network.client)),
|
||||
asyncio.create_task(streamsgate.scrape(network.client)),
|
||||
asyncio.create_task(strmd.scrape(network.client)),
|
||||
asyncio.create_task(tvpass.scrape(network.client)),
|
||||
asyncio.create_task(watchfooty.scrape(network.client)),
|
||||
asyncio.create_task(webcast.scrape(network.client)),
|
||||
]
|
||||
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
additions = (
|
||||
fawa.urls
|
||||
| lotus.urls
|
||||
| pixel.urls
|
||||
| ppv.urls
|
||||
| roxie.urls
|
||||
| shark.urls
|
||||
| sport9.urls
|
||||
| streambtw.urls
|
||||
| streamcenter.urls
|
||||
| strmd.urls
|
||||
| streamfree.urls
|
||||
| streamsgate.urls
|
||||
| tvpass.urls
|
||||
| watchfooty.urls
|
||||
| webcast.urls
|
||||
)
|
||||
|
||||
live_events: list[str] = []
|
||||
|
||||
combined_channels: list[str] = []
|
||||
|
||||
for i, (event, info) in enumerate(
|
||||
sorted(additions.items()),
|
||||
start=1,
|
||||
):
|
||||
extinf_all = (
|
||||
f'#EXTINF:-1 tvg-chno="{tvg_chno + i}" tvg-id="{info["id"]}" '
|
||||
f'tvg-name="{event}" tvg-logo="{info["logo"]}" group-title="Live Events",{event}'
|
||||
)
|
||||
|
||||
extinf_live = (
|
||||
f'#EXTINF:-1 tvg-chno="{i}" tvg-id="{info["id"]}" '
|
||||
f'tvg-name="{event}" tvg-logo="{info["logo"]}" group-title="Live Events",{event}'
|
||||
)
|
||||
|
||||
vlc_block = [
|
||||
f'#EXTVLCOPT:http-referrer={info["base"]}',
|
||||
f'#EXTVLCOPT:http-origin={info["base"]}',
|
||||
f"#EXTVLCOPT:http-user-agent={network.UA}",
|
||||
info["url"],
|
||||
]
|
||||
|
||||
combined_channels.extend(["\n" + extinf_all, *vlc_block])
|
||||
|
||||
live_events.extend(["\n" + extinf_live, *vlc_block])
|
||||
|
||||
COMBINED_FILE.write_text(
|
||||
"\n".join(base_m3u8 + combined_channels),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
log.info(f"Base + Events saved to {COMBINED_FILE.resolve()}")
|
||||
|
||||
EVENTS_FILE.write_text(
|
||||
'#EXTM3U url-tvg="https://raw.githubusercontent.com/doms9/iptv/refs/heads/default/EPG/TV.xml"\n'
|
||||
+ "\n".join(live_events),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
log.info(f"Events saved to {EVENTS_FILE.resolve()}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
|
||||
try:
|
||||
asyncio.run(network.client.aclose())
|
||||
except Exception:
|
||||
pass
|
||||
160
M3U8/scrapers/fawa.py
Normal file
160
M3U8/scrapers/fawa.py
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
import re
|
||||
from functools import partial
|
||||
from urllib.parse import quote, urljoin
|
||||
|
||||
import httpx
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("fawa.json", exp=10_800)
|
||||
|
||||
BASE_URL = "http://www.fawanews.sc/"
|
||||
|
||||
TAG = "FAWA"
|
||||
|
||||
|
||||
async def process_event(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
url_num: int,
|
||||
) -> str | None:
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
|
||||
return
|
||||
|
||||
valid_m3u8 = re.compile(
|
||||
r'var\s+(\w+)\s*=\s*\[["\']?(https?:\/\/[^"\'\s>]+\.m3u8(?:\?[^"\'\s>]*)?)["\']\]?',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
if not (match := valid_m3u8.search(r.text)):
|
||||
log.info(f"URL {url_num}) No M3U8 found")
|
||||
return
|
||||
|
||||
log.info(f"URL {url_num}) Captured M3U8")
|
||||
return match[2]
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
cached_hrefs: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
try:
|
||||
r = await client.get(BASE_URL)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{BASE_URL}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
soup = HTMLParser(r.content)
|
||||
|
||||
valid_event = re.compile(r"\d{1,2}:\d{1,2}")
|
||||
clean_event = re.compile(r"\s+-+\s+\w{1,4}")
|
||||
|
||||
events = []
|
||||
|
||||
for item in soup.css(".user-item"):
|
||||
text = item.css_first(".user-item__name")
|
||||
subtext = item.css_first(".user-item__playing")
|
||||
link = item.css_first("a[href]")
|
||||
|
||||
if not (href := link.attributes.get("href")):
|
||||
continue
|
||||
|
||||
href = quote(href)
|
||||
|
||||
if cached_hrefs & {href}:
|
||||
continue
|
||||
|
||||
if not (text and subtext):
|
||||
continue
|
||||
|
||||
event_name, details = text.text(strip=True), subtext.text(strip=True)
|
||||
|
||||
if not (valid_event.search(details)):
|
||||
continue
|
||||
|
||||
sport = valid_event.split(details)[0].strip()
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": clean_event.sub("", event_name),
|
||||
"link": urljoin(BASE_URL, href),
|
||||
"href": href,
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_hrefs = {entry["href"] for entry in cached_urls.values()}
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(client, cached_hrefs)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
now = Time.now().timestamp()
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
process_event,
|
||||
client=client,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["link"],
|
||||
)
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": BASE_URL,
|
||||
"timestamp": now,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"href": ev["href"],
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
168
M3U8/scrapers/lotus.py
Normal file
168
M3U8/scrapers/lotus.py
Normal file
|
|
@ -0,0 +1,168 @@
|
|||
from functools import partial
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("lotus.json", exp=5_400)
|
||||
|
||||
API_CACHE = Cache("lotus-api.json", exp=28_800)
|
||||
|
||||
BASE_URL = "https://lotusgamehd.xyz/api-event.php"
|
||||
|
||||
TAG = "LOTUS"
|
||||
|
||||
|
||||
def fix_league(s: str) -> str:
|
||||
return " ".join(x.capitalize() for x in s.split()) if len(s) > 5 else s.upper()
|
||||
|
||||
|
||||
async def refresh_api_cache(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
ts: float,
|
||||
) -> dict[str, dict[str, str]]:
|
||||
log.info("Refreshing API cache")
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return {}
|
||||
|
||||
data = r.json()
|
||||
|
||||
data["timestamp"] = ts
|
||||
|
||||
return data
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
now = Time.now()
|
||||
|
||||
if not (api_data := API_CACHE.load(per_entry=False)):
|
||||
api_data = await refresh_api_cache(
|
||||
client,
|
||||
url,
|
||||
now.timestamp(),
|
||||
)
|
||||
|
||||
API_CACHE.write(api_data)
|
||||
|
||||
events = []
|
||||
|
||||
for info in api_data.get("days", []):
|
||||
event_dt = Time.from_str(info["day_et"], timezone="ET")
|
||||
|
||||
if now.date() != event_dt.date():
|
||||
continue
|
||||
|
||||
for event in info["items"]:
|
||||
event_league = event["league"]
|
||||
|
||||
if event_league == "channel tv":
|
||||
continue
|
||||
|
||||
event_streams: list[dict[str, str]] = event["streams"]
|
||||
|
||||
if not (event_link := event_streams[0].get("link")):
|
||||
continue
|
||||
|
||||
sport = fix_league(event_league)
|
||||
event_name = event["title"]
|
||||
|
||||
key = f"[{sport}] {event_name} ({TAG})"
|
||||
|
||||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": event_name,
|
||||
"link": event_link,
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(
|
||||
client,
|
||||
BASE_URL,
|
||||
set(cached_urls.keys()),
|
||||
)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
now = Time.now().timestamp()
|
||||
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p)
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
network.process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
log=log,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": "https://vividmosaica.com/",
|
||||
"timestamp": now,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
181
M3U8/scrapers/old/fstv.py
Normal file
181
M3U8/scrapers/old/fstv.py
Normal file
|
|
@ -0,0 +1,181 @@
|
|||
from functools import partial
|
||||
from urllib.parse import unquote, urljoin
|
||||
|
||||
import httpx
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("fstv.json", exp=10_800)
|
||||
|
||||
MIRRORS = ["https://fstv.zip", "https://fstv.space"]
|
||||
|
||||
TAG = "FSTV"
|
||||
|
||||
|
||||
async def process_event(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
url_num: int,
|
||||
) -> tuple[str, str]:
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
|
||||
|
||||
return "", ""
|
||||
|
||||
soup = HTMLParser(r.content)
|
||||
|
||||
match_name = None
|
||||
|
||||
if category_links := soup.css(".common-list-category .category-item a"):
|
||||
match_name = category_links[-1].text(strip=True)
|
||||
|
||||
if not match_name or match_name.lower() == "vs":
|
||||
if og_title := soup.css_first("meta[property='og:title']"):
|
||||
match_name = (
|
||||
og_title.attributes.get("content", "").split(" start on")[0].strip()
|
||||
)
|
||||
|
||||
if not (ifr := soup.css_first("iframe")):
|
||||
log.info(f"URL {url_num}) No M3U8 found")
|
||||
return "", ""
|
||||
|
||||
if src := ifr.attributes.get("src"):
|
||||
log.info(f"URL {url_num}) Captured M3U8")
|
||||
return match_name or "", unquote(src).split("link=")[-1]
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
cached_hrefs: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
soup = HTMLParser(r.content)
|
||||
|
||||
events = []
|
||||
|
||||
for wrpr in soup.css("div.fixtures-live-wrapper"):
|
||||
for league_block in wrpr.css(".match-table-item > .league-info-wrapper"):
|
||||
if not (
|
||||
league_name_el := league_block.css_first(".league-info a.league-name")
|
||||
):
|
||||
continue
|
||||
|
||||
full_text = league_name_el.text(strip=True)
|
||||
|
||||
if "]" in full_text:
|
||||
event_name = full_text.split("]", 1)[1].strip()
|
||||
|
||||
else:
|
||||
event_name = full_text
|
||||
|
||||
parent_item = league_block.parent
|
||||
|
||||
for game in parent_item.css(".common-table-row a[href*='/match/']"):
|
||||
if not (href := game.attributes.get("href")):
|
||||
continue
|
||||
|
||||
if cached_hrefs & {href}:
|
||||
continue
|
||||
|
||||
cached_hrefs.add(href)
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": event_name,
|
||||
"link": urljoin(url, href),
|
||||
"href": href,
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_hrefs = {entry["href"] for entry in cached_urls.values()}
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
if not (base_url := await network.get_base(MIRRORS)):
|
||||
log.warning("No working FSTV mirrors")
|
||||
CACHE_FILE.write(cached_urls)
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{base_url}"')
|
||||
|
||||
events = await get_events(
|
||||
client,
|
||||
base_url,
|
||||
cached_hrefs,
|
||||
)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
now = Time.now().timestamp()
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
process_event,
|
||||
client=client,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
)
|
||||
|
||||
match_name, url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport = ev["sport"]
|
||||
|
||||
key = (
|
||||
f"[{sport}] {match_name} ({TAG})"
|
||||
if match_name
|
||||
else f"[{sport}] ({TAG})"
|
||||
)
|
||||
|
||||
tvg_id, logo = leagues.info(sport)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": base_url,
|
||||
"timestamp": now,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"href": ev["href"],
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
|
||||
|
||||
# cloudflare bot check added
|
||||
176
M3U8/scrapers/old/streameast.py
Normal file
176
M3U8/scrapers/old/streameast.py
Normal file
|
|
@ -0,0 +1,176 @@
|
|||
from functools import partial
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("streameast.json", exp=10_800)
|
||||
|
||||
prefixes = {
|
||||
"ga": None,
|
||||
"ph": None,
|
||||
"sg": None,
|
||||
"ch": None,
|
||||
"ec": None,
|
||||
"fi": None,
|
||||
"ms": None,
|
||||
"ps": None,
|
||||
"cf": None,
|
||||
"sk": None,
|
||||
"co": "the",
|
||||
"fun": "the",
|
||||
"ru": "the",
|
||||
"su": "the",
|
||||
}
|
||||
|
||||
MIRRORS = [
|
||||
*[f"https://streameast.{ext}" for ext in prefixes if not prefixes[ext]],
|
||||
*[f"https://thestreameast.{ext}" for ext in prefixes if prefixes[ext] == "the"],
|
||||
]
|
||||
|
||||
TAG = "STRMEST"
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
soup = HTMLParser(r.content)
|
||||
|
||||
events = []
|
||||
|
||||
now = Time.clean(Time.now())
|
||||
start_dt = now.delta(minutes=-30)
|
||||
end_dt = now.delta(minutes=30)
|
||||
|
||||
for section in soup.css("div.se-sport-section"):
|
||||
if not (sport := section.attributes.get("data-sport-name", "").strip()):
|
||||
continue
|
||||
|
||||
for a in section.css("a.uefa-card"):
|
||||
if not (href := a.attributes.get("href")):
|
||||
continue
|
||||
|
||||
link = urljoin(url, href)
|
||||
|
||||
team_spans = [t.text(strip=True) for t in a.css("span.uefa-name")]
|
||||
|
||||
if len(team_spans) == 2:
|
||||
name = f"{team_spans[0]} vs {team_spans[1]}"
|
||||
|
||||
elif len(team_spans) == 1:
|
||||
name = team_spans[0]
|
||||
|
||||
else:
|
||||
continue
|
||||
|
||||
if not (time_span := a.css_first(".uefa-time")):
|
||||
continue
|
||||
|
||||
time_text = time_span.text(strip=True)
|
||||
|
||||
timestamp = int(a.attributes.get("data-time", Time.default_8()))
|
||||
|
||||
key = f"[{sport}] {name} ({TAG})"
|
||||
|
||||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
event_dt = Time.from_ts(timestamp)
|
||||
|
||||
if time_text == "LIVE" or (start_dt <= event_dt <= end_dt):
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": name,
|
||||
"link": link,
|
||||
"timestamp": timestamp,
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
if not (base_url := await network.get_base(MIRRORS)):
|
||||
log.warning("No working Streameast mirrors")
|
||||
CACHE_FILE.write(cached_urls)
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{base_url}"')
|
||||
|
||||
events = await get_events(
|
||||
client,
|
||||
base_url,
|
||||
set(cached_urls.keys()),
|
||||
)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p, browser="brave")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
network.process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
log=log,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": "https://embedsports.top/",
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
100
M3U8/scrapers/pixel.py
Normal file
100
M3U8/scrapers/pixel.py
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
import json
|
||||
import re
|
||||
|
||||
from playwright.async_api import async_playwright
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("pixel.json", exp=19_800)
|
||||
|
||||
BASE_URL = "https://pixelsport.tv/backend/livetv/events"
|
||||
|
||||
TAG = "PIXL"
|
||||
|
||||
|
||||
async def get_api_data(url: str) -> dict[str, list[dict, str, str]]:
|
||||
async with async_playwright() as p:
|
||||
try:
|
||||
browser, context = await network.browser(p)
|
||||
|
||||
page = await context.new_page()
|
||||
|
||||
await page.goto(
|
||||
url,
|
||||
wait_until="domcontentloaded",
|
||||
timeout=10_000,
|
||||
)
|
||||
|
||||
raw_json = await page.locator("pre").inner_text(timeout=5_000)
|
||||
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return {}
|
||||
|
||||
finally:
|
||||
await browser.close()
|
||||
|
||||
return json.loads(raw_json)
|
||||
|
||||
|
||||
async def get_events() -> dict[str, dict[str, str | float]]:
|
||||
now = Time.clean(Time.now())
|
||||
|
||||
api_data = await get_api_data(BASE_URL)
|
||||
|
||||
events = {}
|
||||
|
||||
pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)
|
||||
|
||||
for event in api_data.get("events", []):
|
||||
event_dt = Time.from_str(event["date"], timezone="UTC")
|
||||
|
||||
if event_dt.date() != now.date():
|
||||
continue
|
||||
|
||||
event_name = event["match_name"]
|
||||
channel_info: dict[str, str] = event["channel"]
|
||||
category: dict[str, str] = channel_info["TVCategory"]
|
||||
|
||||
sport = category["name"]
|
||||
|
||||
stream_urls = [(i, f"server{i}URL") for i in range(1, 4)]
|
||||
|
||||
for z, stream_url in stream_urls:
|
||||
if stream_link := channel_info.get(stream_url):
|
||||
if pattern.search(stream_link):
|
||||
key = f"[{sport}] {event_name} {z} ({TAG})"
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event_name)
|
||||
|
||||
events[key] = {
|
||||
"url": stream_link,
|
||||
"logo": logo,
|
||||
"base": "https://pixelsport.tv",
|
||||
"timestamp": now.timestamp(),
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
}
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape() -> None:
|
||||
if cached := CACHE_FILE.load():
|
||||
urls.update(cached)
|
||||
log.info(f"Loaded {len(urls)} event(s) from cache")
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events()
|
||||
|
||||
urls.update(events)
|
||||
|
||||
CACHE_FILE.write(urls)
|
||||
|
||||
log.info(f"Collected and cached {len(urls)} new event(s)")
|
||||
177
M3U8/scrapers/ppv.py
Normal file
177
M3U8/scrapers/ppv.py
Normal file
|
|
@ -0,0 +1,177 @@
|
|||
from functools import partial
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("ppv.json", exp=10_800)
|
||||
|
||||
API_FILE = Cache("ppv-api.json", exp=19_800)
|
||||
|
||||
API_MIRRORS = [
|
||||
"https://old.ppv.to/api/streams",
|
||||
"https://api.ppvs.su/api/streams",
|
||||
"https://api.ppv.to/api/streams",
|
||||
]
|
||||
|
||||
BASE_MIRRORS = [
|
||||
"https://old.ppv.to",
|
||||
"https://ppvs.su",
|
||||
"https://ppv.to",
|
||||
]
|
||||
|
||||
TAG = "PPV"
|
||||
|
||||
|
||||
async def refresh_api_cache(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
) -> dict[str, dict[str, str]]:
|
||||
log.info("Refreshing API cache")
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return {}
|
||||
|
||||
return r.json()
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
api_url: str,
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
if not (api_data := API_FILE.load(per_entry=False)):
|
||||
api_data = await refresh_api_cache(client, api_url)
|
||||
|
||||
API_FILE.write(api_data)
|
||||
|
||||
events = []
|
||||
|
||||
now = Time.clean(Time.now())
|
||||
start_dt = now.delta(minutes=-30)
|
||||
end_dt = now.delta(minutes=30)
|
||||
|
||||
for stream_group in api_data.get("streams", []):
|
||||
sport = stream_group["category"]
|
||||
|
||||
if sport == "24/7 Streams":
|
||||
continue
|
||||
|
||||
for event in stream_group.get("streams", []):
|
||||
name = event.get("name")
|
||||
start_ts = event.get("starts_at")
|
||||
logo = event.get("poster")
|
||||
iframe = event.get("iframe")
|
||||
|
||||
if not (name and start_ts and iframe):
|
||||
continue
|
||||
|
||||
key = f"[{sport}] {name} ({TAG})"
|
||||
|
||||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
event_dt = Time.from_ts(start_ts)
|
||||
|
||||
if not start_dt <= event_dt <= end_dt:
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": name,
|
||||
"link": iframe,
|
||||
"logo": logo,
|
||||
"timestamp": event_dt.timestamp(),
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
base_url = await network.get_base(BASE_MIRRORS)
|
||||
|
||||
api_url = await network.get_base(API_MIRRORS)
|
||||
|
||||
if not (base_url and api_url):
|
||||
log.warning("No working PPV mirrors")
|
||||
CACHE_FILE.write(cached_urls)
|
||||
return
|
||||
|
||||
events = await get_events(
|
||||
client,
|
||||
api_url,
|
||||
set(cached_urls.keys()),
|
||||
)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p, browser="brave")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
network.process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
timeout=6,
|
||||
log=log,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, logo, ts, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["logo"],
|
||||
ev["timestamp"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
tvg_id, pic = leagues.get_tvg_info(sport, event)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo or pic,
|
||||
"base": base_url,
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
215
M3U8/scrapers/roxie.py
Normal file
215
M3U8/scrapers/roxie.py
Normal file
|
|
@ -0,0 +1,215 @@
|
|||
import asyncio
|
||||
import re
|
||||
from functools import partial
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import httpx
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("roxie.json", exp=10_800)
|
||||
|
||||
HTML_CACHE = Cache("roxie-html.json", exp=19_800)
|
||||
|
||||
BASE_URL = "https://roxiestreams.live"
|
||||
|
||||
SPORT_ENDPOINTS = {
|
||||
"fighting": "Fighting",
|
||||
"mlb": "MLB",
|
||||
"motorsports": "Racing",
|
||||
"nba": "NBA",
|
||||
"nfl": "American Football",
|
||||
"soccer": "Soccer",
|
||||
}
|
||||
|
||||
TAG = "ROXIE"
|
||||
|
||||
|
||||
async def process_event(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
url_num: int,
|
||||
) -> str | None:
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
|
||||
return
|
||||
|
||||
valid_m3u8 = re.compile(
|
||||
r"showPlayer\(['\"]clappr['\"],\s*['\"]([^'\"]+?\.m3u8(?:\?[^'\"]*)?)['\"]\)",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
if not (match := valid_m3u8.search(r.text)):
|
||||
log.info(f"URL {url_num}) No M3U8 found")
|
||||
return
|
||||
|
||||
log.info(f"URL {url_num}) Captured M3U8")
|
||||
return match[1]
|
||||
|
||||
|
||||
async def refresh_html_cache(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
sport: str,
|
||||
now_ts: float,
|
||||
) -> dict[str, dict[str, str | float]]:
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return {}
|
||||
|
||||
soup = HTMLParser(r.content)
|
||||
|
||||
events = {}
|
||||
|
||||
for row in soup.css("table#eventsTable tbody tr"):
|
||||
if not (a_tag := row.css_first("td a")):
|
||||
continue
|
||||
|
||||
event = a_tag.text(strip=True)
|
||||
|
||||
if not (href := a_tag.attributes.get("href")):
|
||||
continue
|
||||
|
||||
if not (span := row.css_first("span.countdown-timer")):
|
||||
continue
|
||||
|
||||
data_start = span.attributes["data-start"].rsplit(":", 1)[0]
|
||||
|
||||
event_dt = Time.from_str(data_start, timezone="PST")
|
||||
|
||||
event_sport = SPORT_ENDPOINTS[sport]
|
||||
|
||||
key = f"[{event_sport}] {event} ({TAG})"
|
||||
|
||||
events[key] = {
|
||||
"sport": event_sport,
|
||||
"event": event,
|
||||
"link": href,
|
||||
"event_ts": event_dt.timestamp(),
|
||||
"timestamp": now_ts,
|
||||
}
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
sport_urls: dict[str, str],
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
|
||||
now = Time.clean(Time.now())
|
||||
|
||||
if not (events := HTML_CACHE.load()):
|
||||
log.info("Refreshing HTML cache")
|
||||
|
||||
tasks = [
|
||||
refresh_html_cache(
|
||||
client,
|
||||
url,
|
||||
sport,
|
||||
now.timestamp(),
|
||||
)
|
||||
for sport, url in sport_urls.items()
|
||||
]
|
||||
|
||||
results = await asyncio.gather(*tasks)
|
||||
|
||||
events = {k: v for data in results for k, v in data.items()}
|
||||
|
||||
HTML_CACHE.write(events)
|
||||
|
||||
live = []
|
||||
|
||||
start_ts = now.delta(minutes=-30).timestamp()
|
||||
end_ts = now.delta(minutes=30).timestamp()
|
||||
|
||||
for k, v in events.items():
|
||||
if cached_keys & {k}:
|
||||
continue
|
||||
|
||||
if not start_ts <= v["event_ts"] <= end_ts:
|
||||
continue
|
||||
|
||||
live.append({**v})
|
||||
|
||||
return live
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}
|
||||
|
||||
events = await get_events(
|
||||
client,
|
||||
sport_urls,
|
||||
set(cached_urls.keys()),
|
||||
)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
process_event,
|
||||
client=client,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, ts, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["event_ts"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": BASE_URL,
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
192
M3U8/scrapers/shark.py
Normal file
192
M3U8/scrapers/shark.py
Normal file
|
|
@ -0,0 +1,192 @@
|
|||
import re
|
||||
from functools import partial
|
||||
|
||||
import httpx
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("shark.json", exp=10_800)
|
||||
|
||||
HTML_CACHE = Cache("shark-html.json", exp=19_800)
|
||||
|
||||
BASE_URL = "https://sharkstreams.net"
|
||||
|
||||
TAG = "SHARK"
|
||||
|
||||
|
||||
async def process_event(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
url_num: int,
|
||||
) -> str | None:
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
|
||||
return
|
||||
|
||||
data: dict[str, list[str]] = r.json()
|
||||
|
||||
if not data.get("urls"):
|
||||
log.info(f"URL {url_num}) No M3U8 found")
|
||||
|
||||
return
|
||||
|
||||
log.info(f"URL {url_num}) Captured M3U8")
|
||||
|
||||
return data["urls"][0]
|
||||
|
||||
|
||||
async def refresh_html_cache(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
now_ts: float,
|
||||
) -> dict[str, dict[str, str | float]]:
|
||||
|
||||
log.info("Refreshing HTML cache")
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return {}
|
||||
|
||||
pattern = re.compile(r"openEmbed\('([^']+)'\)", re.IGNORECASE)
|
||||
|
||||
soup = HTMLParser(r.content)
|
||||
|
||||
events = {}
|
||||
|
||||
for row in soup.css(".row"):
|
||||
date_node = row.css_first(".ch-date")
|
||||
sport_node = row.css_first(".ch-category")
|
||||
name_node = row.css_first(".ch-name")
|
||||
|
||||
if not (date_node and sport_node and name_node):
|
||||
continue
|
||||
|
||||
event_dt = Time.from_str(date_node.text(strip=True), timezone="EST")
|
||||
sport = sport_node.text(strip=True)
|
||||
event_name = name_node.text(strip=True)
|
||||
|
||||
embed_btn = row.css_first("a.hd-link.secondary")
|
||||
|
||||
if not embed_btn or not (onclick := embed_btn.attributes.get("onclick")):
|
||||
continue
|
||||
|
||||
if not (match := pattern.search(onclick)):
|
||||
continue
|
||||
|
||||
link = match[1].replace("player.php", "get-stream.php")
|
||||
|
||||
key = f"[{sport}] {event_name} ({TAG})"
|
||||
|
||||
events[key] = {
|
||||
"sport": sport,
|
||||
"event": event_name,
|
||||
"link": link,
|
||||
"event_ts": event_dt.timestamp(),
|
||||
"timestamp": now_ts,
|
||||
}
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
now = Time.clean(Time.now())
|
||||
|
||||
if not (events := HTML_CACHE.load()):
|
||||
events = await refresh_html_cache(
|
||||
client,
|
||||
BASE_URL,
|
||||
now.timestamp(),
|
||||
)
|
||||
|
||||
HTML_CACHE.write(events)
|
||||
|
||||
live = []
|
||||
|
||||
start_ts = now.delta(hours=-1).timestamp()
|
||||
end_ts = now.delta(minutes=10).timestamp()
|
||||
|
||||
for k, v in events.items():
|
||||
if cached_keys & {k}:
|
||||
continue
|
||||
|
||||
if not start_ts <= v["event_ts"] <= end_ts:
|
||||
continue
|
||||
|
||||
live.append({**v})
|
||||
|
||||
return live
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(client, set(cached_urls.keys()))
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
process_event,
|
||||
client=client,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, ts, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["event_ts"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": BASE_URL,
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
170
M3U8/scrapers/sport9.py
Normal file
170
M3U8/scrapers/sport9.py
Normal file
|
|
@ -0,0 +1,170 @@
|
|||
import asyncio
|
||||
from functools import partial
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("sport9.json", exp=3_600)
|
||||
|
||||
BASE_URL = "https://sport9.ru"
|
||||
|
||||
TAG = "SPRT9"
|
||||
|
||||
|
||||
async def get_html(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
date: str,
|
||||
) -> bytes:
|
||||
|
||||
try:
|
||||
r = await client.get(url, params={"date": date})
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return b""
|
||||
|
||||
return r.content
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
now = Time.now()
|
||||
|
||||
tasks = [
|
||||
get_html(client, BASE_URL, str(d.date()))
|
||||
for d in [
|
||||
now.delta(days=-1),
|
||||
now,
|
||||
now.delta(days=1),
|
||||
]
|
||||
]
|
||||
|
||||
results = await asyncio.gather(*tasks)
|
||||
|
||||
soups = [HTMLParser(html) for html in results]
|
||||
|
||||
events = []
|
||||
|
||||
for soup in soups:
|
||||
for card in soup.css("a.match-card"):
|
||||
live_badge = card.css_first(".live-badge")
|
||||
|
||||
if not live_badge or live_badge.text(strip=True) != "Live":
|
||||
continue
|
||||
|
||||
if not (sport_node := card.css_first(".tournament-name")):
|
||||
continue
|
||||
|
||||
sport = sport_node.text(strip=True)
|
||||
team_1_node = card.css_first(".team1 .team-name")
|
||||
team_2_node = card.css_first(".team2 .team-name")
|
||||
|
||||
if team_1_node and team_2_node:
|
||||
event = event = (
|
||||
f"{team_1_node.text(strip=True)} vs {team_2_node.text(strip=True)}"
|
||||
)
|
||||
|
||||
elif team_1_node:
|
||||
event = team_1_node.text(strip=True)
|
||||
|
||||
elif team_2_node:
|
||||
event = team_2_node.text(strip=True)
|
||||
|
||||
else:
|
||||
continue
|
||||
|
||||
if not (href := card.attributes.get("href")):
|
||||
continue
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": event,
|
||||
"link": urljoin(BASE_URL, href),
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(client, set(cached_urls.keys()))
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
now = Time.now().timestamp()
|
||||
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p, browser="brave")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
network.process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
log=log,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": "https://vividmosaica.com/",
|
||||
"timestamp": now,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
141
M3U8/scrapers/streambtw.py
Normal file
141
M3U8/scrapers/streambtw.py
Normal file
|
|
@ -0,0 +1,141 @@
|
|||
import base64
|
||||
import re
|
||||
from functools import partial
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import httpx
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("streambtw.json", exp=19_800)
|
||||
|
||||
BASE_URL = "https://streambtw.com"
|
||||
|
||||
TAG = "STRMBTW"
|
||||
|
||||
|
||||
def fix_league(s: str) -> str:
|
||||
pattern = re.compile(r"^\w*-\w*", re.IGNORECASE)
|
||||
|
||||
return " ".join(s.split("-")) if pattern.search(s) else s
|
||||
|
||||
|
||||
async def process_event(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
url_num: int,
|
||||
) -> str | None:
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
|
||||
return
|
||||
|
||||
valid_m3u8 = re.compile(r'var\s+(\w+)\s*=\s*"([^"]*)"', re.IGNORECASE)
|
||||
|
||||
if not (match := valid_m3u8.search(r.text)):
|
||||
log.info(f"URL {url_num}) No M3U8 found")
|
||||
return
|
||||
|
||||
encoded = match[2][::-1]
|
||||
decoded = base64.b64decode(encoded[::-1]).decode("utf-8")
|
||||
log.info(f"URL {url_num}) Captured M3U8")
|
||||
return decoded
|
||||
|
||||
|
||||
async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
|
||||
try:
|
||||
r = await client.get(BASE_URL)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{BASE_URL}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
soup = HTMLParser(r.content)
|
||||
|
||||
events = []
|
||||
|
||||
for card in soup.css("div.container div.card"):
|
||||
link = card.css_first("a.btn.btn-primary")
|
||||
|
||||
if not (href := link.attrs.get("href")):
|
||||
continue
|
||||
|
||||
league = card.css_first("h5.card-title").text(strip=True)
|
||||
|
||||
name = card.css_first("p.card-text").text(strip=True)
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": fix_league(league),
|
||||
"event": name,
|
||||
"link": urljoin(BASE_URL, href),
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
if cached := CACHE_FILE.load():
|
||||
urls.update(cached)
|
||||
log.info(f"Loaded {len(urls)} event(s) from cache")
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(client)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
now = Time.now().timestamp()
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
process_event,
|
||||
client=client,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": BASE_URL,
|
||||
"timestamp": now,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = entry
|
||||
|
||||
log.info(f"Collected {len(urls)} event(s)")
|
||||
|
||||
CACHE_FILE.write(urls)
|
||||
176
M3U8/scrapers/streamcenter.py
Normal file
176
M3U8/scrapers/streamcenter.py
Normal file
|
|
@ -0,0 +1,176 @@
|
|||
from functools import partial
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("streamcenter.json", exp=10_800)
|
||||
|
||||
API_FILE = Cache("streamcenter-api.json", exp=28_800)
|
||||
|
||||
BASE_URL = "https://backendstreamcenter.youshop.pro:488/api/Parties"
|
||||
|
||||
TAG = "STRMCNTR"
|
||||
|
||||
categories = {
|
||||
4: "Basketball",
|
||||
9: "Football",
|
||||
13: "Baseball",
|
||||
14: "American Football",
|
||||
15: "Motor Sport",
|
||||
16: "Hockey",
|
||||
17: "Fight MMA",
|
||||
18: "Boxing",
|
||||
19: "NCAA Sports",
|
||||
20: "WWE",
|
||||
21: "Tennis",
|
||||
}
|
||||
|
||||
|
||||
async def refresh_api_cache(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
now_ts: float,
|
||||
) -> list[dict[str, str | int]]:
|
||||
log.info("Refreshing API cache")
|
||||
|
||||
try:
|
||||
r = await client.get(url, params={"pageNumber": 1, "pageSize": 500})
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return {}
|
||||
|
||||
data = r.json()
|
||||
|
||||
data[-1]["timestamp"] = now_ts
|
||||
|
||||
return data
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
now = Time.clean(Time.now())
|
||||
|
||||
if not (api_data := API_FILE.load(per_entry=False, index=-1)):
|
||||
api_data = await refresh_api_cache(
|
||||
client,
|
||||
BASE_URL,
|
||||
now.timestamp(),
|
||||
)
|
||||
|
||||
API_FILE.write(api_data)
|
||||
|
||||
events = []
|
||||
|
||||
start_dt = now.delta(minutes=-30)
|
||||
end_dt = now.delta(minutes=30)
|
||||
|
||||
for stream_group in api_data:
|
||||
category_id: int = stream_group.get("categoryId")
|
||||
|
||||
name: str = stream_group.get("gameName")
|
||||
|
||||
iframe: str = stream_group.get("videoUrl")
|
||||
|
||||
event_time: str = stream_group.get("beginPartie")
|
||||
|
||||
if not (name and category_id and iframe and event_time):
|
||||
continue
|
||||
|
||||
event_dt = Time.from_str(event_time, timezone="CET")
|
||||
|
||||
if not start_dt <= event_dt <= end_dt:
|
||||
continue
|
||||
|
||||
if not (sport := categories.get(category_id)):
|
||||
continue
|
||||
|
||||
key = f"[{sport}] {name} ({TAG})"
|
||||
|
||||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": name,
|
||||
"link": iframe.replace("<", "?", count=1),
|
||||
"timestamp": event_dt.timestamp(),
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info('Scraping from "https://streamcenter.xyz"')
|
||||
|
||||
events = await get_events(client, set(cached_urls.keys()))
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p, browser="brave")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
network.process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
log=log,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, ts, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["timestamp"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": "https://streamcenter.xyz",
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
87
M3U8/scrapers/streamfree.py
Normal file
87
M3U8/scrapers/streamfree.py
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
from urllib.parse import urljoin
|
||||
|
||||
import httpx
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("streamfree.json", exp=19_800)
|
||||
|
||||
BASE_URL = "https://streamfree.to"
|
||||
|
||||
TAG = "STRMFR"
|
||||
|
||||
|
||||
async def refresh_api_cache(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
) -> dict[str, dict[str, list]]:
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return {}
|
||||
|
||||
return r.json()
|
||||
|
||||
|
||||
async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
|
||||
api_data = await refresh_api_cache(client, urljoin(BASE_URL, "streams"))
|
||||
|
||||
events = {}
|
||||
|
||||
for streams in api_data.get("streams", {}).values():
|
||||
if not streams:
|
||||
continue
|
||||
|
||||
for stream in streams:
|
||||
sport, name, stream_key = (
|
||||
stream.get("league"),
|
||||
stream.get("name"),
|
||||
stream.get("stream_key"),
|
||||
)
|
||||
|
||||
if not (sport and name and stream_key):
|
||||
continue
|
||||
|
||||
key = f"[{sport}] {name} ({TAG})"
|
||||
|
||||
logo = (
|
||||
urljoin(BASE_URL, thumbnail)
|
||||
if (thumbnail := stream.get("thumbnail_url"))
|
||||
else None
|
||||
)
|
||||
|
||||
tvg_id, pic = leagues.get_tvg_info(sport, name)
|
||||
|
||||
events[key] = {
|
||||
"url": urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8"),
|
||||
"logo": logo or pic,
|
||||
"base": BASE_URL,
|
||||
"timestamp": Time.now().timestamp(),
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
}
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
if cached := CACHE_FILE.load():
|
||||
urls.update(cached)
|
||||
log.info(f"Loaded {len(urls)} event(s) from cache")
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(client)
|
||||
|
||||
urls.update(events)
|
||||
|
||||
CACHE_FILE.write(urls)
|
||||
|
||||
log.info(f"Collected and cached {len(urls)} new event(s)")
|
||||
204
M3U8/scrapers/streamsgate.py
Normal file
204
M3U8/scrapers/streamsgate.py
Normal file
|
|
@ -0,0 +1,204 @@
|
|||
import asyncio
|
||||
from functools import partial
|
||||
from itertools import chain
|
||||
from typing import Any
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("streamsgate.json", exp=10_800)
|
||||
|
||||
API_FILE = Cache("streamsgate-api.json", exp=28_800)
|
||||
|
||||
BASE_URL = "https://streamingon.org"
|
||||
|
||||
SPORT_ENDPOINTS = [
|
||||
"soccer",
|
||||
"nfl",
|
||||
"nba",
|
||||
"cfb",
|
||||
"mlb",
|
||||
"nhl",
|
||||
"ufc",
|
||||
"boxing",
|
||||
"f1",
|
||||
]
|
||||
|
||||
TAG = "STRMSG8"
|
||||
|
||||
|
||||
def get_event(t1: str, t2: str) -> str:
|
||||
match t1:
|
||||
case "RED ZONE":
|
||||
return "NFL RedZone"
|
||||
|
||||
case "TBD":
|
||||
return "TBD"
|
||||
|
||||
case _:
|
||||
return f"{t1.strip()} vs {t2.strip()}"
|
||||
|
||||
|
||||
async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
return r.json()
|
||||
|
||||
|
||||
async def refresh_api_cache(
|
||||
client: httpx.AsyncClient, ts: float
|
||||
) -> list[dict[str, Any]]:
|
||||
log.info("Refreshing API cache")
|
||||
|
||||
tasks = [
|
||||
get_api_data(client, urljoin(BASE_URL, f"data/{sport}.json"))
|
||||
for sport in SPORT_ENDPOINTS
|
||||
]
|
||||
|
||||
results = await asyncio.gather(*tasks)
|
||||
|
||||
data = list(chain(*results))
|
||||
|
||||
for ev in data:
|
||||
ev["ts"] = ev.pop("timestamp")
|
||||
|
||||
# guard: every endpoint may have failed, leaving data empty
if data:
    data[-1]["timestamp"] = ts
|
||||
|
||||
return data
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient, cached_keys: set[str]
|
||||
) -> list[dict[str, str]]:
|
||||
now = Time.clean(Time.now())
|
||||
|
||||
if not (api_data := API_FILE.load(per_entry=False, index=-1)):
|
||||
api_data = await refresh_api_cache(client, now.timestamp())
|
||||
|
||||
API_FILE.write(api_data)
|
||||
|
||||
events = []
|
||||
|
||||
start_dt = now.delta(minutes=-30)
|
||||
end_dt = now.delta(minutes=30)
|
||||
|
||||
for stream_group in api_data:
|
||||
event_ts = stream_group.get("ts")
|
||||
|
||||
sport = stream_group.get("league")
|
||||
|
||||
t1, t2 = stream_group.get("away"), stream_group.get("home")
|
||||
|
||||
if not (event_ts and sport):
|
||||
continue
|
||||
|
||||
event_dt = Time.from_ts(event_ts)
|
||||
|
||||
if not start_dt <= event_dt <= end_dt:
|
||||
continue
|
||||
|
||||
event = get_event(t1, t2)
|
||||
|
||||
if not (streams := stream_group.get("streams")):
|
||||
continue
|
||||
|
||||
if not (url := streams[0].get("url")):
|
||||
continue
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": event,
|
||||
"link": url,
|
||||
"timestamp": event_dt.timestamp(),
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
|
||||
valid_count = cached_count = len(valid_urls)
|
||||
urls.update(valid_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(client, set(cached_urls.keys()))
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p)
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
network.process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
log=log,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
sport, event, ts, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["timestamp"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": BASE_URL,
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
cached_urls[key] = entry
|
||||
|
||||
if url:
|
||||
valid_count += 1
|
||||
urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := valid_count - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
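The ±30 minute window above is what keeps streamsgate from processing games that are not about to start; the same check as a standalone sketch using plain datetimes (timezone handling simplified):

from datetime import datetime, timedelta, timezone


def in_window(event_ts: float, minutes: int = 30) -> bool:
    # event_ts is assumed to be a Unix timestamp in seconds, as in the API payload
    now = datetime.now(timezone.utc)
    event_dt = datetime.fromtimestamp(event_ts, tz=timezone.utc)
    return now - timedelta(minutes=minutes) <= event_dt <= now + timedelta(minutes=minutes)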
215  M3U8/scrapers/strmd.py  Normal file
@@ -0,0 +1,215 @@
|
|||
import asyncio
|
||||
import re
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import BrowserContext, async_playwright
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("strmd.json", exp=10_800)
|
||||
|
||||
API_FILE = Cache("strmd-api.json", exp=28_800)
|
||||
|
||||
MIRRORS = [
|
||||
"https://streami.su",
|
||||
"https://streamed.st",
|
||||
"https://streamed.pk",
|
||||
]
|
||||
|
||||
TAG = "STRMD"
|
||||
|
||||
|
||||
def fix_sport(s: str) -> str:
|
||||
if "-" in s:
|
||||
return " ".join(i.capitalize() for i in s.split("-"))
|
||||
|
||||
elif s == "fight":
|
||||
return "Fight (UFC/Boxing)"
|
||||
|
||||
return s.capitalize() if len(s) >= 4 else s.upper()
|
||||
|
||||
|
||||
async def refresh_api_cache(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
ts: float,
|
||||
) -> list[dict[str, Any]]:
|
||||
|
||||
log.info("Refreshing API cache")
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
data = r.json()
|
||||
|
||||
# guard: the API can return an empty list, which would break the indexing
if data:
    data[-1]["timestamp"] = ts
|
||||
|
||||
return data
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
|
||||
now = Time.clean(Time.now())
|
||||
|
||||
if not (api_data := API_FILE.load(per_entry=False, index=-1)):
|
||||
api_data = await refresh_api_cache(
|
||||
client,
|
||||
urljoin(url, "api/matches/all-today"),
|
||||
now.timestamp(),
|
||||
)
|
||||
|
||||
API_FILE.write(api_data)
|
||||
|
||||
events = []
|
||||
|
||||
start_dt = now.delta(minutes=-30)
|
||||
end_dt = now.delta(minutes=30)
|
||||
pattern = re.compile(r"[\n\r]+|\s{2,}")
|
||||
|
||||
for event in api_data:
|
||||
if (category := event.get("category")) == "other":
|
||||
continue
|
||||
|
||||
if not (ts := event["date"]):
|
||||
continue
|
||||
|
||||
start_ts = int(f"{ts}"[:-3])
|
||||
|
||||
event_dt = Time.from_ts(start_ts)
|
||||
|
||||
if not start_dt <= event_dt <= end_dt:
|
||||
continue
|
||||
|
||||
sport = fix_sport(category)
|
||||
|
||||
parts = pattern.split(event["title"].strip())
|
||||
name = " | ".join(p.strip() for p in parts if p.strip())
|
||||
|
||||
logo = urljoin(url, poster) if (poster := event.get("poster")) else None
|
||||
|
||||
key = f"[{sport}] {name} ({TAG})"
|
||||
|
||||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
sources: list[dict[str, str]] = event["sources"]
|
||||
|
||||
if not sources:
|
||||
continue
|
||||
|
||||
skip_types = {"alpha", "bravo"}
|
||||
valid_sources = [d for d in sources if d.get("source") not in skip_types]
|
||||
|
||||
if not valid_sources:
|
||||
continue
|
||||
|
||||
srce = valid_sources[0]
|
||||
|
||||
source_type = srce.get("source")
|
||||
stream_id = srce.get("id")
|
||||
|
||||
if not (source_type and stream_id):
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": name,
|
||||
"link": f"https://embedsports.top/embed/{source_type}/{stream_id}/1",
|
||||
"logo": logo,
|
||||
"timestamp": event_dt.timestamp(),
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
if not (base_url := await network.get_base(MIRRORS)):
|
||||
log.warning("No working STRMD mirrors")
|
||||
CACHE_FILE.write(cached_urls)
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{base_url}"')
|
||||
|
||||
events = await get_events(
|
||||
client,
|
||||
base_url,
|
||||
set(cached_urls.keys()),
|
||||
)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p, browser="brave")
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
network.process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
log=log,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, logo, ts, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["logo"],
|
||||
ev["timestamp"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
tvg_id, pic = leagues.get_tvg_info(sport, event)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo or pic,
|
||||
"base": "https://embedsports.top/",
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
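To illustrate how fix_sport normalizes the category slugs coming from the API, a few expected mappings (import path assumed; the values follow from the rules above):

from M3U8.scrapers.strmd import fix_sport

assert fix_sport("american-football") == "American Football"  # hyphenated slugs are title-cased per word
assert fix_sport("fight") == "Fight (UFC/Boxing)"              # the special fight bucket is expanded
assert fix_sport("nba") == "NBA"                               # slugs shorter than 4 chars are upper-cased
assert fix_sport("tennis") == "Tennis"                         # longer slugs are simply capitalized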
82  M3U8/scrapers/tvpass.py  Normal file
@@ -0,0 +1,82 @@
|
|||
import re
|
||||
|
||||
import httpx
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("tvpass.json", exp=86_400)
|
||||
|
||||
BASE_URL = "https://tvpass.org/playlist/m3u"
|
||||
|
||||
TAG = "TVP"
|
||||
|
||||
|
||||
async def get_data(client: httpx.AsyncClient) -> list[str]:
|
||||
try:
|
||||
r = await client.get(BASE_URL)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{BASE_URL}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
return r.text.splitlines()
|
||||
|
||||
|
||||
async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
|
||||
now = Time.now().timestamp()
|
||||
|
||||
events = {}
|
||||
|
||||
data = await get_data(client)
|
||||
|
||||
for i, line in enumerate(data, start=1):
|
||||
if line.startswith("#EXTINF"):
|
||||
tvg_id_match = re.search(r'tvg-id="([^"]*)"', line)
|
||||
tvg_name_match = re.search(r'tvg-name="([^"]*)"', line)
|
||||
group_title_match = re.search(r'group-title="([^"]*)"', line)
|
||||
|
||||
tvg = tvg_id_match[1] if tvg_id_match else None
|
||||
|
||||
if not tvg and (url := data[i]).endswith("/sd"):
|
||||
if tvg_name := tvg_name_match[1]:
|
||||
sport = group_title_match[1].upper().strip()
|
||||
|
||||
event = "(".join(tvg_name.split("(")[:-1]).strip()
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
channel = url.split("/")[-2]
|
||||
|
||||
tvg_id, logo = leagues.info(sport)
|
||||
|
||||
events[key] = {
|
||||
"url": f"http://origin.thetvapp.to/hls/{channel}/mono.m3u8",
|
||||
"logo": logo,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"base": "https://tvpass.org",
|
||||
"timestamp": now,
|
||||
}
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
if cached := CACHE_FILE.load():
|
||||
urls.update(cached)
|
||||
log.info(f"Loaded {len(urls)} event(s) from cache")
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(client)
|
||||
|
||||
urls.update(events)
|
||||
|
||||
CACHE_FILE.write(urls)
|
||||
|
||||
log.info(f"Collected and cached {len(urls)} new event(s)")
|
||||
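The playlist handling above leans on attribute regexes over #EXTINF lines; a self-contained sketch of the same extraction on a made-up line:

import re

# hypothetical playlist line shaped like the ones the tvpass playlist serves
line = '#EXTINF:-1 tvg-id="" tvg-name="Jets vs Bills (NFL)" group-title="nfl",Jets vs Bills'

tvg_id = re.search(r'tvg-id="([^"]*)"', line)[1]        # "" -> treated as a live event
tvg_name = re.search(r'tvg-name="([^"]*)"', line)[1]    # "Jets vs Bills (NFL)"
group = re.search(r'group-title="([^"]*)"', line)[1]    # "nfl"

event = "(".join(tvg_name.split("(")[:-1]).strip()      # "Jets vs Bills"
print(f"[{group.upper()}] {event}")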
12  M3U8/scrapers/utils/__init__.py  Normal file
@@ -0,0 +1,12 @@
|
|||
from .caching import Cache
|
||||
from .config import Time, leagues
|
||||
from .logger import get_logger
|
||||
from .webwork import network
|
||||
|
||||
__all__ = [
|
||||
"Cache",
|
||||
"Time",
|
||||
"get_logger",
|
||||
"leagues",
|
||||
"network",
|
||||
]
|
||||
57  M3U8/scrapers/utils/caching.py  Normal file
@@ -0,0 +1,57 @@
|
|||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from .config import Time
|
||||
|
||||
|
||||
class Cache:
|
||||
def __init__(self, file: str, exp: int | float) -> None:
|
||||
self.file = Path(__file__).parent.parent / "caches" / file
|
||||
self.exp = exp
|
||||
self.now_ts = Time.now().timestamp()
|
||||
|
||||
def is_fresh(self, entry: dict) -> bool:
|
||||
ts: float | int = entry.get("timestamp", Time.default_8())
|
||||
|
||||
dt_ts = Time.clean(Time.from_ts(ts)).timestamp()
|
||||
|
||||
return self.now_ts - dt_ts < self.exp
|
||||
|
||||
def write(self, data: dict) -> None:
|
||||
self.file.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self.file.write_text(
|
||||
json.dumps(
|
||||
data,
|
||||
indent=2,
|
||||
ensure_ascii=False,
|
||||
),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
def load(
|
||||
self,
|
||||
per_entry: bool = True,
|
||||
index: int | None = None,
|
||||
) -> dict[str, dict[str, str | float]]:
|
||||
|
||||
try:
|
||||
data: dict = json.loads(self.file.read_text(encoding="utf-8"))
|
||||
except (FileNotFoundError, json.JSONDecodeError):
|
||||
return {}
|
||||
|
||||
if per_entry:
|
||||
return {k: v for k, v in data.items() if self.is_fresh(v)}
|
||||
|
||||
if index:
|
||||
ts: float | int = data[index].get("timestamp", Time.default_8())
|
||||
|
||||
else:
|
||||
ts: float | int = data.get("timestamp", Time.default_8())
|
||||
|
||||
dt_ts = Time.clean(Time.from_ts(ts)).timestamp()
|
||||
|
||||
return data if self.is_fresh({"timestamp": dt_ts}) else {}
|
||||
|
||||
|
||||
__all__ = ["Cache"]
|
||||
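A short usage sketch for the Cache helper (file name and payload are made up); load() silently drops entries whose timestamp is older than exp seconds:

from M3U8.scrapers.utils import Cache, Time

cache = Cache("example.json", exp=3_600)  # hypothetical cache file with a 1 hour expiry

cache.write(
    {
        "[NFL] Jets vs Bills (TAG)": {
            "url": "https://example.com/index.m3u8",
            "timestamp": Time.now().timestamp(),
        }
    }
)

fresh = cache.load()  # stale entries are filtered out through is_fresh()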
212  M3U8/scrapers/utils/config.py  Normal file
@@ -0,0 +1,212 @@
|
|||
import json
|
||||
import re
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
|
||||
import pytz
|
||||
|
||||
|
||||
class Time(datetime):
|
||||
ZONES = {
|
||||
"CET": pytz.timezone("Europe/Berlin"),
|
||||
"ET": pytz.timezone("America/New_York"),
|
||||
"PST": pytz.timezone("America/Los_Angeles"),
|
||||
"UTC": timezone.utc,
|
||||
}
|
||||
|
||||
ZONES["EDT"] = ZONES["EST"] = ZONES["ET"]
|
||||
|
||||
TZ = ZONES["ET"]
|
||||
|
||||
@classmethod
|
||||
def now(cls) -> "Time":
|
||||
return cls.from_ts(datetime.now(cls.TZ).timestamp())
|
||||
|
||||
@classmethod
|
||||
def from_ts(cls, ts: int | float) -> "Time":
|
||||
return cls.fromtimestamp(ts, tz=cls.TZ)
|
||||
|
||||
@classmethod
|
||||
def default_8(cls) -> float:
|
||||
return (
|
||||
cls.now()
|
||||
.replace(hour=8, minute=0, second=0, microsecond=0, tzinfo=cls.TZ)
|
||||
.timestamp()
|
||||
)
|
||||
|
||||
def delta(self, **kwargs) -> "Time":
|
||||
return self.from_ts((self + timedelta(**kwargs)).timestamp())
|
||||
|
||||
def clean(self) -> "Time":
|
||||
return self.__class__.fromtimestamp(
|
||||
self.replace(second=0, microsecond=0).timestamp(),
|
||||
tz=self.TZ,
|
||||
)
|
||||
|
||||
def to_tz(self, tzone: str) -> "Time":
|
||||
dt = self.astimezone(self.ZONES[tzone])
|
||||
return self.__class__.fromtimestamp(dt.timestamp(), tz=self.ZONES[tzone])
|
||||
|
||||
@classmethod
|
||||
def _to_class_tz(cls, dt) -> "Time":
|
||||
dt = dt.astimezone(cls.TZ)
|
||||
return cls.fromtimestamp(dt.timestamp(), tz=cls.TZ)
|
||||
|
||||
@classmethod
|
||||
def from_only_time(cls, s: str, d: date, timezone: str) -> "Time":
|
||||
hour, minute = map(int, s.split(":"))
|
||||
|
||||
dt = datetime(
|
||||
2000,
|
||||
1,
|
||||
1,
|
||||
hour,
|
||||
minute,
|
||||
tzinfo=cls.ZONES.get(timezone, cls.TZ),
|
||||
)
|
||||
|
||||
dt = dt.astimezone(cls.TZ)
|
||||
|
||||
dt = datetime.combine(d, dt.timetz())
|
||||
|
||||
return cls.fromtimestamp(dt.timestamp(), tz=cls.TZ)
|
||||
|
||||
@classmethod
|
||||
def from_str(
|
||||
cls,
|
||||
s: str,
|
||||
fmt: str | None = None,
|
||||
timezone: str | None = None,
|
||||
) -> "Time":
|
||||
tz = cls.ZONES.get(timezone, cls.TZ)
|
||||
|
||||
if fmt:
|
||||
dt = datetime.strptime(s, fmt)
|
||||
|
||||
dt = tz.localize(dt)
|
||||
|
||||
else:
|
||||
formats = [
|
||||
"%B %d, %Y %I:%M %p",
|
||||
"%B %d, %Y %I:%M:%S %p",
|
||||
"%m/%d/%Y %I:%M %p",
|
||||
"%B %d, %Y %H:%M",
|
||||
"%B %d, %Y %H:%M:%S",
|
||||
"%Y-%m-%d",
|
||||
"%Y-%m-%d %H:%M",
|
||||
"%Y-%m-%d %H:%M:%S",
|
||||
"%Y-%m-%d %H:%M %p",
|
||||
"%Y-%m-%d %I:%M %p",
|
||||
"%Y/%m/%d %H:%M",
|
||||
"%Y/%m/%d %H:%M:%S",
|
||||
"%m/%d/%Y %H:%M",
|
||||
"%m/%d/%Y %H:%M:%S",
|
||||
"%Y-%m-%dT%H:%M:%S",
|
||||
"%Y/%m/%dT%H:%M:%S.%fZ",
|
||||
"%Y-%m-%dT%H:%M:%S.%fZ",
|
||||
"%a, %d %b %Y %H:%M:%S %z",
|
||||
]
|
||||
|
||||
for frmt in formats:
|
||||
try:
|
||||
dt = datetime.strptime(s, frmt)
|
||||
break
|
||||
except ValueError:
|
||||
continue
|
||||
else:
|
||||
return cls.from_ts(Time.default_8())
|
||||
|
||||
if not dt.tzinfo:
|
||||
dt = (
|
||||
tz.localize(dt)
|
||||
if hasattr(tz, "localize")
|
||||
else dt.replace(tzinfo=tz)
|
||||
)
|
||||
|
||||
return cls._to_class_tz(dt)
|
||||
|
||||
|
||||
class Leagues:
|
||||
live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.data = json.loads(
|
||||
(Path(__file__).parent / "leagues.json").read_text(encoding="utf-8")
|
||||
)
|
||||
|
||||
def teams(self, league: str) -> list[str]:
|
||||
return self.data["teams"].get(league, [])
|
||||
|
||||
def info(self, name: str) -> tuple[str | None, str]:
|
||||
name = name.upper()
|
||||
|
||||
if match := next(
|
||||
(
|
||||
(tvg_id, league_data.get("logo"))
|
||||
for tvg_id, leagues in self.data["leagues"].items()
|
||||
for league_entry in leagues
|
||||
for league_name, league_data in league_entry.items()
|
||||
if name == league_name or name in league_data.get("names", [])
|
||||
),
|
||||
None,
|
||||
):
|
||||
tvg_id, logo = match
|
||||
|
||||
return (tvg_id, logo or self.live_img)
|
||||
|
||||
return (None, self.live_img)
|
||||
|
||||
def is_valid(
|
||||
self,
|
||||
event: str,
|
||||
league: str,
|
||||
) -> bool:
|
||||
|
||||
pattern = re.compile(r"\s+(?:-|vs\.?|at|@)\s+", flags=re.IGNORECASE)
|
||||
|
||||
if pattern.search(event):
|
||||
t1, t2 = re.split(pattern, event)
|
||||
|
||||
return any(t in self.teams(league) for t in (t1.strip(), t2.strip()))
|
||||
|
||||
return event.lower() in {
|
||||
"nfl redzone",
|
||||
"redzone",
|
||||
"red zone",
|
||||
"college gameday",
|
||||
}
|
||||
|
||||
def get_tvg_info(
|
||||
self,
|
||||
sport: str,
|
||||
event: str,
|
||||
) -> tuple[str | None, str]:
|
||||
|
||||
match sport:
|
||||
case "American Football" | "NFL":
|
||||
return (
|
||||
self.info("NFL")
|
||||
if self.is_valid(event, "NFL")
|
||||
else self.info("NCAA")
|
||||
)
|
||||
|
||||
case "Basketball" | "NBA":
|
||||
if self.is_valid(event, "NBA"):
|
||||
return self.info("NBA")
|
||||
|
||||
elif self.is_valid(event, "WNBA"):
|
||||
return self.info("WNBA")
|
||||
|
||||
else:
|
||||
return self.info("Basketball")
|
||||
|
||||
case "Ice Hockey" | "Hockey":
|
||||
return self.info("NHL")
|
||||
|
||||
case _:
|
||||
return self.info(sport)
|
||||
|
||||
|
||||
leagues = Leagues()
|
||||
|
||||
__all__ = ["leagues", "Time"]
|
||||
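A sketch of the two helpers this module exports, with assumed inputs; get_tvg_info resolves a raw sport/event pair to a tvg-id and logo using leagues.json:

from M3U8.scrapers.utils import Time, leagues

now = Time.now()                                    # tz-aware, pinned to America/New_York
kickoff = Time.from_str("2025-01-05 13:00", timezone="ET")
starting_soon = now.delta(minutes=-30) <= kickoff <= now.delta(minutes=30)

tvg_id, logo = leagues.get_tvg_info("Basketball", "Lakers vs Celtics")
# both teams are NBA names, so this resolves to the NBA.Basketball.Dummy.us entry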
893  M3U8/scrapers/utils/leagues.json  Normal file
@@ -0,0 +1,893 @@
|
|||
{
|
||||
"leagues": {
|
||||
"Basketball.Dummy.us": [
|
||||
{
|
||||
"BASKETBALL": {
|
||||
"logo": "https://1000logos.net/wp-content/uploads/2024/04/Basketball-Emoji-1536x864.png",
|
||||
"names": []
|
||||
}
|
||||
}
|
||||
],
|
||||
"Golf.Dummy.us": [
|
||||
{
|
||||
"GOLF": {
|
||||
"logo": "https://i.gyazo.com/14a883f22796f631e6f97c34dbeb6ada.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"PGA": {
|
||||
"logo": "https://1000logos.net/wp-content/uploads/2024/10/PGA-Tour-Logo-500x281.png",
|
||||
"names": ["PGA TOUR"]
|
||||
}
|
||||
}
|
||||
],
|
||||
"MLB.Baseball.Dummy.us": [
|
||||
{
|
||||
"MLB": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/mlb.png",
|
||||
"names": ["BASEBALL", "MAJOR LEAGUE BASEBALL"]
|
||||
}
|
||||
}
|
||||
],
|
||||
"NBA.Basketball.Dummy.us": [
|
||||
{
|
||||
"NBA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/nba.png",
|
||||
"names": [
|
||||
"NATIONAL BASKETBALL ASSOCIATION",
|
||||
"NBA BASKETBALL",
|
||||
"NBA PRESEASON"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"NCAA.Sports.Dummy.us": [
|
||||
{
|
||||
"NCAA": {
|
||||
"logo": "https://1000logos.net/wp-content/uploads/2021/12/NCAA-Logo-500x281.png",
|
||||
"names": [
|
||||
"CBB",
|
||||
"CFB",
|
||||
"COLLEGE BASKETBALL",
|
||||
"COLLEGE FOOTBALL",
|
||||
"NCAA - BASKETBALL",
|
||||
"NCAA - FOOTBALL",
|
||||
"NCAA AMERICAN FOOTBALL",
|
||||
"NCAA BASKETBALL",
|
||||
"NCAA FOOTBALL",
|
||||
"NCAA SPORTS",
|
||||
"NCAAB",
|
||||
"NCAAB D",
|
||||
"NCAAB D-I",
|
||||
"NCAAF",
|
||||
"NCAAF D-I",
|
||||
"NCAAM",
|
||||
"NCAAW"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"NFL.Dummy.us": [
|
||||
{
|
||||
"NFL": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/nfl.png",
|
||||
"names": [
|
||||
"AMERICAN FOOTBALL",
|
||||
"NATIONAL FOOTBALL LEAGUE",
|
||||
"NFL PRESEASON",
|
||||
"USA NFL"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"NHL.Hockey.Dummy.us": [
|
||||
{
|
||||
"NHL": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/nhl.png",
|
||||
"names": [
|
||||
"HOCKEY",
|
||||
"NATIONAL HOCKEY LEAGUE",
|
||||
"NHL HOCKEY",
|
||||
"NHL PRESEASON"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"PPV.EVENTS.Dummy.us": [
|
||||
{
|
||||
"PAY PER VIEW": {
|
||||
"logo": null,
|
||||
"names": ["PAY-PER-VIEW", "PAYPERVIEW", "PPV"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"WRESTLING": {
|
||||
"logo": null,
|
||||
"names": ["AEW", "WWE"]
|
||||
}
|
||||
}
|
||||
],
|
||||
"Racing.Dummy.us": [
|
||||
{
|
||||
"F1": {
|
||||
"logo": "https://1000logos.net/wp-content/uploads/2021/06/F1-logo-500x281.png",
|
||||
"names": [
|
||||
"FORMULA 1",
|
||||
"FORMULA 1 GP",
|
||||
"FORMULA ONE",
|
||||
"FORMULA ONE GP"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"MOTO GP": {
|
||||
"logo": "https://1000logos.net/wp-content/uploads/2021/03/MotoGP-Logo-500x281.png",
|
||||
"names": ["MOTOGP"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"RACING": {
|
||||
"logo": null,
|
||||
"names": []
|
||||
}
|
||||
}
|
||||
],
|
||||
"Soccer.Dummy.us": [
|
||||
{
|
||||
"2. BUNDESLIGA": {
|
||||
"logo": "https://i.gyazo.com/6c343e57acf501f4df3502d7ec646897.png",
|
||||
"names": ["GERMAN 2. BUNDESLIGA"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"3. LIGA": {
|
||||
"logo": "https://i.gyazo.com/9f4f2e8370377b6214b4103003196de7.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"AFC CHAMPIONS LEAGUE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2200.png&scale=crop&cquality=40&location=origin&w=500&h=500",
|
||||
"names": [
|
||||
"ACL",
|
||||
"ACL ELITE",
|
||||
"AFC CHAMPIONS LEAGUE ELITE",
|
||||
"ASIAN CHAMPIONS LEAGUE"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"AFRICA CUP OF NATIONS": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/76.png",
|
||||
"names": ["AFCON"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"AUSTRIA 2 LIGA": {
|
||||
"logo": "https://i.gyazo.com/5d1464502b841fef6e5d78c8b0764b52.png",
|
||||
"names": ["ADMIRAL 2. LIGA"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"AUSTRIA BUNDESLIGA": {
|
||||
"logo": "https://i.gyazo.com/83d851fb1110f1e395690403f9cf01bb.webp",
|
||||
"names": ["ADMIRAL BUNDESLIGA", "FEDERAL LEAGUE"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"BUNDESLIGA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/10.png",
|
||||
"names": ["BUNDESLIG", "GERMAN BUNDESLIGA"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"CAF CHAMPIONS LEAGUE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2391.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"CANADIAN PREMIER LEAGUE": {
|
||||
"logo": "https://i.gyazo.com/f61986e2ccfbf88f7d753b4e7f2c9fdc.png",
|
||||
"names": ["CANPL", "CPL"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"CHAMPIONSHIP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/24.png",
|
||||
"names": [
|
||||
"ENGLISH CHAMPIONSHIP",
|
||||
"ENGLISH FOOTBALL LEAGUE CHAMPIONSHIP",
|
||||
"ENGLISH LEAGUE CHAMPIONSHIP",
|
||||
"SKY BET CHAMPIONSHIP"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"CONCACAF CENTRAL AMERICAN CUP": {
|
||||
"logo": "https://b.fssta.com/uploads/application/soccer/competition-logos/CONCACAFCentralAmericanCup.png",
|
||||
"names": ["COPA CENTROAMERICANA DE CONCACAF"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"CONCACAF CHAMPIONS LEAGUE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2298.png",
|
||||
"names": ["CONCACAF CHAMPIONS CUP"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"CONCACAF GOLD CUP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/59.png",
|
||||
"names": ["COPA ORO CONCACAF"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"CONCACAF W CHAMPIONS CUP": {
|
||||
"logo": "https://i.gyazo.com/c1caff728e9a32711254b98d008194b2.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"CONCACAF W CHAMPIONSHIP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/18969.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"COPA AMÉRICA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/83.png",
|
||||
"names": [
|
||||
"CONMEBOL COPA AMERICA",
|
||||
"COPA AMERICA",
|
||||
"COPA LIBERTADORES DE AMÉRICA",
|
||||
"SOUTH AMERICAN FOOTBALL CHAMPIONSHIP"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"COPA LIBERTADORES": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/58.png",
|
||||
"names": [
|
||||
"CONMEBOL LIBERTADORES",
|
||||
"COPA LIBERTADORES DE AMERICA",
|
||||
"COPA LIBERTADORES DE AMÉRICA",
|
||||
"LIBERTADORES"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"COPA SUDAMERICANA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/1208.png",
|
||||
"names": ["CONMEBOL SUDAMERICANA", "COPA CONMEBOL SUDAMERICANA"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"COPPA ITALIA": {
|
||||
"logo": "https://i.gyazo.com/8fd7660cca8f8b690f50979b72b295c3.png",
|
||||
"names": ["ITALIAN CUP"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"EFL": {
|
||||
"logo": "https://i.gyazo.com/c8842fbcb2eeb6a53bc69fa6055b8b5d.png",
|
||||
"names": [
|
||||
"CARABAO CUP",
|
||||
"EFL CUP",
|
||||
"ENGLISH CARABAO CUP",
|
||||
"ENGLISH FOOTBALL LEAGUE CUP",
|
||||
"LEAGUE CUP"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"EFL LEAGUE ONE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/25.png",
|
||||
"names": [
|
||||
"ENGLISH FOOTBALL LEAGUE ONE",
|
||||
"LEAGUE ONE",
|
||||
"SKY BET LEAGUE ONE"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"EFL LEAGUE TWO": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/26.png",
|
||||
"names": [
|
||||
"ENGLISH FOOTBALL LEAGUE TWO",
|
||||
"LEAGUE TWO",
|
||||
"SKY BET LEAGUE TWO"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"EKSTRAKLASA": {
|
||||
"logo": "https://i.gyazo.com/362e31efdd0dad03b00858f4fb0901b5.png",
|
||||
"names": ["PKO BANK POLSKI EKSTRAKLASA", "POLAND EKSTRAKLASA"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"EREDIVISIE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/11.png",
|
||||
"names": [
|
||||
"DUTCH EERSTE EREDIVISIE",
|
||||
"DUTCH EREDIVISIE",
|
||||
"NETHERLANDS EREDIVISIE",
|
||||
"VRIENDENLOTERIJ EREDIVISIE"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"FA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/40.png&w=500&h=500",
|
||||
"names": [
|
||||
"EMIRATES FA CUP",
|
||||
"ENGLISH FA CUP",
|
||||
"FA CUP",
|
||||
"FOOTBALL ASSOCIATION CHALLENGE CUP"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"FIFA CLUB WORLD CUP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/1932.png",
|
||||
"names": ["FIFA CWC"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"FIFA WORLD CUP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/4.png",
|
||||
"names": ["FIFA WC", "WC"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"FIFA'S WOMEN WORLD CUP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/60.png",
|
||||
"names": ["FIFA WOMEN WC"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"FOOTBALL": {
|
||||
"logo": "https://i.gyazo.com/1c4aa937f5ea01b0f29bb27adb59884c.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"FRAUEN BUNDESLIGA": {
|
||||
"logo": "https://i.gyazo.com/d13d4c0330be96801aa4b2d8b83d3a8f.png",
|
||||
"names": ["GOOGLE PIXEL FRAUEN-BUNDESLIGA", "WOMEN'S FEDERAL LEAGUE"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"GREECE CUP": {
|
||||
"logo": "https://i.gyazo.com/f80306df9b94a90f991b3cce386dc2b5.png",
|
||||
"names": ["BETSSON GREECE UP", "GREEK CUP", "GREEK FOOTBALL CUP"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"J1 LEAGUE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2199.png",
|
||||
"names": ["J.LEAGUE", "JAPANESE J.LEAGUE", "MEIJI YASUDA J1 LEAGUE"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"K LEAGUE 1": {
|
||||
"logo": "https://i.gyazo.com/721eba6c954e2015d999ead7a0bd5c69.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"LA LIGA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/15.png",
|
||||
"names": [
|
||||
"CAMPEONATO NACIONAL DE LIGA DE PRIMERA DIVISION",
|
||||
"CAMPEONATO NACIONAL DE LIGA DE PRIMERA DIVISIÓN",
|
||||
"LA-LIGA",
|
||||
"LALIGA",
|
||||
"PRIMERA DIVISION",
|
||||
"PRIMERA DIVISIÓN",
|
||||
"SPANISH LA LIGA",
|
||||
"SPANISH LALIGA"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LA LIGA 2": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/107.png",
|
||||
"names": [
|
||||
"CAMPEONATO NACIONAL DE LIGA DE SEGUNDA DIVISION",
|
||||
"CAMPEONATO NACIONAL DE LIGA DE SEGUNDA DIVISIÓN",
|
||||
"LALIGA 2",
|
||||
"SEGUNDA DIVISION",
|
||||
"SEGUNDA DIVISIÓN",
|
||||
"SPAIN SEGUNDA DIVISION",
|
||||
"SPANISH LA LIGA 2",
|
||||
"SPANISH LALIGA 2",
|
||||
"SPANISH SEGUNDA LIGA"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LA PRIMERA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2244.png",
|
||||
"names": [
|
||||
"LA LIGA MAYOR",
|
||||
"PRIMERA DIVISION DE FUTBOL PROFESIONAL DE EL SALVADOR",
|
||||
"PRIMERA DIVISIÓN DE EL SALVADOR",
|
||||
"PRIMERA DIVISIÓN DE FÚTBOL PROFESIONAL DE EL SALVADOR",
|
||||
"SALVADORAN PRIMERA DIVISION"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LEAGUES CUP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2410.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGA DE EXPANSIÓN MX": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2306.png",
|
||||
"names": ["LIGA BBVA EXPANSIÓN MX"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGA FPD": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2245.png",
|
||||
"names": [
|
||||
"COSTA RICAN PRIMERA DIVISION",
|
||||
"LIGA DE FUTBOL DE PRIMERA DIVISION",
|
||||
"LIGA DE FÚTBOL DE PRIMERA DIVISIÓN",
|
||||
"LIGA PROMERICA",
|
||||
"PRIMERA DIVISION OF COSTA RICA",
|
||||
"PRIMERA DIVISIÓN OF COSTA RICA"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGA GUATE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2248.png",
|
||||
"names": [
|
||||
"LIGA GUATE BANRURAL",
|
||||
"LIGA NACIONAL",
|
||||
"LIGA NACIONAL DE FUTBOL DE GUATEMALA",
|
||||
"LIGA NACIONAL DE FÚTBOL DE GUATEMALA"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGA HONDUBET": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2247.png",
|
||||
"names": [
|
||||
"HONDURAN LIGA NACIONAL",
|
||||
"LIGA NACIONAL DE FUTBOL PROFESIONAL DE HONDURAS",
|
||||
"LIGA NACIONAL DE FÚTBOL PROFESIONAL DE HONDURAS"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGA I": {
|
||||
"logo": "https://i.gyazo.com/3fd4b38d5263ca391e45850eb58d11e6.png",
|
||||
"names": [
|
||||
"ROMANIA LIGA 1",
|
||||
"ROMANIA LIGA I",
|
||||
"ROMANIAN LIGA 1",
|
||||
"ROMANIAN LIGA I",
|
||||
"SUPERLIGA"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGA MX": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/22.png",
|
||||
"names": [
|
||||
"LIGA BBVA MX",
|
||||
"MEXICAN LIGA BBVA MX",
|
||||
"MEXICO LIGA MX",
|
||||
"PRIMERA DIVISION DE MEXICO",
|
||||
"PRIMERA DIVISIÓN DE MÉXICO"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGA MX FEMENIL": {
|
||||
"logo": "https://i.gyazo.com/ee0e1ba5ea748951b7ec7f46fb411c4f.png",
|
||||
"names": ["LIGA BBVA MX FEMENIL", "MEXICO WOMEN LIGA MX"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGA PROFESIONAL ARGENTINA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/1.png",
|
||||
"names": [
|
||||
"ARGENTINE PRIMERA DIVISION",
|
||||
"ARGENTINE PRIMERA DIVISIÓN",
|
||||
"LIGA PROFESIONAL DE FUTBOL",
|
||||
"LIGA PROFESIONAL DE FÚTBOL",
|
||||
"PRIMERA DIVISION",
|
||||
"PRIMERA DIVISIÓN",
|
||||
"TORNEO BETANO"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGUE 1": {
|
||||
"logo": "https://ligue1.com/images/Logo_Ligue_1.webp",
|
||||
"names": ["FRANCE LIGUE 1", "FRENCH LIGUE 1"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"LIGUE 2": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/96.png",
|
||||
"names": ["FRANCE LIGUE 2", "FRENCH LIGUE 2"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"MLS": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/19.png",
|
||||
"names": ["MAJOR LEAGUE SOCCER"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"NORTHERN SUPER LEAGUE": {
|
||||
"logo": "https://i.gyazo.com/042f5bf51ab721bede2d9b56ce1818ae.png",
|
||||
"names": ["NSL"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"NWSL": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2323.png",
|
||||
"names": ["NATIONAL WOMEN'S SOCCER LEAGUE", "NWSL WOMEN"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"NWSL CHALLENGE CUP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2445.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"PREMIER LEAGUE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/23.png",
|
||||
"names": ["ENGLISH PREMIER LEAGUE", "EPL"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"PRIMEIRA LIGA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/14.png",
|
||||
"names": ["LIGA PORTUGAL", "PORTUGUESE PRIMEIRA LIGA"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"PRIMERA A": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/1543.png",
|
||||
"names": ["COLOMBIA PRIMERA A", "COLOMBIAN PRIMERA A"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"PRIMERA B": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2295.png",
|
||||
"names": ["COLOMBIA PRIMERA B", "COLOMBIAN PRIMERA B"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"SCOTTISH PREMIERSHIP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/45.png",
|
||||
"names": ["PREMIERSHIP", "SPFL"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"SERBIA SUPERLIGA": {
|
||||
"logo": "https://i.gyazo.com/0992f078dcacfef489477fc7bb1f5220.webp",
|
||||
"names": ["MOZZART SUPERLIGA", "SERBIAN SUPER LEAGUE"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"SERIE A": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/12.png",
|
||||
"names": ["ITALIAN SERIE A", "ITALY SERIE A", "SERIE-A"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"SERIE B": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/99.png",
|
||||
"names": ["ITALIAN SERIE B", "ITALY SERIE B", "SERIE-B"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"SOCCER": {
|
||||
"logo": "https://i.gyazo.com/1c4aa937f5ea01b0f29bb27adb59884c.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"SUPER LEAGUE GREECE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/98.png",
|
||||
"names": [
|
||||
"A1 ETHNIKI KATIGORIA",
|
||||
"GREECE SUPER LEAGUE",
|
||||
"GREEK SUPER LEAGUE",
|
||||
"SUPER LEAGUE 1"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"SÜPER LIG": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/18.png",
|
||||
"names": [
|
||||
"SUPER LIG",
|
||||
"SUPERLIG",
|
||||
"SÜPERLIG",
|
||||
"TURKEY SUPER LIG",
|
||||
"TURKISH SUPER LIG"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"TURKEY 1 LIG": {
|
||||
"logo": "https://i.gyazo.com/730673f84223a85c9b9ae66123907bba.png",
|
||||
"names": ["TFF 1. LIG", "TRENDYOL 1. LIG"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"U.S. OPEN CUP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/69.png",
|
||||
"names": ["LAMAR HUNT U.S. OPEN CUP", "US OPEN CUP", "USOC"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"UEFA CHAMPIONS LEAGUE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2.png",
|
||||
"names": ["CHAMPIONS LEAGUE", "UCL"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"UEFA CONFERENCE LEAGUE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/20296.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"UEFA EUROPA LEAGUE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2310.png",
|
||||
"names": ["EUROPA LEAGUE"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"UEFA EUROPEAN CHAMPIONSHIP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/74.png",
|
||||
"names": ["EUROS", "UEFA EUROS"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"UEFA SUPER CUP": {
|
||||
"logo": "https://i.gyazo.com/3b786181aba130321b85c0e2f9604652.png",
|
||||
"names": ["EUROPEAN SUPER CUP"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"UEFA WOMEN'S CHAMPIONS LEAGUE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2408.png",
|
||||
"names": ["UCL WOMEN", "UEFA WOMEN", "WOMEN'S CHAMPIONS LEAGUE"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"USL CHAMPIONSHIP": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2292.png",
|
||||
"names": ["UNITED SOCCER LEAGUE CHAMPIONSHIP", "USLC"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"USL LEAGUE ONE": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2452.png",
|
||||
"names": ["UNITED SOCCER LEAGUE LEAGUE ONE", "USL 1", "USL1"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"WORLD CUP QUALIFIERS": {
|
||||
"logo": "https://i.gyazo.com/1c4aa937f5ea01b0f29bb27adb59884c.png",
|
||||
"names": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"WSL": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2314.png",
|
||||
"names": [
|
||||
"BARCLAY'S WOMEN'S SUPER LEAGUE",
|
||||
"ENGLISH WOMEN'S SUPER LEAGUE",
|
||||
"FA WSL",
|
||||
"WOMEN'S SUPER LEAGUE"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"Tennis.Dummy.us": [
|
||||
{
|
||||
"TENNIS": {
|
||||
"logo": "https://i.gyazo.com/b5e83afc3a75dacfb831abe975fd3821.png",
|
||||
"names": []
|
||||
}
|
||||
}
|
||||
],
|
||||
"UFC.247.Dummy.us": [
|
||||
{
|
||||
"UFC": {
|
||||
"logo": "https://1000logos.net/wp-content/uploads/2017/06/Logo-UFC-500x313.png",
|
||||
"names": ["UFC FIGHT NIGHT"]
|
||||
}
|
||||
}
|
||||
],
|
||||
"WNBA.dummy.us": [
|
||||
{
|
||||
"WNBA": {
|
||||
"logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/wnba.png",
|
||||
"names": ["NBA W", "WOMEN'S NATIONAL BASKETBALL ASSOCIATION"]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"teams": {
|
||||
"NBA": [
|
||||
"76ers",
|
||||
"Atlanta Hawks",
|
||||
"Blazers",
|
||||
"Boston Celtics",
|
||||
"Brooklyn Nets",
|
||||
"Bucks",
|
||||
"Bulls",
|
||||
"Cavaliers",
|
||||
"Celtics",
|
||||
"Charlotte Hornets",
|
||||
"Chicago Bulls",
|
||||
"Cleveland Cavaliers",
|
||||
"Clippers",
|
||||
"Dallas Mavericks",
|
||||
"Denver Nuggets",
|
||||
"Detroit Pistons",
|
||||
"Golden State Warriors",
|
||||
"Grizzlies",
|
||||
"Hawks",
|
||||
"Heat",
|
||||
"Hornets",
|
||||
"Houston Rockets",
|
||||
"Indiana Pacers",
|
||||
"Jazz",
|
||||
"Kings",
|
||||
"Knicks",
|
||||
"Lakers",
|
||||
"Los Angeles Clippers",
|
||||
"Los Angeles Lakers",
|
||||
"Magic",
|
||||
"Mavericks",
|
||||
"Memphis Grizzlies",
|
||||
"Miami Heat",
|
||||
"Milwaukee Bucks",
|
||||
"Minnesota Timberwolves",
|
||||
"Nets",
|
||||
"New Orleans Pelicans",
|
||||
"New York Knicks",
|
||||
"Nuggets",
|
||||
"Oklahoma City Thunder",
|
||||
"Orlando Magic",
|
||||
"Pacers",
|
||||
"Pelicans",
|
||||
"Philadelphia 76ers",
|
||||
"Phoenix Suns",
|
||||
"Pistons",
|
||||
"Portland Trail Blazers",
|
||||
"Raptors",
|
||||
"Rockets",
|
||||
"Sacramento Kings",
|
||||
"San Antonio Spurs",
|
||||
"Sixers",
|
||||
"Spurs",
|
||||
"Suns",
|
||||
"Thunder",
|
||||
"Timberwolves",
|
||||
"Toronto Raptors",
|
||||
"Trail Blazers",
|
||||
"Utah Jazz",
|
||||
"Warriors",
|
||||
"Washington Wizards",
|
||||
"Wizards",
|
||||
"Wolves"
|
||||
],
|
||||
"NFL": [
|
||||
"49ers",
|
||||
"9ers",
|
||||
"Arizona Cardinals",
|
||||
"Atlanta Falcons",
|
||||
"Baltimore Ravens",
|
||||
"Bears",
|
||||
"Bengals",
|
||||
"Bills",
|
||||
"Broncos",
|
||||
"Browns",
|
||||
"Buccaneers",
|
||||
"Buffalo Bills",
|
||||
"Cardinals",
|
||||
"Carolina Panthers",
|
||||
"Chargers",
|
||||
"Chicago Bears",
|
||||
"Chiefs",
|
||||
"Cincinnati Bengals",
|
||||
"Cleveland Browns",
|
||||
"Colts",
|
||||
"Commanders",
|
||||
"Cowboys",
|
||||
"Dallas Cowboys",
|
||||
"Denver Broncos",
|
||||
"Detroit Lions",
|
||||
"Dolphins",
|
||||
"Eagles",
|
||||
"Falcons",
|
||||
"Giants",
|
||||
"Green Bay Packers",
|
||||
"Houston Texans",
|
||||
"Indianapolis Colts",
|
||||
"Jacksonville Jaguars",
|
||||
"Jaguars",
|
||||
"Jets",
|
||||
"Kansas City Chiefs",
|
||||
"Las Vegas Raiders",
|
||||
"Lions",
|
||||
"Los Angeles Chargers",
|
||||
"Los Angeles Rams",
|
||||
"Miami Dolphins",
|
||||
"Minnesota Vikings",
|
||||
"New England Patriots",
|
||||
"New Orleans Saints",
|
||||
"New York Giants",
|
||||
"New York Jets",
|
||||
"Niners",
|
||||
"Packers",
|
||||
"Panthers",
|
||||
"Patriots",
|
||||
"Philadelphia Eagles",
|
||||
"Pittsburgh Steelers",
|
||||
"Raiders",
|
||||
"Rams",
|
||||
"Ravens",
|
||||
"Redskins",
|
||||
"Saints",
|
||||
"San Francisco 49ers",
|
||||
"Seahawks",
|
||||
"Seattle Seahawks",
|
||||
"Steelers",
|
||||
"Tampa Bay Buccaneers",
|
||||
"Tennessee Titans",
|
||||
"Texans",
|
||||
"Titans",
|
||||
"Vikings",
|
||||
"Washington Commanders",
|
||||
"Washington Redskins"
|
||||
],
|
||||
"WNBA": [
|
||||
"Aces",
|
||||
"Atlanta Dream",
|
||||
"Chicago Sky",
|
||||
"Connecticut Sun",
|
||||
"Dallas Wings",
|
||||
"Dream",
|
||||
"Fever",
|
||||
"Golden State Valkyries",
|
||||
"Indiana Fever",
|
||||
"Las Vegas Aces",
|
||||
"Liberty",
|
||||
"Los Angeles Sparks",
|
||||
"Lynx",
|
||||
"Mercury",
|
||||
"Minnesota Lynx",
|
||||
"Mystics",
|
||||
"New York Liberty",
|
||||
"Phoenix Mercury",
|
||||
"Seattle Storm",
|
||||
"Sky",
|
||||
"Sparks",
|
||||
"Storm",
|
||||
"Sun",
|
||||
"Valkyries",
|
||||
"Washington Mystics",
|
||||
"Wings"
|
||||
]
|
||||
}
|
||||
}
|
||||
50  M3U8/scrapers/utils/logger.py  Normal file
@@ -0,0 +1,50 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
LOG_FMT = (
|
||||
"[%(asctime)s] "
|
||||
"%(levelname)-8s "
|
||||
"[%(name)s] "
|
||||
"%(message)-70s "
|
||||
"(%(filename)s:%(lineno)d)"
|
||||
)
|
||||
|
||||
COLORS = {
|
||||
"DEBUG": "\033[36m",
|
||||
"INFO": "\033[32m",
|
||||
"WARNING": "\033[33m",
|
||||
"ERROR": "\033[31m",
|
||||
"CRITICAL": "\033[1;41m",
|
||||
"reset": "\033[0m",
|
||||
}
|
||||
|
||||
|
||||
class ColorFormatter(logging.Formatter):
|
||||
def format(self, record) -> str:
|
||||
color = COLORS.get(record.levelname, COLORS["reset"])
|
||||
levelname = record.levelname
|
||||
record.levelname = f"{color}{levelname:<8}{COLORS['reset']}"
|
||||
formatted = super().format(record)
|
||||
record.levelname = levelname
|
||||
|
||||
return formatted
|
||||
|
||||
|
||||
def get_logger(name: str | None = None) -> logging.Logger:
|
||||
if not name:
|
||||
name = Path(__file__).stem
|
||||
|
||||
logger = logging.getLogger(name)
|
||||
|
||||
if not logger.hasHandlers():
|
||||
handler = logging.StreamHandler()
|
||||
formatter = ColorFormatter(LOG_FMT, datefmt="%Y-%m-%d | %H:%M:%S")
|
||||
handler.setFormatter(formatter)
|
||||
logger.addHandler(handler)
|
||||
logger.setLevel(logging.INFO)
|
||||
logger.propagate = False
|
||||
|
||||
return logger
|
||||
|
||||
|
||||
__all__ = ["get_logger", "ColorFormatter"]
|
||||
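Typical use, mirroring what the scraper modules do at import time:

from M3U8.scrapers.utils import get_logger

log = get_logger(__name__)
log.info("Collected and cached 3 new event(s)")  # level name is colorized by ColorFormatter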
255  M3U8/scrapers/utils/webwork.py  Normal file
@@ -0,0 +1,255 @@
|
|||
import asyncio
|
||||
import logging
|
||||
import random
|
||||
import re
|
||||
from collections.abc import Awaitable, Callable
|
||||
from functools import partial
|
||||
from typing import TypeVar
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import Browser, BrowserContext, Playwright, Request
|
||||
|
||||
from .logger import get_logger
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class Network:
|
||||
UA = (
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
|
||||
"AppleWebKit/537.36 (KHTML, like Gecko) "
|
||||
"Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.client = httpx.AsyncClient(
|
||||
timeout=5,
|
||||
follow_redirects=True,
|
||||
headers={"User-Agent": Network.UA},
|
||||
http2=True,
|
||||
)
|
||||
|
||||
self._logger = get_logger("network")
|
||||
|
||||
async def check_status(self, url: str) -> bool:
|
||||
try:
|
||||
r = await self.client.get(url)
|
||||
r.raise_for_status()
|
||||
return r.status_code == 200
|
||||
except (httpx.HTTPError, httpx.TimeoutException) as e:
|
||||
self._logger.debug(f"Status check failed for {url}: {e}")
|
||||
return False
|
||||
|
||||
async def get_base(self, mirrors: list[str]) -> str | None:
|
||||
random.shuffle(mirrors)
|
||||
|
||||
tasks = [self.check_status(link) for link in mirrors]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
working_mirrors = [
|
||||
mirror for mirror, success in zip(mirrors, results) if success
|
||||
]
|
||||
|
||||
return working_mirrors[0] if working_mirrors else None
|
||||
|
||||
@staticmethod
|
||||
async def safe_process(
|
||||
fn: Callable[[], Awaitable[T]],
|
||||
url_num: int,
|
||||
timeout: int | float = 15,
|
||||
log: logging.Logger | None = None,
|
||||
) -> T | None:
|
||||
|
||||
if not log:
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
task = asyncio.create_task(fn())
|
||||
|
||||
try:
|
||||
return await asyncio.wait_for(task, timeout=timeout)
|
||||
except asyncio.TimeoutError:
|
||||
log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")
|
||||
|
||||
task.cancel()
|
||||
|
||||
try:
|
||||
await task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
except Exception as e:
|
||||
log.debug(f"URL {url_num}) Ignore exception after timeout: {e}")
|
||||
|
||||
return None
|
||||
except Exception as e:
|
||||
log.error(f"URL {url_num}) Unexpected error: {e}")
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def capture_req(
|
||||
req: Request,
|
||||
captured: list[str],
|
||||
got_one: asyncio.Event,
|
||||
) -> None:
|
||||
|
||||
invalids = ["amazonaws", "knitcdn"]
|
||||
|
||||
escaped = [re.escape(i) for i in invalids]
|
||||
|
||||
pattern = re.compile(
|
||||
rf"^(?!.*({'|'.join(escaped)})).*\.m3u8",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
if pattern.search(req.url):
|
||||
captured.append(req.url)
|
||||
got_one.set()
|
||||
|
||||
async def process_event(
|
||||
self,
|
||||
url: str,
|
||||
url_num: int,
|
||||
context: BrowserContext,
|
||||
timeout: int | float = 10,
|
||||
log: logging.Logger | None = None,
|
||||
) -> str | None:
|
||||
|
||||
# fall back to the module-level logger if no logger was passed in
if not log:
    log = self._logger

page = await context.new_page()
|
||||
|
||||
captured: list[str] = []
|
||||
|
||||
got_one = asyncio.Event()
|
||||
|
||||
handler = partial(
|
||||
self.capture_req,
|
||||
captured=captured,
|
||||
got_one=got_one,
|
||||
)
|
||||
|
||||
page.on("request", handler)
|
||||
|
||||
try:
|
||||
await page.goto(
|
||||
url,
|
||||
wait_until="domcontentloaded",
|
||||
timeout=15_000,
|
||||
)
|
||||
|
||||
wait_task = asyncio.create_task(got_one.wait())
|
||||
|
||||
try:
|
||||
await asyncio.wait_for(wait_task, timeout=timeout)
|
||||
except asyncio.TimeoutError:
|
||||
log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
|
||||
return
|
||||
|
||||
finally:
|
||||
if not wait_task.done():
|
||||
wait_task.cancel()
|
||||
|
||||
try:
|
||||
await wait_task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
if captured:
|
||||
log.info(f"URL {url_num}) Captured M3U8")
|
||||
return captured[0]
|
||||
|
||||
log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
|
||||
return
|
||||
|
||||
except Exception as e:
|
||||
log.warning(f"URL {url_num}) Exception while processing: {e}")
|
||||
return
|
||||
|
||||
finally:
|
||||
page.remove_listener("request", handler)
|
||||
await page.close()
|
||||
|
||||
@staticmethod
|
||||
async def browser(
|
||||
playwright: Playwright,
|
||||
browser: str = "firefox",
|
||||
ignore_https_errors: bool = False,
|
||||
) -> tuple[Browser, BrowserContext]:
|
||||
|
||||
if browser == "brave":
|
||||
brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222")
|
||||
context = brwsr.contexts[0]
|
||||
else:
|
||||
brwsr = await playwright.firefox.launch(headless=True)
|
||||
|
||||
context = await brwsr.new_context(
|
||||
user_agent=Network.UA,
|
||||
ignore_https_errors=ignore_https_errors,
|
||||
viewport={"width": 1366, "height": 768},
|
||||
device_scale_factor=1,
|
||||
locale="en-US",
|
||||
timezone_id="America/New_York",
|
||||
color_scheme="dark",
|
||||
permissions=["geolocation"],
|
||||
extra_http_headers={
|
||||
"Accept-Language": "en-US,en;q=0.9",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
},
|
||||
)
|
||||
|
||||
await context.add_init_script(
|
||||
"""
|
||||
Object.defineProperty(navigator, "webdriver", { get: () => undefined });
|
||||
|
||||
Object.defineProperty(navigator, "languages", {
|
||||
get: () => ["en-US", "en"],
|
||||
});
|
||||
|
||||
Object.defineProperty(navigator, "plugins", {
|
||||
get: () => [1, 2, 3, 4],
|
||||
});
|
||||
|
||||
const elementDescriptor = Object.getOwnPropertyDescriptor(
|
||||
HTMLElement.prototype,
|
||||
"offsetHeight"
|
||||
);
|
||||
|
||||
Object.defineProperty(HTMLDivElement.prototype, "offsetHeight", {
|
||||
...elementDescriptor,
|
||||
get: function () {
|
||||
if (this.id === "modernizr") {
|
||||
return 24;
|
||||
}
|
||||
return elementDescriptor.get.apply(this);
|
||||
},
|
||||
});
|
||||
|
||||
Object.defineProperty(window.screen, "width", { get: () => 1366 });
|
||||
Object.defineProperty(window.screen, "height", { get: () => 768 });
|
||||
|
||||
const getParameter = WebGLRenderingContext.prototype.getParameter;
|
||||
|
||||
WebGLRenderingContext.prototype.getParameter = function (param) {
|
||||
if (param === 37445) return "Intel Inc."; // UNMASKED_VENDOR_WEBGL
|
||||
if (param === 37446) return "Intel Iris OpenGL Engine"; // UNMASKED_RENDERER_WEBGL
|
||||
return getParameter.apply(this, [param]);
|
||||
};
|
||||
|
||||
const observer = new MutationObserver((mutations) => {
|
||||
mutations.forEach((mutation) => {
|
||||
mutation.addedNodes.forEach((node) => {
|
||||
if (node.tagName === "IFRAME" && node.hasAttribute("sandbox")) {
|
||||
node.removeAttribute("sandbox");
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
observer.observe(document.documentElement, { childList: true, subtree: true });
|
||||
|
||||
"""
|
||||
)
|
||||
|
||||
return brwsr, context
|
||||
|
||||
|
||||
network = Network()
|
||||
|
||||
__all__ = ["network"]
|
||||
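A sketch of how the scrapers combine the Network helpers: pick a live mirror, then capture an m3u8 through Playwright under a hard timeout (mirror and event URLs here are placeholders):

import asyncio
from functools import partial

from playwright.async_api import async_playwright

from M3U8.scrapers.utils import get_logger, network

log = get_logger("example")


async def main() -> None:
    base = await network.get_base(["https://mirror-one.example", "https://mirror-two.example"])
    if not base:
        return

    async with async_playwright() as p:
        browser, context = await network.browser(p)

        handler = partial(
            network.process_event,
            url=f"{base}/some/event/page",
            url_num=1,
            context=context,
            log=log,
        )
        m3u8 = await network.safe_process(handler, url_num=1, log=log)
        log.info(f"Captured: {m3u8}")

        await browser.close()


asyncio.run(main())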
304  M3U8/scrapers/watchfooty.py  Normal file
@@ -0,0 +1,304 @@
|
|||
import asyncio
|
||||
import re
|
||||
from functools import partial
|
||||
from itertools import chain
|
||||
from typing import Any
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import BrowserContext, TimeoutError as PlaywrightTimeoutError, async_playwright
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("watchfty.json", exp=10_800)
|
||||
|
||||
API_FILE = Cache("watchfty-api.json", exp=28_800)
|
||||
|
||||
API_MIRRORS = ["https://api.watchfooty.top", "https://api.watchfooty.st"]
|
||||
|
||||
BASE_MIRRORS = ["https://www.watchfooty.top", "https://www.watchfooty.st"]
|
||||
|
||||
SPORT_ENDPOINTS = [
|
||||
"american-football",
|
||||
# "australian-football",
|
||||
# "baseball",
|
||||
"basketball",
|
||||
# "cricket",
|
||||
# "darts",
|
||||
"fighting",
|
||||
"football",
|
||||
"golf",
|
||||
"hockey",
|
||||
"racing",
|
||||
# "rugby",
|
||||
# "tennis",
|
||||
# "volleyball",
|
||||
]
|
||||
|
||||
TAG = "WFTY"
|
||||
|
||||
|
||||
async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
|
||||
try:
|
||||
r = await client.get(url, timeout=5)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return []
|
||||
|
||||
return r.json()
|
||||
|
||||
|
||||
async def refresh_api_cache(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
) -> list[dict[str, Any]]:
|
||||
log.info("Refreshing API cache")
|
||||
|
||||
tasks = [
|
||||
get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
|
||||
for sport in SPORT_ENDPOINTS
|
||||
]
|
||||
|
||||
results = await asyncio.gather(*tasks)
|
||||
|
||||
data = list(chain(*results))
|
||||
|
||||
for ev in data:
|
||||
ev["ts"] = ev.pop("timestamp")
|
||||
|
||||
# guard: every endpoint may have failed, leaving data empty
if data:
    data[-1]["timestamp"] = Time.now().timestamp()
|
||||
|
||||
return data
|
||||
|
||||
|
||||
async def process_event(
|
||||
url: str,
|
||||
url_num: int,
|
||||
context: BrowserContext,
|
||||
) -> str | None:
|
||||
|
||||
page = await context.new_page()
|
||||
|
||||
captured: list[str] = []
|
||||
|
||||
got_one = asyncio.Event()
|
||||
|
||||
handler = partial(
|
||||
network.capture_req,
|
||||
captured=captured,
|
||||
got_one=got_one,
|
||||
)
|
||||
|
||||
page.on("request", handler)
|
||||
|
||||
try:
|
||||
await page.goto(
|
||||
url,
|
||||
wait_until="domcontentloaded",
|
||||
timeout=15_000,
|
||||
)
|
||||
|
||||
await page.wait_for_timeout(1_500)
|
||||
|
||||
try:
|
||||
header = await page.wait_for_selector(
|
||||
"text=/Stream Links/i",
|
||||
timeout=5_000,
|
||||
)
|
||||
|
||||
text = await header.inner_text()
|
||||
except PlaywrightTimeoutError:
|
||||
log.warning(f"URL {url_num}) Can't find stream links header.")
|
||||
return
|
||||
|
||||
match = re.search(r"\((\d+)\)", text)
|
||||
|
||||
if not match or int(match[1]) == 0:
|
||||
log.warning(f"URL {url_num}) No available stream links.")
|
||||
return
|
||||
|
||||
first_available = await page.wait_for_selector(
|
||||
'a[href*="/stream/"]', timeout=3_000
|
||||
)
|
||||
|
||||
await first_available.click()
|
||||
|
||||
wait_task = asyncio.create_task(got_one.wait())
|
||||
|
||||
try:
|
||||
await asyncio.wait_for(wait_task, timeout=6)
|
||||
except asyncio.TimeoutError:
|
||||
log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
|
||||
return
|
||||
|
||||
finally:
|
||||
if not wait_task.done():
|
||||
wait_task.cancel()
|
||||
|
||||
try:
|
||||
await wait_task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
if captured:
|
||||
log.info(f"URL {url_num}) Captured M3U8")
|
||||
return captured[-1]
|
||||
|
||||
log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
|
||||
return
|
||||
|
||||
except Exception as e:
|
||||
log.warning(f"URL {url_num}) Exception while processing: {e}")
|
||||
return
|
||||
|
||||
finally:
|
||||
page.remove_listener("request", handler)
|
||||
await page.close()
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
api_url: str,
|
||||
base_url: str,
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
|
||||
if not (api_data := API_FILE.load(per_entry=False, index=-1)):
|
||||
api_data = await refresh_api_cache(client, api_url)
|
||||
|
||||
API_FILE.write(api_data)
|
||||
|
||||
events = []
|
||||
|
||||
now = Time.clean(Time.now())
|
||||
start_dt = now.delta(hours=-1)
|
||||
end_dt = now.delta(minutes=10)
|
||||
pattern = re.compile(r"\-+|\(")
|
||||
|
||||
for event in api_data:
|
||||
match_id = event.get("matchId")
|
||||
name = event.get("title")
|
||||
league = event.get("league")
|
||||
|
||||
if not (match_id and name and league):
|
||||
continue
|
||||
|
||||
if not (ts := event.get("ts")):
|
||||
continue
|
||||
|
||||
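# API timestamps look like milliseconds; dropping the last three digits converts to seconds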
start_ts = int(f"{ts}"[:-3])
|
||||
|
||||
event_dt = Time.from_ts(start_ts)
|
||||
|
||||
if not start_dt <= event_dt <= end_dt:
|
||||
continue
|
||||
|
||||
sport = pattern.split(league, 1)[0].strip()
|
||||
|
||||
logo = urljoin(api_url, poster) if (poster := event.get("poster")) else None
|
||||
|
||||
key = f"[{sport}] {name} ({TAG})"
|
||||
|
||||
if key in cached_keys:
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": name,
|
||||
"link": urljoin(base_url, f"stream/{match_id}"),
|
||||
"logo": logo,
|
||||
"timestamp": event_dt.timestamp(),
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
|
||||
valid_count = cached_count = len(valid_urls)
|
||||
urls.update(valid_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
base_url = await network.get_base(BASE_MIRRORS)
|
||||
|
||||
api_url = await network.get_base(API_MIRRORS)
|
||||
|
||||
if not (base_url and api_url):
|
||||
log.warning("No working Watch Footy mirrors")
|
||||
CACHE_FILE.write(cached_urls)
|
||||
return
|
||||
|
||||
log.info(f'Scraping from "{base_url}"')
|
||||
|
||||
events = await get_events(
|
||||
client,
|
||||
api_url,
|
||||
base_url,
|
||||
set(cached_urls.keys()),
|
||||
)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p)
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
sport, event, logo, ts, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["logo"],
|
||||
ev["timestamp"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
tvg_id, pic = leagues.get_tvg_info(sport, event)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo or pic,
|
||||
"base": base_url,
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
cached_urls[key] = entry
|
||||
|
||||
if url:
|
||||
valid_count += 1
|
||||
urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := valid_count - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
185
M3U8/scrapers/webcast.py
Normal file
@@ -0,0 +1,185 @@
import asyncio
|
||||
from functools import partial
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
from selectolax.parser import HTMLParser
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("webcast.json", exp=10_800)
|
||||
|
||||
HTML_CACHE = Cache("webcast-html.json", exp=86_400)
|
||||
|
||||
BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}
|
||||
|
||||
TAG = "WEBCST"
|
||||
|
||||
|
||||
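# turn "Away @ Home" listings into "Away vs Home"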
def fix_event(s: str) -> str:
|
||||
return " vs ".join(s.split("@"))
|
||||
|
||||
|
||||
async def refresh_html_cache(
|
||||
client: httpx.AsyncClient, url: str
|
||||
) -> dict[str, dict[str, str | float]]:
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return {}
|
||||
|
||||
now = Time.now()
|
||||
|
||||
soup = HTMLParser(r.content)
|
||||
|
||||
events = {}
|
||||
|
||||
title = soup.css_first("title").text(strip=True)
|
||||
|
||||
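# infer the sport from the page <title>; BASE_URLS only covers NFL and NHL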
sport = "NFL" if "NFL" in title else "NHL"
|
||||
|
||||
date_text = now.strftime("%B %d, %Y")
|
||||
|
||||
if date_row := soup.css_first("tr.mdatetitle"):
|
||||
if mtdate_span := date_row.css_first("span.mtdate"):
|
||||
date_text = mtdate_span.text(strip=True)
|
||||
|
||||
for row in soup.css("tr.singele_match_date"):
|
||||
if not (time_node := row.css_first("td.matchtime")):
|
||||
continue
|
||||
|
||||
time = time_node.text(strip=True)
|
||||
|
||||
if not (vs_node := row.css_first("td.teamvs a")):
|
||||
continue
|
||||
|
||||
event_name = vs_node.text(strip=True)
|
||||
|
||||
for span in vs_node.css("span.mtdate"):
|
||||
date = span.text(strip=True)
|
||||
|
||||
event_name = event_name.replace(date, "").strip()
|
||||
|
||||
if not (href := vs_node.attributes.get("href")):
|
||||
continue
|
||||
|
||||
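# listed start times are treated as PM in US Eastern time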
event_dt = Time.from_str(f"{date_text} {time} PM", timezone="EST")
|
||||
|
||||
event = fix_event(event_name)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
events[key] = {
|
||||
"sport": sport,
|
||||
"event": event,
|
||||
"link": href,
|
||||
"event_ts": event_dt.timestamp(),
|
||||
"timestamp": now.timestamp(),
|
||||
}
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient, cached_keys: set[str]
|
||||
) -> list[dict[str, str]]:
|
||||
now = Time.clean(Time.now())
|
||||
|
||||
if not (events := HTML_CACHE.load()):
|
||||
log.info("Refreshing HTML cache")
|
||||
|
||||
tasks = [refresh_html_cache(client, url) for url in BASE_URLS.values()]
|
||||
|
||||
results = await asyncio.gather(*tasks)
|
||||
|
||||
events = {k: v for data in results for k, v in data.items()}
|
||||
|
||||
HTML_CACHE.write(events)
|
||||
|
||||
live = []
|
||||
|
||||
start_ts = now.delta(minutes=-30).timestamp()
|
||||
end_ts = now.delta(minutes=30).timestamp()
|
||||
|
||||
for k, v in events.items():
|
||||
if k in cached_keys:
|
||||
continue
|
||||
|
||||
if not start_ts <= v["event_ts"] <= end_ts:
|
||||
continue
|
||||
|
||||
live.append({**v})
|
||||
|
||||
return live
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
sources = " & ".join(BASE_URLS.values())
log.info(f'Scraping from "{sources}"')
|
||||
|
||||
events = await get_events(client, set(cached_urls.keys()))
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p)
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
network.process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
log=log,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, ts, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["event_ts"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": BASE_URLS[sport],
|
||||
"timestamp": ts,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
144
health.sh
Normal file
@@ -0,0 +1,144 @@
#!/bin/bash
|
||||
base_file="./M3U8/base.m3u8"
|
||||
UA="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
|
||||
MAX_JOBS=10
|
||||
RETRY_COUNT=3
|
||||
README="./readme.md"
|
||||
STATUSLOG=$(mktemp)
|
||||
|
||||
get_status() {
|
||||
local url="$1"
|
||||
local channel="$2"
|
||||
local attempt response status_code
|
||||
|
||||
[[ "$url" != http* ]] && return
|
||||
|
||||
for attempt in $(seq 1 "$RETRY_COUNT"); do
|
||||
response=$(
|
||||
curl -skL \
|
||||
-A "$UA" \
|
||||
-H "Accept: */*" \
|
||||
-H "Accept-Language: en-US,en;q=0.9" \
|
||||
-H "Accept-Encoding: gzip, deflate, br" \
|
||||
-H "Connection: keep-alive" \
|
||||
-o /dev/null \
|
||||
--max-time 15 \
|
||||
-w "%{http_code}" \
|
||||
"$url" 2>&1
|
||||
)
|
||||
|
||||
[[ "$response" =~ ^[0-9]+$ ]] && break
|
||||
|
||||
sleep 1
|
||||
done
|
||||
|
||||
if [[ ! "$response" =~ ^[0-9]+$ ]]; then
|
||||
if [[ "$response" == *"timed out"* ]]; then
|
||||
echo "| $channel | Connection timed out | \`$url\` |" >>"$STATUSLOG"
|
||||
else
|
||||
echo "| $channel | Curl error | \`$url\` |" >>"$STATUSLOG"
|
||||
fi
|
||||
|
||||
echo "FAIL" >>"$STATUSLOG"
|
||||
|
||||
return
|
||||
fi
|
||||
|
||||
status_code="$response"
|
||||
|
||||
case "$status_code" in
|
||||
200)
|
||||
echo "PASS" >>"$STATUSLOG"
|
||||
;;
|
||||
|
||||
4* | 5*)
|
||||
echo "| $channel | HTTP Error ($status_code) | \`$url\` |" >>"$STATUSLOG"
|
||||
echo "FAIL" >>"$STATUSLOG"
|
||||
;;
|
||||
|
||||
*)
|
||||
if [[ "$status_code" == "000" ]]; then
|
||||
echo "| $channel | Connection timed out (000) | \`$url\` |" >>"$STATUSLOG"
|
||||
else
|
||||
echo "| $channel | Unknown status ($status_code) | \`$url\` |" >>"$STATUSLOG"
|
||||
fi
|
||||
|
||||
echo "FAIL" >>"$STATUSLOG"
|
||||
;;
|
||||
|
||||
esac
|
||||
}
|
||||
|
||||
check_links() {
|
||||
echo "Checking links from: $base_file"
|
||||
channel_num=0
|
||||
name=""
|
||||
|
||||
echo "| Channel | Error (Code) | Link |" >"$STATUSLOG"
|
||||
echo "| ------- | ------------ | ---- |" >>"$STATUSLOG"
|
||||
|
||||
while IFS= read -r line; do
|
||||
line=$(echo "$line" | tr -d '\r\n')
|
||||
|
||||
if [[ "$line" == \#EXTINF* ]]; then
|
||||
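# pull the tvg-name="..." attribute off the #EXTINF line; fall back to a numbered label below if it is missing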
name=$(echo "$line" | sed -n 's/.*tvg-name="\([^"]*\)".*/\1/p')
|
||||
[[ -z "$name" ]] && name="Channel $channel_num"
|
||||
|
||||
elif [[ "$line" =~ ^https?:// ]]; then
|
||||
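# throttle: wait for a free slot before launching another background curl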
while (($(jobs -r | wc -l) >= MAX_JOBS)); do sleep 0.2; done
|
||||
get_status "$line" "$name" &
|
||||
((channel_num++))
|
||||
fi
|
||||
|
||||
done < "$base_file"
|
||||
|
||||
wait
|
||||
echo "Done."
|
||||
}
|
||||
|
||||
write_readme() {
|
||||
local passed failed
|
||||
|
||||
passed=$(grep -c '^PASS$' "$STATUSLOG")
|
||||
failed=$(grep -c '^FAIL$' "$STATUSLOG")
|
||||
|
||||
{
|
||||
echo "## Base Log @ $(TZ="UTC" date "+%Y-%m-%d %H:%M %Z")"
|
||||
echo
|
||||
echo "### ✅ Working Streams: $passed<br>❌ Dead Streams: $failed"
|
||||
echo
|
||||
|
||||
if (($failed > 0)); then
|
||||
head -n 1 "$STATUSLOG"
|
||||
grep -v -e '^PASS$' -e '^FAIL$' -e '^---' "$STATUSLOG" | grep -v '^| Channel' | sort -u
|
||||
fi
|
||||
|
||||
echo "---"
|
||||
echo "#### Base Channels URL"
|
||||
echo -e "\`\`\`\nhttps://s.id/d9Base\n\`\`\`\n"
|
||||
echo "#### Live Events URL"
|
||||
echo -e "\`\`\`\nhttps://s.id/d9Live\n\`\`\`\n"
|
||||
echo "#### Combined (Base + Live Events) URL"
|
||||
echo -e "\`\`\`\nhttps://s.id/d9M3U8\n\`\`\`\n"
|
||||
echo "#### EPG URL"
|
||||
echo -e "\`\`\`\nhttps://s.id/d9EPG\n\`\`\`\n"
|
||||
echo "---"
|
||||
echo "#### Mirrors"
|
||||
echo -n "[GitHub](https://github.com/doms9/iptv) | "
|
||||
echo -e "[GitLab](https://gitlab.com/doms9/iptv) | "
|
||||
echo -e "[Forgejo](https://forgejo.mxnticek.eu/doms/iptv)\n"
|
||||
echo "---"
|
||||
echo "#### Legal Disclaimer"
|
||||
echo "This repository lists publicly accessible IPTV streams as found on the internet at the time of checking."
|
||||
echo "No video or audio content is hosted in this repository. These links may point to copyrighted material owned by third parties;"
|
||||
echo "they are provided **solely for educational and research purposes.**"
|
||||
echo "The author does not endorse, promote, or encourage illegal streaming or copyright infringement."
|
||||
echo "End users are solely responsible for ensuring they comply with all applicable laws in their jurisdiction before using any link in this repository."
|
||||
echo "If you are a rights holder and wish for a link to be removed, please open an issue."
|
||||
|
||||
} >"$README"
|
||||
}
|
||||
|
||||
check_links
|
||||
write_readme
|
||||
rm "$STATUSLOG"
|
||||
10
pyproject.toml
Normal file
@@ -0,0 +1,10 @@
[project]
|
||||
name = "iptv"
|
||||
version = "0.0.3"
|
||||
requires-python = ">=3.10"
|
||||
dependencies = [
|
||||
"httpx[http2]>=0.28.1",
|
||||
"playwright>=1.55.0",
|
||||
"pytz>=2025.2",
|
||||
"selectolax>=0.4.0",
|
||||
]
|
||||
38
readme.md
Normal file
@@ -0,0 +1,38 @@
## Base Log @ 2025-12-08 17:37 UTC
|
||||
|
||||
### ✅ Working Streams: 146<br>❌ Dead Streams: 0
|
||||
|
||||
---
|
||||
#### Base Channels URL
|
||||
```
|
||||
https://s.id/d9Base
|
||||
```
|
||||
|
||||
#### Live Events URL
|
||||
```
|
||||
https://s.id/d9Live
|
||||
```
|
||||
|
||||
#### Combined (Base + Live Events) URL
|
||||
```
|
||||
https://s.id/d9M3U8
|
||||
```
|
||||
|
||||
#### EPG URL
|
||||
```
|
||||
https://s.id/d9EPG
|
||||
```
|
||||
|
||||
---
|
||||
#### Mirrors
|
||||
[GitHub](https://github.com/doms9/iptv) | [GitLab](https://gitlab.com/doms9/iptv) |
|
||||
[Forgejo](https://forgejo.mxnticek.eu/doms/iptv)
|
||||
|
||||
---
|
||||
#### Legal Disclaimer
|
||||
This repository lists publicly accessible IPTV streams as found on the internet at the time of checking.
|
||||
No video or audio content is hosted in this repository. These links may point to copyrighted material owned by third parties;
|
||||
they are provided **solely for educational and research purposes.**
|
||||
The author does not endorse, promote, or encourage illegal streaming or copyright infringement.
|
||||
End users are solely responsible for ensuring they comply with all applicable laws in their jurisdiction before using any link in this repository.
|
||||
If you are a rights holder and wish for a link to be removed, please open an issue.
|
||||
305
uv.lock
generated
Normal file
@@ -0,0 +1,305 @@
version = 1
|
||||
revision = 3
|
||||
requires-python = ">=3.10"
|
||||
|
||||
[[package]]
|
||||
name = "anyio"
|
||||
version = "4.12.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
|
||||
{ name = "idna" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2025.11.12"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "greenlet"
|
||||
version = "3.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, upload-time = "2025-12-04T14:49:44.05Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/32/6a/33d1702184d94106d3cdd7bfb788e19723206fce152e303473ca3b946c7b/greenlet-3.3.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6f8496d434d5cb2dce025773ba5597f71f5410ae499d5dd9533e0653258cdb3d", size = 273658, upload-time = "2025-12-04T14:23:37.494Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/b7/2b5805bbf1907c26e434f4e448cd8b696a0b71725204fa21a211ff0c04a7/greenlet-3.3.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b96dc7eef78fd404e022e165ec55327f935b9b52ff355b067eb4a0267fc1cffb", size = 574810, upload-time = "2025-12-04T14:50:04.154Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/38/343242ec12eddf3d8458c73f555c084359883d4ddc674240d9e61ec51fd6/greenlet-3.3.0-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:73631cd5cccbcfe63e3f9492aaa664d278fda0ce5c3d43aeda8e77317e38efbd", size = 586248, upload-time = "2025-12-04T14:57:39.35Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/d0/0ae86792fb212e4384041e0ef8e7bc66f59a54912ce407d26a966ed2914d/greenlet-3.3.0-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b299a0cb979f5d7197442dccc3aee67fce53500cd88951b7e6c35575701c980b", size = 597403, upload-time = "2025-12-04T15:07:10.831Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/a8/15d0aa26c0036a15d2659175af00954aaaa5d0d66ba538345bd88013b4d7/greenlet-3.3.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7dee147740789a4632cace364816046e43310b59ff8fb79833ab043aefa72fd5", size = 586910, upload-time = "2025-12-04T14:25:59.705Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/9b/68d5e3b7ccaba3907e5532cf8b9bf16f9ef5056a008f195a367db0ff32db/greenlet-3.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:39b28e339fc3c348427560494e28d8a6f3561c8d2bcf7d706e1c624ed8d822b9", size = 1547206, upload-time = "2025-12-04T15:04:21.027Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/bd/e3086ccedc61e49f91e2cfb5ffad9d8d62e5dc85e512a6200f096875b60c/greenlet-3.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b3c374782c2935cc63b2a27ba8708471de4ad1abaa862ffdb1ef45a643ddbb7d", size = 1613359, upload-time = "2025-12-04T14:27:26.548Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/6b/d4e73f5dfa888364bbf02efa85616c6714ae7c631c201349782e5b428925/greenlet-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:b49e7ed51876b459bd645d83db257f0180e345d3f768a35a85437a24d5a49082", size = 300740, upload-time = "2025-12-04T14:47:52.773Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/cb/48e964c452ca2b92175a9b2dca037a553036cb053ba69e284650ce755f13/greenlet-3.3.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e29f3018580e8412d6aaf5641bb7745d38c85228dacf51a73bd4e26ddf2a6a8e", size = 274908, upload-time = "2025-12-04T14:23:26.435Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/da/38d7bff4d0277b594ec557f479d65272a893f1f2a716cad91efeb8680953/greenlet-3.3.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a687205fb22794e838f947e2194c0566d3812966b41c78709554aa883183fb62", size = 577113, upload-time = "2025-12-04T14:50:05.493Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/f2/89c5eb0faddc3ff014f1c04467d67dee0d1d334ab81fadbf3744847f8a8a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4243050a88ba61842186cb9e63c7dfa677ec146160b0efd73b855a3d9c7fcf32", size = 590338, upload-time = "2025-12-04T14:57:41.136Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/d7/db0a5085035d05134f8c089643da2b44cc9b80647c39e93129c5ef170d8f/greenlet-3.3.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:670d0f94cd302d81796e37299bcd04b95d62403883b24225c6b5271466612f45", size = 601098, upload-time = "2025-12-04T15:07:11.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/a6/e959a127b630a58e23529972dbc868c107f9d583b5a9f878fb858c46bc1a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cb3a8ec3db4a3b0eb8a3c25436c2d49e3505821802074969db017b87bc6a948", size = 590206, upload-time = "2025-12-04T14:26:01.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/60/29035719feb91798693023608447283b266b12efc576ed013dd9442364bb/greenlet-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2de5a0b09eab81fc6a382791b995b1ccf2b172a9fec934747a7a23d2ff291794", size = 1550668, upload-time = "2025-12-04T15:04:22.439Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/5f/783a23754b691bfa86bd72c3033aa107490deac9b2ef190837b860996c9f/greenlet-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4449a736606bd30f27f8e1ff4678ee193bc47f6ca810d705981cfffd6ce0d8c5", size = 1615483, upload-time = "2025-12-04T14:27:28.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/d5/c339b3b4bc8198b7caa4f2bd9fd685ac9f29795816d8db112da3d04175bb/greenlet-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:7652ee180d16d447a683c04e4c5f6441bae7ba7b17ffd9f6b3aff4605e9e6f71", size = 301164, upload-time = "2025-12-04T14:42:51.577Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297, upload-time = "2025-12-04T15:07:13.552Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964, upload-time = "2025-12-04T14:36:58.316Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211, upload-time = "2025-12-04T14:57:43.968Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/07/c47a82d881319ec18a4510bb30463ed6891f2ad2c1901ed5ec23d3de351f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492", size = 624311, upload-time = "2025-12-04T15:07:14.697Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/79/d2c70cae6e823fac36c3bbc9077962105052b7ef81db2f01ec3b9bf17e2b/greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45", size = 671388, upload-time = "2025-12-04T15:07:15.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387, upload-time = "2025-12-04T14:26:51.063Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/cc/1e4bae2e45ca2fa55299f4e85854606a78ecc37fead20d69322f96000504/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221", size = 662506, upload-time = "2025-12-04T15:07:16.906Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783, upload-time = "2025-12-04T14:26:06.225Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857, upload-time = "2025-12-04T15:04:28.484Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "h11"
|
||||
version = "0.16.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "h2"
|
||||
version = "4.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "hpack" },
|
||||
{ name = "hyperframe" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hpack"
|
||||
version = "4.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpcore"
|
||||
version = "1.0.9"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx"
|
||||
version = "0.28.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "certifi" },
|
||||
{ name = "httpcore" },
|
||||
{ name = "idna" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
http2 = [
|
||||
{ name = "h2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyperframe"
|
||||
version = "6.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.11"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iptv"
|
||||
version = "0.0.3"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "httpx", extra = ["http2"] },
|
||||
{ name = "playwright" },
|
||||
{ name = "pytz" },
|
||||
{ name = "selectolax" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", extras = ["http2"], specifier = ">=0.28.1" },
|
||||
{ name = "playwright", specifier = ">=1.55.0" },
|
||||
{ name = "pytz", specifier = ">=2025.2" },
|
||||
{ name = "selectolax", specifier = ">=0.4.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "playwright"
|
||||
version = "1.56.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "greenlet" },
|
||||
{ name = "pyee" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/31/a5362cee43f844509f1f10d8a27c9cc0e2f7bdce5353d304d93b2151c1b1/playwright-1.56.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b33eb89c516cbc6723f2e3523bada4a4eb0984a9c411325c02d7016a5d625e9c", size = 40611424, upload-time = "2025-11-11T18:39:10.175Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/95/347eef596d8778fb53590dc326c344d427fa19ba3d42b646fce2a4572eb3/playwright-1.56.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b228b3395212b9472a4ee5f1afe40d376eef9568eb039fcb3e563de8f4f4657b", size = 39400228, upload-time = "2025-11-11T18:39:13.915Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/54/6ad97b08b2ca1dfcb4fbde4536c4f45c0d9d8b1857a2d20e7bbfdf43bf15/playwright-1.56.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:0ef7e6fd653267798a8a968ff7aa2dcac14398b7dd7440ef57524e01e0fbbd65", size = 40611424, upload-time = "2025-11-11T18:39:17.093Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/76/6d409e37e82cdd5dda3df1ab958130ae32b46e42458bd4fc93d7eb8749cb/playwright-1.56.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:404be089b49d94bc4c1fe0dfb07664bda5ffe87789034a03bffb884489bdfb5c", size = 46263122, upload-time = "2025-11-11T18:39:20.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/84/fb292cc5d45f3252e255ea39066cd1d2385c61c6c1596548dfbf59c88605/playwright-1.56.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64cda7cf4e51c0d35dab55190841bfcdfb5871685ec22cb722cd0ad2df183e34", size = 46110645, upload-time = "2025-11-11T18:39:24.005Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/61/bd/8c02c3388ae14edc374ac9f22cbe4e14826c6a51b2d8eaf86e89fabee264/playwright-1.56.0-py3-none-win32.whl", hash = "sha256:d87b79bcb082092d916a332c27ec9732e0418c319755d235d93cc6be13bdd721", size = 35639837, upload-time = "2025-11-11T18:39:27.174Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/27/f13b538fbc6b7a00152f4379054a49f6abc0bf55ac86f677ae54bc49fb82/playwright-1.56.0-py3-none-win_amd64.whl", hash = "sha256:3c7fc49bb9e673489bf2622855f9486d41c5101bbed964638552b864c4591f94", size = 35639843, upload-time = "2025-11-11T18:39:30.851Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/c7/3ee8b556107995846576b4fe42a08ed49b8677619421f2afacf6ee421138/playwright-1.56.0-py3-none-win_arm64.whl", hash = "sha256:2745490ae8dd58d27e5ea4d9aa28402e8e2991eb84fb4b2fd5fbde2106716f6f", size = 31248959, upload-time = "2025-11-11T18:39:33.998Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyee"
|
||||
version = "13.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/95/03/1fd98d5841cd7964a27d729ccf2199602fe05eb7a405c1462eb7277945ed/pyee-13.0.0.tar.gz", hash = "sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37", size = 31250, upload-time = "2025-03-17T18:53:15.955Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498", size = 15730, upload-time = "2025-03-17T18:53:14.532Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytz"
|
||||
version = "2025.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "selectolax"
|
||||
version = "0.4.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c1/17/a6bb957acc3fa87c7f980b9db6f5643c4e418cd351465e4b3d63f7000e4a/selectolax-0.4.4.tar.gz", hash = "sha256:9cc19ec7fe6b48b0514dd90c30bd94a1e0b69229523e9740747ed8ab22cf7cd0", size = 4773250, upload-time = "2025-11-25T10:50:28.836Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/63/fd/eadd3f8dfcd73aa0cf953741f31ea5feb43cb68e7bc4a488ef827ade79bc/selectolax-0.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51927fc35bc383a677d693ff9a2d2cbcb561467e90905b979071138010115c88", size = 2034470, upload-time = "2025-11-25T10:48:33.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/70/355b8e4643ca2899b81540274e7d045de541106290b116899978aff5ca00/selectolax-0.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a61a66b06f99641776062fdf591efced74d9a092f828f8ee484963c432612bd7", size = 2028566, upload-time = "2025-11-25T10:48:35.496Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/b3/ee18f4ec64d2fae4220a4ba6e481d7c693e5acf971d914a41c8a719a1d27/selectolax-0.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b9f3aaeb79046a757222f36314751e0caaf13a29317ca8ef1049e817372e0ec", size = 2217074, upload-time = "2025-11-25T10:48:37.433Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/c5/3210afe76a42189676e4c9a64922706c83f47db1c110001400e6963a0422/selectolax-0.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d83d823078d1e4ab9a2406763e573f33b5ca9398b1b594989e7fa08356a6b24", size = 2252643, upload-time = "2025-11-25T10:48:39.399Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/4a/f11030042ca945b4b4a0f9c8864d1275da32153001273a4fe31da2bb04aa/selectolax-0.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a86b93927ecf0c2bdf239519943a56447919c94dd208db7d9ae67370ddd0ae41", size = 2229744, upload-time = "2025-11-25T10:48:40.849Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/2d/8973aa08b4ab5a6102bba761a0f8eb1a7c07427b67badcaf6c1b4c6b3687/selectolax-0.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:aec31cd969ff939a647ca05ec60aced08a14476353689dc82359400e2c6678b9", size = 2257709, upload-time = "2025-11-25T10:48:42.357Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/eb/b29f3abe2bf052c02121b6aa2da738b6518041ab202f9d74bd0a9d26a794/selectolax-0.4.4-cp310-cp310-win32.whl", hash = "sha256:50e7c8254abd28d5ad3444edf788b8200b2f3cae309fdb5e3511408d7b483f63", size = 1712199, upload-time = "2025-11-25T10:48:45.06Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/e6/4d365d67983f86c13786ef456943fef218efa56ab999dfba7bf632d42d73/selectolax-0.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:416d47df2a14e7629a24adb6a173220bd10a9faac4d8cc1fda54dc46b5f8c093", size = 1808624, upload-time = "2025-11-25T10:48:46.537Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/5c/79ec94beeb6ce98e0083c23e447bfac62486057e0891893006f7a4f03c2f/selectolax-0.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:8dbb9ec4b29b656aa8adf4cbf4c3186f5d79e7ea7391b83d1f580e934adb7564", size = 1761897, upload-time = "2025-11-25T10:48:48.704Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/1a/cdca1f7003ce1f71bb02cecded6dbac794d37ce6449d034100fcd349a22a/selectolax-0.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4eb2fcb1b8cba410af3de6946615dc3fd6cfd3169de41dd451ef3536f4da8677", size = 2033990, upload-time = "2025-11-25T10:48:50.432Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/42/16bf741eefe47b387cd238f83fb7de3b45eed06cf6708e26700d936c4028/selectolax-0.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b13b15ea2bcb548c34ac36ccf72635e6715f309e9522377117c62cae386119af", size = 2027942, upload-time = "2025-11-25T10:48:52.37Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/a2/c2ef572938ae51e48714b899e69e33f3c5eac570fe275f792f88e414afb7/selectolax-0.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f2beefd304b35aff642c88dc4795618845e28cdc623961889ef1854d2eb149f", size = 2217338, upload-time = "2025-11-25T10:48:54.322Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/8b/e30b8ba16808d0cff6286474a631027229d736ffff2b9dfaf4f644fbe12a/selectolax-0.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29ce382b7afaa6115cc118736e0bb91022de087cc62db45a7846a55995d22ab2", size = 2251775, upload-time = "2025-11-25T10:48:56.242Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/a5/4a80b848e34c2d4cc8bc36beb1d311d6c3051596db7522567631de8aac55/selectolax-0.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:84ae93f0ee098555dd2c96b6eaedcf0a996cd4fb3cc5a732f2ee278a453f0c42", size = 2229353, upload-time = "2025-11-25T10:48:57.816Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/03/f8/4bc101565a4496f9e7724b5010ca20edff4fa2323ce5d4418344ea277c78/selectolax-0.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9796ab3c5f48cd1f3f427871861005c7a515aedf4a0731b92fb4919ac33f5461", size = 2257158, upload-time = "2025-11-25T10:48:59.803Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/bc/23c0b40d2c0cbc08d4a0b6aac1fca80c75db277dbc22e3ad378a60ca9f71/selectolax-0.4.4-cp311-cp311-win32.whl", hash = "sha256:259954f8b11e09dcf6b37475220741688919209c3d8a72651c695955cac48776", size = 1711601, upload-time = "2025-11-25T10:49:01.307Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/3e/d02dc5e1fef6cbac5fa93f0eadbbe9d6c73c8d9b5b1aad07494f44bf562f/selectolax-0.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:62c7b72c8decb86fd4d5e827c9db7bc634cf19887118867f66887c79dd2a50ce", size = 1809718, upload-time = "2025-11-25T10:49:03.282Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/9a/4474fb0f01a4bb1689af1946355236a05ad2dd3e4e23e709cb630b948a66/selectolax-0.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:90a95b2f002ab1d04e3bad23aa9e1e07819e9214b42e99494f45e9e170361fce", size = 1761591, upload-time = "2025-11-25T10:49:04.771Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/86/e5db16edd15574d428a67c07195fff6b1fb457cd8cf645b95b4517b43fe0/selectolax-0.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4535920d100765f79df10883d4245013f71fdef53f0f40a18e4a738f2041095", size = 2032222, upload-time = "2025-11-25T10:49:06.367Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/bb/df49371e31c1f83583d7146a2de91c8c942fe2085af39005cd711402cf98/selectolax-0.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f0fba23a00e5e9bd0c30e38d5b8a0afa4376c981022ecea7e318d564ae8626ef", size = 2025555, upload-time = "2025-11-25T10:49:07.839Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/09/2e9dc0ee325bf8bea1d125404a067548e9cfda40d1ee0c81dc1c3b4136da/selectolax-0.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:825af26bf36c9324fbced1df4530bd7917ab24bf5c93c8cd0fdf3842390e6f5b", size = 2214652, upload-time = "2025-11-25T10:49:09.497Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/a0/ac981c2980be3f59319280b452a53a30a088c582e389f9f295e9f7929f9a/selectolax-0.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9993e97ebb81f5732594b6280a7d7b904cd13fe17f4af87240a9e08aac180e25", size = 2253559, upload-time = "2025-11-25T10:49:11.073Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/ce/11cfe5428c0864558c51ab4ef531758b2b82b6d913b0901ef1a36d78191e/selectolax-0.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:087d651685199e1854f485854fea6e3594bce545a7b110478113caf7c55d68a0", size = 2227072, upload-time = "2025-11-25T10:49:12.643Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/17/af82fa8069e22f2a01d991bbf9ca6e25ab374e9a66e3854a494f6d271055/selectolax-0.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2d6e4b58ab30690003e8fb7dc2b5ec294ed1cd82f2141f6f6b61e2bb81a64752", size = 2259395, upload-time = "2025-11-25T10:49:14.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/7f/ee9a39473ac82e5818801de3d77ac663bb01bb6d9289248ed190bbf305c4/selectolax-0.4.4-cp312-cp312-win32.whl", hash = "sha256:91aa04bb65bacca66a5d1ff73c300e9b3992272faa321b52ec5c39bd002777a5", size = 1708102, upload-time = "2025-11-25T10:49:16.918Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/01/60/7e2abff3445371be120c33d1945dfba6723c0c9e25773ffac0fdd710eb05/selectolax-0.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:c38849f9b5405958f1620a7a7238d17409e92ff1f13ad0c10658fc19406b9815", size = 1807184, upload-time = "2025-11-25T10:49:18.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/83/1ee69f9ccfc79a28317d2237dc77abf37810a45acd8c1321bc1f1141d8e0/selectolax-0.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:c3de11d86ae23f94d43c0af89b3e0e4351b6b72a4725cf707fc33c1460b93d45", size = 1753919, upload-time = "2025-11-25T10:49:20.615Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/ef/e595158147f73f66a4d8aabdbc677ecb3fbc93a54b5996bf9e049b958b78/selectolax-0.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b4c17de6d04e1ffe312fe3dfbba36c43e7ce897b24b611fea98128e994750374", size = 2031628, upload-time = "2025-11-25T10:49:22.056Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/ad/bc7b6726b8c5de59238487b340d809452aeb5e1c5a5ed96e3f3ec782283a/selectolax-0.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5504ce8865f47f3615f4bc93218b74d814482921d545e3e0c21eb47d52e759ed", size = 2023805, upload-time = "2025-11-25T10:49:23.629Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/4a/84e180515dc65a358551d188078ac3a0a19b9eb23d22b2dbbec932b654b7/selectolax-0.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47ebd583fd632535b26091010cf6d339c0475c657f225ff5de3d176d97e63e01", size = 2213093, upload-time = "2025-11-25T10:49:25.199Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/6a/a36e88e64edc6cb2dc0a2c8b869dd6d9f5a582b820662be4e265e46e7c56/selectolax-0.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fc3c47af34f9be6e7a196f72e378ddbce45c18e246e0ece255bbacd80d7e6c3f", size = 2251056, upload-time = "2025-11-25T10:49:26.846Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/15/1d1c9ff23ac62449d92e44e1de0d2b34804ef88c771a90a5768f6a20f115/selectolax-0.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d4a79b9b962072cf2dddd463c8262daf990ea67b40bc81ecfb9e93ff914ce10", size = 2226513, upload-time = "2025-11-25T10:49:29.263Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/3e/e2e542b12f1cb649d823f3baa7e72a215d7c588e3056b1a920effb9b3085/selectolax-0.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5957cb964460ff08480b8731429cf17979f9bd69261c227af3810df6a81a1312", size = 2256451, upload-time = "2025-11-25T10:49:30.967Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/38/8d6da174c0fb1c30e3d85e0d4609a97c2edc6c0ef3136c932c463d32cee1/selectolax-0.4.4-cp313-cp313-win32.whl", hash = "sha256:da4da1f4b24f62be1179ca20af30761bc84bd9a529bb2bd14482c0b843b3f26c", size = 1708078, upload-time = "2025-11-25T10:49:32.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/55/70469c9c26711354795be62221ff94f65925f40a255f1e434aa31071d2fb/selectolax-0.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:30a51bf34780341d92a31bb335a4e7f9dfdca0fc5d70fc84b9e0683c92bff200", size = 1808152, upload-time = "2025-11-25T10:49:34.066Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/8a/cda0a2466d26f8b8ab899ffcbfc7cc453bcaaaa0c279ad4c81ad1d3b4396/selectolax-0.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:4899eaed7ebd72312c1213836a0ae46e87bb18597c71495102789170e6218564", size = 1753636, upload-time = "2025-11-25T10:49:35.657Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/3b/6c0273eb5ed30cc429651a16b1946aa4a3e2e75ec4a4a3fdc8c555b09f5d/selectolax-0.4.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:da7a59a263296fbf8cc3e34a4b29a71d5eeb709f692e7cb564137e7e94f68002", size = 2049789, upload-time = "2025-11-25T10:49:37.266Z" },
{ url = "https://files.pythonhosted.org/packages/4a/2d/46b7ea9c23fac37fdb4210292d642601a04d62d1f83a1897b08b5255c668/selectolax-0.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b497d463b0fd1f82cf86ba8b25499a312b67d04dea6b4bb108c7dcb7ee99bc89", size = 2042344, upload-time = "2025-11-25T10:49:39.284Z" },
{ url = "https://files.pythonhosted.org/packages/85/e7/f485a59cb9d0253ef9a0a7a5e662b13fe9f4956ea0840a79cee2b919bd16/selectolax-0.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4d9dc7063d279ff6342987850d6b28f5ac35510b13743a21526b95d7a1b6ef5e", size = 2220577, upload-time = "2025-11-25T10:49:41.384Z" },
{ url = "https://files.pythonhosted.org/packages/cc/6b/330e690d896be21abae68e277d5db5b4c8c1f7b79579c055b6af5c532267/selectolax-0.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21952bc64e1a279afba339cdec706572091518029cc170cb2bc86a6f7111cfba", size = 2252516, upload-time = "2025-11-25T10:49:42.955Z" },
{ url = "https://files.pythonhosted.org/packages/7f/fe/4e2bd40fde56fb80ce4099bd1131d488fa803e965495b2419707993101f6/selectolax-0.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5f049eee174f0b309d386953158165b6ded9c705899328e4aab892927a1fa9c7", size = 2249639, upload-time = "2025-11-25T10:49:45.003Z" },
{ url = "https://files.pythonhosted.org/packages/3f/99/6788eba093168c9a970e8e62e80890eb9003d5af299d3fe0ebc21b87a908/selectolax-0.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22e4be8364a43f1fe5d7d1653da21b066954e86fe7fbb1d9d3c841809eb22754", size = 2275023, upload-time = "2025-11-25T10:49:46.666Z" },
{ url = "https://files.pythonhosted.org/packages/35/f9/96046779dbec180b2b6d078482d8dfc79b5db6df9be18560b9d452c3c415/selectolax-0.4.4-cp314-cp314-win32.whl", hash = "sha256:4d7d692e05cdfa07be81c04dd8f8d4f191a832fd30f2a1cb222e5468c0c8af2e", size = 1816295, upload-time = "2025-11-25T10:49:48.189Z" },
{ url = "https://files.pythonhosted.org/packages/ca/44/a58cacfba8c72dccc3076b4d83bbca54a26306643f7ca0449a07b38ab5b5/selectolax-0.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:0440c746ad4281ccccc7cd4297d64e150f221a35707c624509b304eda801f8bc", size = 1913888, upload-time = "2025-11-25T10:49:49.857Z" },
{ url = "https://files.pythonhosted.org/packages/fa/81/121d5de4ecc5bf3b936f8d7d55c6084c12f1a2faff1a4a1f82fb25cd0173/selectolax-0.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:e9cd5cfb086921f600a06fd2130178b5a95a8e446e0f2777db6937526050746e", size = 1863242, upload-time = "2025-11-25T10:49:51.427Z" },
{ url = "https://files.pythonhosted.org/packages/bb/e1/790a7d20b12729eb5fd0da9f9a725a7864d245850c67bc5a705fb3c1d7d2/selectolax-0.4.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c6691511f46c13d60b52276377e797dc2a3e758e885c63e1181f836d9827a12", size = 2063289, upload-time = "2025-11-25T10:49:53.219Z" },
{ url = "https://files.pythonhosted.org/packages/f8/ef/47e185c61f06d0445a647996b6fb48b36ea0b27de5ede8903b2a39583440/selectolax-0.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:845f74f8cebe2358a003e7b47238e1e0edc7402ddd3c07d8f7cea435dd1325ef", size = 2070354, upload-time = "2025-11-25T10:49:54.894Z" },
{ url = "https://files.pythonhosted.org/packages/49/9a/058c4fff7aafaff1674fe25ac957c97596f9e28d7c57d631200f2a051807/selectolax-0.4.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a86c37c2498b1fa24936e7805e4e5cb2e97190606d04cb5ad9aedd6edcfcdaa9", size = 2227462, upload-time = "2025-11-25T10:49:56.636Z" },
{ url = "https://files.pythonhosted.org/packages/20/04/fc616db7cfca0b90329e7e8e99ffe325f680818ab66ba0af62c2589d8660/selectolax-0.4.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e9fa37df59e550840dca51fda4b8cd1f288fa1a13d3021d5e55d50730a817244", size = 2254759, upload-time = "2025-11-25T10:49:58.634Z" },
{ url = "https://files.pythonhosted.org/packages/ce/9b/b1e583ff155656d7da9a6902f21af4c6942c1f34701b38b71ec7146631b0/selectolax-0.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bbb96598a871a87673582cd7e6b8112782bfdae537be5409f887aa4d78a6a081", size = 2259007, upload-time = "2025-11-25T10:50:01.07Z" },
{ url = "https://files.pythonhosted.org/packages/3b/72/bd07cd453cf4af639295b9064180d772e2f3ac382b7d56e40165710f3cad/selectolax-0.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d95b108b5fa2189e9da5cc2da147c84373dc8fa4ba9b9f5c787f3e8d43b57255", size = 2277464, upload-time = "2025-11-25T10:50:03.54Z" },
{ url = "https://files.pythonhosted.org/packages/24/aa/969859acc65fbc07e2eae6d15da1cb9dfec9db6b72a3534ae7243d53ef43/selectolax-0.4.4-cp314-cp314t-win32.whl", hash = "sha256:7e680e091906ea33c79a7c27d6c39f7085fe1277c13288f71742e65601e4ebe9", size = 1865553, upload-time = "2025-11-25T10:50:05.131Z" },
{ url = "https://files.pythonhosted.org/packages/bf/36/a397b519a7503fe5a240c9099956a630f43a498edca7d7cba5556efc663a/selectolax-0.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:418d21d68013314f5e673a1c2647ec38a4fe3610f3b0dcaba7695641e0bcc60b", size = 1981864, upload-time = "2025-11-25T10:50:07.475Z" },
{ url = "https://files.pythonhosted.org/packages/20/d2/3621c7ac690f7b7122c9c00aaa6aa65a8be0b811370a683e7782bf9f74a4/selectolax-0.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:acee1f44c82dbb9f219d7be362a413d5919dd3374186b877b97c4a14cc73771d", size = 1885304, upload-time = "2025-11-25T10:50:09.072Z" },
]

[[package]]
name = "typing-extensions"
version = "4.15.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]