mirror of https://github.com/doms9/iptv.git (synced 2026-01-21 03:59:03 +01:00)
e
parent 919d3c54cc
commit 00000d9e24
4 changed files with 18 additions and 9 deletions
@@ -53,18 +53,18 @@ async def main() -> None:
         asyncio.create_task(embedhd.scrape()),
         asyncio.create_task(fawa.scrape()),
         asyncio.create_task(istreameast.scrape()),
-        # asyncio.create_task(pixel.scrape()),
+        asyncio.create_task(pixel.scrape()),
         asyncio.create_task(ppv.scrape()),
         asyncio.create_task(roxie.scrape()),
         asyncio.create_task(shark.scrape()),
         asyncio.create_task(sport9.scrape()),
-        # asyncio.create_task(streambtw.scrape()),
+        asyncio.create_task(streambtw.scrape()),
         asyncio.create_task(streamcenter.scrape()),
         asyncio.create_task(streamfree.scrape()),
         asyncio.create_task(streamhub.scrape()),
         asyncio.create_task(streamsgate.scrape()),
         asyncio.create_task(strmd.scrape()),
-        # asyncio.create_task(totalsportek.scrape()),
+        asyncio.create_task(totalsportek.scrape()),
         asyncio.create_task(tvpass.scrape()),
         asyncio.create_task(webcast.scrape()),
     ]
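The hunk above only re-enables three scrapers inside the task list; how main() awaits that list is not part of the diff. As a rough sketch of the usual pattern (the scrape() stub and the gather call below are illustrative assumptions, not the repo's code):

    import asyncio

    async def scrape(name: str) -> str:
        # Stand-in for a site scraper such as pixel.scrape() or streambtw.scrape().
        await asyncio.sleep(0.1)
        return f"{name}: done"

    async def main() -> None:
        tasks = [
            asyncio.create_task(scrape("pixel")),
            asyncio.create_task(scrape("streambtw")),
            asyncio.create_task(scrape("totalsportek")),
        ]

        # return_exceptions=True keeps one failing site from cancelling the rest.
        for result in await asyncio.gather(*tasks, return_exceptions=True):
            print(result)

    if __name__ == "__main__":
        asyncio.run(main())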
@@ -97,7 +97,9 @@ async def scrape() -> None:
 
     if not (base_url and api_url):
         log.warning("No working PPV mirrors")
 
+        CACHE_FILE.write(cached_urls)
+
         return
 
     log.info(f'Scraping from "{base_url}"')
@@ -15,7 +15,7 @@ TAG = "STRMBTW"
 
 CACHE_FILE = Cache(f"{TAG.lower()}.json", exp=3_600)
 
-BASE_URL = "https://streambtw.com"
+MIRRORS = ["https://hiteasport.info/", "https://streambtw.com/"]
 
 
 def fix_league(s: str) -> str:
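This hunk swaps the single BASE_URL constant for a MIRRORS list that network.get_base() resolves at runtime. That helper is not shown in this commit, so the following is only a guess at the kind of fallback it performs; the httpx client, the function body, and the trailing-slash handling are assumptions rather than the repo's implementation.

    import asyncio
    import httpx

    async def get_base(mirrors: list[str]) -> str | None:
        # Hypothetical fallback: return the first mirror that answers with HTTP 200.
        async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client:
            for mirror in mirrors:
                try:
                    resp = await client.get(mirror)
                except httpx.HTTPError:
                    continue
                if resp.status_code == 200:
                    return mirror.rstrip("/")
        return None

    # Example: pick a reachable StreamBTW mirror, or None if every mirror is down.
    MIRRORS = ["https://hiteasport.info/", "https://streambtw.com/"]
    print(asyncio.run(get_base(MIRRORS)))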
@@ -45,10 +45,10 @@ async def process_event(url: str, url_num: int) -> str | None:
     return stream_link
 
 
-async def get_events() -> list[dict[str, str]]:
+async def get_events(url: str) -> list[dict[str, str]]:
     events = []
 
-    if not (html_data := await network.request(BASE_URL, log=log)):
+    if not (html_data := await network.request(url, log=log)):
         return events
 
     soup = HTMLParser(html_data.content)
@@ -72,7 +72,7 @@ async def get_events() -> list[dict[str, str]]:
             {
                 "sport": fix_league(league),
                 "event": name,
-                "link": urljoin(BASE_URL, href),
+                "link": urljoin(url, href),
             }
         )
 
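Threading the resolved mirror through get_events(url) matters for the link building above: urljoin() anchors each relative href to whichever mirror actually responded. A quick standard-library illustration (the href paths are made up):

    from urllib.parse import urljoin

    # Relative hrefs resolve against whichever mirror was picked at runtime.
    print(urljoin("https://hiteasport.info/", "/live/nba-1.php"))
    # -> https://hiteasport.info/live/nba-1.php

    print(urljoin("https://streambtw.com/", "live/nba-1.php"))
    # -> https://streambtw.com/live/nba-1.php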
@@ -87,9 +87,14 @@ async def scrape() -> None:
 
         return
 
-    log.info(f'Scraping from "{BASE_URL}"')
+    if not (base_url := await network.get_base(MIRRORS)):
+        log.warning("No working StreamBTW mirrors")
+
+        return
+
+    log.info(f'Scraping from "{base_url}"')
 
-    events = await get_events()
+    events = await get_events(base_url)
 
     log.info(f"Processing {len(events)} new URL(s)")
 
@@ -126,7 +126,9 @@ async def scrape() -> None:
 
     if not (base_url := await network.get_base(MIRRORS)):
         log.warning("No working STRMD mirrors")
 
+        CACHE_FILE.write(cached_urls)
+
         return
 
     log.info(f'Scraping from "{base_url}"')
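Both the PPV and STRMD hunks now write the cache back before the early return when no mirror responds. The Cache helper itself is not part of this diff, so the rationale and the sketch below are assumptions: a plain JSON file stands in for CACHE_FILE, and every name is hypothetical.

    import json
    from pathlib import Path

    def write_cache(path: Path, cached_urls: dict[str, str]) -> None:
        # Stand-in for CACHE_FILE.write(): persist what this run already knows.
        path.write_text(json.dumps(cached_urls, indent=2))

    def scrape_or_keep_cache(base_url: str | None,
                             cache_file: Path,
                             cached_urls: dict[str, str]) -> None:
        if not base_url:
            print("No working mirrors")
            # Same shape as the diff: write the loaded cache back before returning,
            # keeping the on-disk file consistent even when scraping is skipped.
            write_cache(cache_file, cached_urls)
            return
        print(f'Scraping from "{base_url}"')

    scrape_or_keep_cache(None, Path("strmd.json"), {"event-1": "https://example.com/stream"})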