doms9 2025-10-28 16:54:17 -04:00
parent 76331d1da6
commit 00000d931d
3 changed files with 8 additions and 8 deletions

View file

@@ -8,8 +8,8 @@ from scrapers import (
     streambtw,
     streameast,
     streamed,
-    strfree,
     strmd,
+    strmfree,
     tvpass,
     watchfooty,
 )
@@ -43,8 +43,8 @@ async def main() -> None:
         asyncio.create_task(streambtw.scrape(network.client)),
         asyncio.create_task(streameast.scrape(network.client)),
         asyncio.create_task(streamed.scrape(network.client)),
-        asyncio.create_task(strfree.scrape(network.client)),
         asyncio.create_task(strmd.scrape(network.client)),
+        asyncio.create_task(strmfree.scrape(network.client)),
         asyncio.create_task(tvpass.scrape(network.client)),
         asyncio.create_task(watchfooty.scrape(network.client)),
     ]
@@ -57,8 +57,8 @@ async def main() -> None:
         | streambtw.urls
         | streameast.urls
         | streamed.urls
-        | strfree.urls
         | strmd.urls
+        | strmfree.urls
         | tvpass.urls
         | watchfooty.urls
     )
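
For reference, a minimal sketch of the pattern this file follows: every scraper module exposes an async scrape(client) coroutine that fills a module-level urls mapping, main() launches them all as tasks, and the results are merged with the | operator. This is an illustrative reconstruction under those assumptions; FakeScraper, its tag values, and the example URL are stand-ins, not code from this repository.

# Illustrative sketch only -- not the repository's main.py.
import asyncio

import httpx


class FakeScraper:
    """Stand-in for one scraper module such as strmd or strmfree."""

    def __init__(self, tag: str) -> None:
        self.tag = tag
        self.urls: dict[str, str] = {}

    async def scrape(self, client: httpx.AsyncClient) -> None:
        # A real scraper would fetch and parse pages with the shared client;
        # here we just record one fake entry.
        self.urls[f"[Soccer] Example Match ({self.tag})"] = "https://example.com/stream"


async def main() -> None:
    scrapers = [FakeScraper("STRMD"), FakeScraper("STRMFR")]

    async with httpx.AsyncClient() as client:
        # Launch every scraper concurrently, mirroring the create_task list above.
        tasks = [asyncio.create_task(s.scrape(client)) for s in scrapers]
        await asyncio.gather(*tasks)

    # Merge each module's urls the same way main() does with the | operator.
    merged: dict[str, str] = {}
    for s in scrapers:
        merged = merged | s.urls

    print(merged)


if __name__ == "__main__":
    asyncio.run(main())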

View file

@@ -107,6 +107,8 @@ async def refresh_html_cache(client: httpx.AsyncClient, url: str) -> dict[str, s
         sport, event = league.text(strip=True), title.text(strip=True)
+        sport = sport.split("")[-1].strip()
         onclick = hds_a.attributes.get("onclick", "")
         if not (m := re.search(r"openPlayerPopup\(\s*(\d+)\s*\)", onclick)):
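
A small, self-contained sketch of the id extraction used in the line above, assuming onclick attributes of the form openPlayerPopup( 123 ); the helper name and the sample strings are illustrative, not taken from the scraper.

# Illustrative sketch of pulling the popup id out of an onclick attribute.
import re

POPUP_RE = re.compile(r"openPlayerPopup\(\s*(\d+)\s*\)")


def player_id(onclick: str) -> int | None:
    """Return the numeric popup id, or None when the attribute doesn't match."""
    if not (m := POPUP_RE.search(onclick)):
        return None
    return int(m.group(1))


print(player_id("openPlayerPopup( 4821 )"))  # 4821
print(player_id("somethingElse()"))          # None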

View file

@@ -119,17 +119,15 @@ async def get_events(
         if not streams:
             continue
-        sport = "American Football" if category == "football" else category.capitalize()
         for stream in streams:
             event_dt = Time.from_ts(stream["match_timestamp"])
             if not start_dt <= event_dt <= end_dt:
                 continue
-            name = stream["name"]
-            key = f"[{sport}] {name} (STRFREE)"
+            sport, name = stream["league"], stream["name"]
+            key = f"[{sport}] {name} (STRMFR)"
             if cached_keys & {key}:
                 continue
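
A minimal sketch of the dedup logic after this change, assuming each stream dict carries league, name, and match_timestamp keys. The repo's Time.from_ts helper is replaced here with plain datetime arithmetic, and the one-hour/24-hour window bounds are invented for the example.

# Illustrative sketch only; window bounds and helper names are assumptions.
from datetime import datetime, timedelta, timezone


def fresh_keys(streams: list[dict], cached_keys: set[str]) -> list[str]:
    now = datetime.now(timezone.utc)
    start_dt, end_dt = now - timedelta(hours=1), now + timedelta(hours=24)

    keys: list[str] = []
    for stream in streams:
        event_dt = datetime.fromtimestamp(stream["match_timestamp"], tz=timezone.utc)
        if not start_dt <= event_dt <= end_dt:
            continue

        # Sport now comes from the stream payload instead of the category name.
        sport, name = stream["league"], stream["name"]
        key = f"[{sport}] {name} (STRMFR)"

        if cached_keys & {key}:  # already collected on a previous run
            continue

        keys.append(key)

    return keys


print(
    fresh_keys(
        [{"league": "Soccer", "name": "A vs B",
          "match_timestamp": datetime.now(timezone.utc).timestamp()}],
        cached_keys=set(),
    )
)

Note that cached_keys & {key} behaves the same as key in cached_keys; the intersection form simply mirrors what the file already does.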
@@ -184,7 +182,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
             ev["timestamp"],
         )
-        key = f"[{sport}] {event} (STRFREE)"
+        key = f"[{sport}] {event} (STRMFR)"
         tvg_id, pic = leagues.get_tvg_info(sport, event)
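
Finally, a hypothetical stand-in for the leagues.get_tvg_info(sport, event) lookup called above; the real module's data source and return shape are not visible in this diff, so the table, ids, and logo URLs below are placeholders only.

# Placeholder lookup table; the real leagues module is not shown in this commit.
TVG_TABLE: dict[str, tuple[str, str]] = {
    "Soccer": ("Soccer.Dummy.us", "https://example.com/logos/soccer.png"),
    "Basketball": ("Basketball.Dummy.us", "https://example.com/logos/nba.png"),
}


def get_tvg_info(sport: str, event: str) -> tuple[str, str]:
    """Return a (tvg_id, picture URL) pair, defaulting to empty strings.

    The event argument is accepted for parity with the call above but is
    unused in this placeholder.
    """
    return TVG_TABLE.get(sport, ("", ""))


tvg_id, pic = get_tvg_info("Soccer", "A vs B")
print(tvg_id, pic)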