This commit is contained in:
parent 69cf0b4754
commit 00000d9b81
9 changed files with 30 additions and 8 deletions
@@ -25,6 +25,7 @@ async def get_events(
     base_url: str,
     cached_hrefs: set[str],
 ) -> list[dict[str, str]]:

     log.info(f'Scraping from "{base_url}"')

     try:

@@ -80,6 +81,7 @@ async def process_event(
     url: str,
     url_num: int,
 ) -> tuple[str, str]:

     try:
         r = await client.get(url)
         r.raise_for_status()

@@ -34,6 +34,7 @@ async def write_to_cert(
     url: str,
     cert: Path,
 ) -> None:

     try:
         r = await client.get(url)
         r.raise_for_status()

@@ -94,6 +95,7 @@ async def process_event(
     url_num: int,
     context: BrowserContext,
 ) -> str | None:

     page = await context.new_page()

     captured: list[str] = []

@@ -21,6 +21,7 @@ async def process_event(
     url: str,
     url_num: int,
 ) -> str | None:

     try:
         r = await client.get(url)
         r.raise_for_status()

@@ -25,6 +25,7 @@ async def process_event(
     url_num: int,
     context: BrowserContext,
 ) -> str | None:

     page = await context.new_page()

     captured: list[str] = []

@@ -55,6 +55,7 @@ async def process_event(
     url_num: int,
     context: BrowserContext,
 ) -> str | None:

     page = await context.new_page()

     captured: list[str] = []

@@ -22,6 +22,7 @@ class Cache:
         per_entry: bool = True,
         index: bool = False,
     ) -> dict[str, dict[str, str | float]]:

         try:
             data: dict = json.loads(self.file.read_text(encoding="utf-8"))
         except (FileNotFoundError, json.JSONDecodeError):

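The Cache hunk above shows only the guarded read, not the fallback branch. A minimal, self-contained sketch of the same pattern, assuming the cache file is a pathlib.Path; the empty-dict fallback is a placeholder, since the real recovery logic sits outside the hunk:

import json
from pathlib import Path


def read_cache(file: Path) -> dict:
    # Treat a missing or corrupt cache file the same way: start from an empty dict.
    try:
        data: dict = json.loads(file.read_text(encoding="utf-8"))
    except (FileNotFoundError, json.JSONDecodeError):
        data = {}  # placeholder fallback; the real branch is not shown in the diff
    return data
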
@@ -35,7 +35,12 @@ class Time(datetime):
         return self.__class__.fromtimestamp(dt.timestamp(), tz=ZONES[tzone])

     @classmethod
-    def from_str(cls, s: str, fmt: str | None = None) -> "Time":
+    def from_str(
+        cls,
+        s: str,
+        fmt: str | None = None,
+    ) -> "Time":

         pattern = re.compile(r"\b(ET|UTC|EST|EDT)\b")

         match = pattern.search(s)

@@ -99,7 +104,12 @@ class Leagues:

         return (None, self.live_img)

-    def is_valid(self, event: str, league: str) -> bool:
+    def is_valid(
+        self,
+        event: str,
+        league: str,
+    ) -> bool:

         pattern = re.compile(r"\s+(?:-|vs\.?|at)\s+", flags=re.IGNORECASE)

         if pattern.search(event):

@@ -109,7 +119,12 @@ class Leagues:

         return event.lower() == "nfl redzone" if league == "NFL" else False

-    def get_tvg_info(self, sport: str, event: str) -> tuple[str | None, str]:
+    def get_tvg_info(
+        self,
+        sport: str,
+        event: str,
+    ) -> tuple[str | None, str]:

         match sport:
             case "American Football":
                 return (

@@ -84,6 +84,7 @@ class Network:
         got_one: asyncio.Event,
         patterns: list[str] | None = None,
     ) -> None:

         if not patterns:
             patterns = ["amazonaws", "knitcdn"]

@@ -36,10 +36,7 @@ SPORT_ENDPOINTS = [
 ]


-async def get_api_data(
-    client: httpx.AsyncClient,
-    url: str,
-) -> list[dict[str, Any]]:
+async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
     try:
         r = await client.get(url, timeout=10)
         r.raise_for_status()

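For context on the collapsed get_api_data signature: a self-contained sketch of the fetch pattern the hunk shows, assuming httpx; the example endpoint and the error/parsing branches outside the hunk are placeholders:

import asyncio
from typing import Any

import httpx


async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
    try:
        r = await client.get(url, timeout=10)
        r.raise_for_status()
    except httpx.HTTPError:
        return []  # placeholder: the real error handling is outside the hunk
    data = r.json()
    return data if isinstance(data, list) else []


async def main() -> None:
    async with httpx.AsyncClient() as client:
        # Placeholder URL purely for illustration.
        print(len(await get_api_data(client, "https://example.com/api/events")))


# asyncio.run(main())
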
@@ -137,6 +134,7 @@ async def process_event(
         await iframe.click("button.vds-play-button")
     except TimeoutError:
         log.warning("Play button not found inside iframe.")
+        return

     wait_task = asyncio.create_task(got_one.wait())

@@ -218,7 +216,7 @@ async def get_events(
             {
                 "sport": sport,
                 "event": name,
-                "link": f"https://www.watchfooty.live/en/stream/{match_id}",
+                "link": urljoin(base_url, f"stream/{match_id}"),
                 "logo": logo,
                 "timestamp": event_dt.timestamp(),
             }
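The last hunk replaces the hard-coded stream URL with urljoin(base_url, ...). One nuance of urllib.parse.urljoin worth noting (the match_id value below is made up for illustration): the "/en" segment only survives if base_url ends with a trailing slash, otherwise urljoin replaces the final path segment:

from urllib.parse import urljoin

match_id = "12345"  # illustrative value only

# A base URL with a trailing slash keeps the /en segment:
print(urljoin("https://www.watchfooty.live/en/", f"stream/{match_id}"))
# https://www.watchfooty.live/en/stream/12345

# Without the trailing slash, "en" is treated as a file name and replaced:
print(urljoin("https://www.watchfooty.live/en", f"stream/{match_id}"))
# https://www.watchfooty.live/stream/12345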