mirror of
https://github.com/doms9/iptv.git
synced 2026-01-21 03:59:03 +01:00
e
This commit is contained in: parent 8264afeefd, commit 00000d949b
3 changed files with 6 additions and 8 deletions
@@ -141,7 +141,7 @@ async def get_events(

     live = []

-    start_ts = now.delta(hours=-1).timestamp()
+    start_ts = now.delta(minutes=-30).timestamp()

     for k, v in events.items():
         if cached_keys & {k}:
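Note: this hunk keeps the cutoff as an epoch timestamp (via .timestamp()) rather than a datetime. A minimal stand-in using only the standard library, since the repo's Time helper is not part of this diff (names and values below are illustrative):

    from datetime import datetime, timedelta, timezone

    # Epoch-seconds cutoff 30 minutes in the past, mirroring
    # now.delta(minutes=-30).timestamp() after this commit (previously one hour).
    now = datetime.now(timezone.utc)
    start_ts = (now - timedelta(minutes=30)).timestamp()

    cached_keys = {"example-key"}                    # illustrative cache contents
    events = {"example-key": {"ts": start_ts + 60}}  # illustrative event map

    live = []
    for k, v in events.items():
        if cached_keys & {k}:  # truthy only when k is in cached_keys
            ...                # what happens to cached keys is outside this hunk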
@@ -94,8 +94,7 @@ async def get_events(

     events = []

-    start_dt = now.delta(hours=-1)
-    end_dt = now.delta(minutes=10)
+    start_dt = now.delta(minutes=-30)

     for stream_group in api_data:
         event_ts = stream_group.get("ts")
@@ -109,7 +108,7 @@ async def get_events(

         event_dt = Time.from_ts(event_ts)

-        if not start_dt <= event_dt <= end_dt:
+        if not start_dt <= event_dt:
             continue

         event = get_event(t1, t2)
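Note on the two hunks above: the lookback shrinks from one hour to 30 minutes and the upper bound (end_dt, 10 minutes ahead) is removed entirely, so events are now filtered only by how long ago they started. A rough sketch of the old and new predicates, using the standard library in place of the repo's Time helper (which this diff does not show):

    from datetime import datetime, timedelta, timezone

    now = datetime.now(timezone.utc)
    start_dt = now - timedelta(minutes=30)  # was: now - timedelta(hours=1)
    end_dt = now + timedelta(minutes=10)    # this assignment is removed by the commit

    def keep_old(event_dt: datetime) -> bool:
        # before: anything outside [start_dt, end_dt] was skipped
        return start_dt <= event_dt <= end_dt

    def keep_new(event_dt: datetime) -> bool:
        # after: only events older than the 30-minute cutoff are skipped
        return start_dt <= event_dt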
@@ -177,8 +177,7 @@ async def get_events(
     events = []

     now = Time.clean(Time.now())
-    start_dt = now.delta(hours=-1)
-    end_dt = now.delta(minutes=10)
+    start_dt = now.delta(minutes=-30)
     pattern = re.compile(r"\-+|\(")

     for event in api_data:
@@ -196,7 +195,7 @@ async def get_events(

         event_dt = Time.from_ts(start_ts)

-        if not start_dt <= event_dt <= end_dt:
+        if not start_dt <= event_dt:
             continue

         sport = pattern.split(league, 1)[0].strip()
@@ -251,7 +250,7 @@ async def scrape(client: httpx.AsyncClient) -> None:

     if events:
         async with async_playwright() as p:
-            browser, context = await network.browser(p)
+            browser, context = await network.browser(p, browser="brave")

             for i, ev in enumerate(events, start=1):
                 handler = partial(
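Note: network.browser is the repo's own helper and its body is not part of this diff; Playwright has no built-in "brave" browser type, so the new browser="brave" argument presumably selects Brave through a Chromium launch. A hedged sketch of what such a helper might look like (executable path, headless default, and signature are assumptions):

    from playwright.async_api import Browser, BrowserContext, Playwright

    async def browser(p: Playwright, browser: str = "chromium") -> tuple[Browser, BrowserContext]:
        # Assumed mapping: "brave" -> launch Chromium with Brave's executable.
        exe = "/usr/bin/brave-browser" if browser == "brave" else None
        b = await p.chromium.launch(headless=True, executable_path=exe)
        ctx = await b.new_context()
        return b, ctx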