diff --git a/M3U8/scrapers/streamhub.py b/M3U8/scrapers/streamhub.py
index 5103075..c21a195 100644
--- a/M3U8/scrapers/streamhub.py
+++ b/M3U8/scrapers/streamhub.py
@@ -141,7 +141,7 @@ async def get_events(

     live = []

-    start_ts = now.delta(hours=-1).timestamp()
+    start_ts = now.delta(minutes=-30).timestamp()

     for k, v in events.items():
         if cached_keys & {k}:
diff --git a/M3U8/scrapers/streamsgate.py b/M3U8/scrapers/streamsgate.py
index 1428154..fc2e689 100644
--- a/M3U8/scrapers/streamsgate.py
+++ b/M3U8/scrapers/streamsgate.py
@@ -94,8 +94,7 @@ async def get_events(

     events = []

-    start_dt = now.delta(hours=-1)
-    end_dt = now.delta(minutes=10)
+    start_dt = now.delta(minutes=-30)

     for stream_group in api_data:
         event_ts = stream_group.get("ts")
@@ -109,7 +108,7 @@ async def get_events(

         event_dt = Time.from_ts(event_ts)

-        if not start_dt <= event_dt <= end_dt:
+        if not start_dt <= event_dt:
             continue

         event = get_event(t1, t2)
diff --git a/M3U8/scrapers/watchfooty.py b/M3U8/scrapers/watchfooty.py
index 804edb1..f9d61f6 100644
--- a/M3U8/scrapers/watchfooty.py
+++ b/M3U8/scrapers/watchfooty.py
@@ -177,8 +177,7 @@ async def get_events(

     events = []
     now = Time.clean(Time.now())
-    start_dt = now.delta(hours=-1)
-    end_dt = now.delta(minutes=10)
+    start_dt = now.delta(minutes=-30)

     pattern = re.compile(r"\-+|\(")
     for event in api_data:
@@ -196,7 +195,7 @@ async def get_events(

         event_dt = Time.from_ts(start_ts)

-        if not start_dt <= event_dt <= end_dt:
+        if not start_dt <= event_dt:
             continue

         sport = pattern.split(league, 1)[0].strip()
@@ -251,7 +250,7 @@ async def scrape(client: httpx.AsyncClient) -> None:

     if events:
         async with async_playwright() as p:
-            browser, context = await network.browser(p)
+            browser, context = await network.browser(p, browser="brave")

             for i, ev in enumerate(events, start=1):
                 handler = partial(