This commit is contained in:
parent b110aee1e8
commit 00000d9ba6

11 changed files with 121 additions and 135 deletions
@@ -95,7 +95,7 @@ async def get_events(
         r = await client.get(url)
         r.raise_for_status()
     except Exception as e:
-        log.error(f'Failed to fetch "{url}"\n{e}')
+        log.error(f'Failed to fetch "{url}": {e}')

         return []
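For context, the reworded error message sits inside the fetch path of get_events; the surrounding lines are not part of the hunk, so the following is only a minimal sketch of that pattern, assuming an httpx.AsyncClient and a standard-library logger (fetch_events is a hypothetical name, not the repository's function):

import logging

import httpx

log = logging.getLogger(__name__)


async def fetch_events(client: httpx.AsyncClient, url: str) -> list[dict]:
    # Hypothetical sketch: the real get_events parses the page further; this
    # only shows the fetch / raise_for_status / single-line error-log pattern.
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')
        return []
    events: list[dict] = []
    # ... parse r.text into event dicts here (omitted) ...
    return events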
@@ -132,7 +132,7 @@ async def get_events(
             time_text = time_span.text(strip=True)

-            timestamp = int(a.attributes.get("data-time", 31496400))
+            timestamp = int(a.attributes.get("data-time", Time.default_8()))

             key = f"[{sport}] {name} (SEAST)"
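The magic fallback 31496400 moves behind Time.default_8(), which is defined elsewhere in the repository and not shown in this diff. Purely to illustrate the intent, a hypothetical stand-in could look like this:

class Time:
    # Hypothetical stand-in only: the real Time helper lives elsewhere in the
    # repository. This sketch assumes default_8() simply names the previous
    # hard-coded fallback (31496400, a Unix timestamp) so callers no longer
    # repeat the magic number.
    @staticmethod
    def default_8() -> int:
        return 31496400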
@@ -180,15 +180,9 @@ async def scrape(client: httpx.AsyncClient) -> None:
         browser, context = await network.browser(p, browser="brave")

         for i, ev in enumerate(events, start=1):
-            url = await network.safe_process(
-                lambda: process_event(
-                    ev["link"],
-                    url_num=i,
-                    context=context,
-                ),
-                url_num=i,
-                log=log,
-            )
+            handler = partial(process_event, url=ev["link"], url_num=i, context=context)
+
+            url = await network.safe_process(handler, url_num=i, log=log)

             if url:
                 sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]
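The nine-line lambda wrapper becomes a functools.partial plus a plain await. Beyond being shorter, partial binds its arguments eagerly when the handler is built, while a lambda closes over ev and i by reference, the classic pitfall if the callable ever runs after the loop variable has moved on. A minimal, self-contained sketch of that difference (stand-in names only, not the repository's network.safe_process):

from functools import partial


def shout(word: str) -> str:
    return word.upper()


# Lambdas capture the loop variable by reference, so every one sees its final
# value; partial copies the argument at construction time.
lambdas = [lambda: shout(w) for w in ("a", "b", "c")]
partials = [partial(shout, w) for w in ("a", "b", "c")]

print([f() for f in lambdas])   # ['C', 'C', 'C']
print([f() for f in partials])  # ['A', 'B', 'C']

In the diff itself the handler is awaited inside the same iteration, so this reads as a readability and robustness cleanup rather than a bug fix.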