doms9 2025-12-16 20:28:51 -05:00
parent 3944fc8516
commit 00000d9bce
9 changed files with 9 additions and 9 deletions
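
Nearly every file in this commit gets the same one-line change: each Time.now() call is wrapped in Time.clean(...) before use (two hunks additionally pin the Playwright browser; see the sketch after the -172 hunk below). Time.clean's definition is not part of this diff; what follows is a minimal sketch of what such a helper plausibly does, assuming it truncates sub-second precision so cached timestamps compare stably. The class body is a guess, not code from this repo.

from datetime import datetime, timezone


class Time:
    """Hypothetical sketch of the project's Time helper."""

    @staticmethod
    def now() -> datetime:
        # The current moment as an aware UTC datetime.
        return datetime.now(timezone.utc)

    @staticmethod
    def clean(dt: datetime) -> datetime:
        # Assumption: "clean" zeroes sub-second precision so that
        # .timestamp() yields whole-second floats for cache entries.
        return dt.replace(microsecond=0)

Under that assumption, Time.clean(Time.now()).timestamp() returns a whole-second float such as 1765935531.0 rather than 1765935531.482193.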

@@ -113,7 +113,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
     if events:
-        now = Time.now().timestamp()
+        now = Time.clean(Time.now()).timestamp()
 
         for i, ev in enumerate(events, start=1):
             handler = partial(

@@ -111,7 +111,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
     if events:
-        now = Time.now().timestamp()
+        now = Time.clean(Time.now()).timestamp()
 
         async with async_playwright() as p:
             browser, context = await network.browser(p)

@@ -118,7 +118,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
     if events:
-        now = Time.now().timestamp()
+        now = Time.clean(Time.now()).timestamp()
 
         async with async_playwright() as p:
             browser, context = await network.browser(p, browser="brave")

@@ -32,7 +32,7 @@ async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
     events = {}
 
-    now = Time.now().timestamp()
+    now = Time.clean(Time.now()).timestamp()
 
     for streams in api_data.get("streams", {}).values():
         if not streams:

@@ -172,7 +172,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     if events:
         async with async_playwright() as p:
-            browser, context = await network.browser(p)
+            browser, context = await network.browser(p, browser="brave")
 
             for i, ev in enumerate(events, start=1):
                 handler = partial(
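
The hunk above (and the next one) changes a different line than the rest of the commit: network.browser(p) now pins browser="brave". The network module is not part of this diff; below is a minimal sketch of how such a helper might route that keyword to Playwright, assuming it launches Chromium and uses the argument to select an alternative executable. The EXECUTABLES mapping and path are illustrative, not from the repo.

from playwright.async_api import Browser, BrowserContext, Playwright

# Hypothetical executable mapping; the real one lives in the project's
# network module, which this page does not show.
EXECUTABLES = {"brave": "/usr/bin/brave-browser"}


async def browser(
    p: Playwright, browser: str | None = None
) -> tuple[Browser, BrowserContext]:
    # Assumption: browser="brave" points Chromium at Brave's executable,
    # while the default (None) launches stock Chromium.
    b = await p.chromium.launch(
        headless=True,
        executable_path=EXECUTABLES.get(browser),
    )
    return b, await b.new_context()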

@@ -153,7 +153,7 @@ async def scrape(client: httpx.AsyncClient) -> None:
     if events:
         async with async_playwright() as p:
-            browser, context = await network.browser(p)
+            browser, context = await network.browser(p, browser="brave")
 
             for i, ev in enumerate(events, start=1):
                 handler = partial(

@@ -28,7 +28,7 @@ async def get_data(client: httpx.AsyncClient) -> list[str]:
 
 
 async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
-    now = Time.now().timestamp()
+    now = Time.clean(Time.now()).timestamp()
     events = {}

@@ -72,7 +72,7 @@ async def refresh_api_cache(
     for ev in data:
         ev["ts"] = ev.pop("timestamp")
 
-    data[-1]["timestamp"] = Time.now().timestamp()
+    data[-1]["timestamp"] = Time.clean(Time.now()).timestamp()
 
     return data
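
A side note on the hunk above: the unchanged context shows each cached event's timestamp key being renamed to ts, while a fresh timestamp on the final list element records when the cache itself was refreshed. An illustration of the resulting shape, reusing the hypothetical Time sketch from the top of this page (event values are made up):

# Each event carries "ts"; the last element additionally carries a
# cache-level "timestamp" marking the refresh moment.
data = [
    {"name": "event-a", "ts": 1765900000.0},
    {"name": "event-b", "ts": 1765903600.0},
]
data[-1]["timestamp"] = Time.clean(Time.now()).timestamp()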

@@ -35,7 +35,7 @@ async def refresh_html_cache(
         return {}
 
-    now = Time.now()
+    now = Time.clean(Time.now())
 
     soup = HTMLParser(r.content)