parent cc38eca60b
commit 00000d9383
4 changed files with 333 additions and 298 deletions
@@ -7,7 +7,7 @@ from functools import partial
 from pathlib import Path
 
 import httpx
-from playwright.async_api import async_playwright
+from playwright.async_api import BrowserContext, async_playwright
 
 from .utils import Cache, Time, get_logger, leagues, network
 
@@ -89,10 +89,11 @@ async def fetch_xml_stream(url: str, ssl_ctx: ssl.SSLContext) -> io.BytesIO | None:
         return
 
 
-async def process_event(url: str, url_num: int) -> str | None:
-    async with async_playwright() as p:
-        browser, context = await network.browser(p, ignore_https_errors=True)
-
+async def process_event(
+    url: str,
+    url_num: int,
+    context: BrowserContext,
+) -> str | None:
     page = await context.new_page()
 
     captured: list[str] = []
@@ -198,8 +199,6 @@ async def process_event(url: str, url_num: int) -> str | None:
     except Exception:
         pass
 
-        await browser.close()
-
 
 async def get_events(
     url: str,
@@ -289,11 +288,18 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
+    async with async_playwright() as p:
+        browser, context = await network.browser(p, ignore_https_errors=True)
+
         for i, ev in enumerate(events, start=1):
             link = ev["link"]
 
             url = await network.safe_process(
-                lambda: process_event(link, url_num=i),
+                lambda: process_event(
+                    link,
+                    url_num=i,
+                    context=context,
+                ),
                 url_num=i,
                 log=log,
             )
 
@@ -323,6 +329,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
             urls[key] = cached_urls[key] = entry
 
+        await browser.close()
+
 
     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
     else:
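The pattern is the same across all four scrapers in this commit: `process_event` used to launch a full Playwright instance per event; now `scrape` opens one browser up front and hands a shared `BrowserContext` down. A minimal runnable sketch of the resulting shape, with stock Chromium standing in for the project's `network.browser` helper and a bare `page.goto` standing in for the real request-capture logic:

import asyncio

from playwright.async_api import BrowserContext, async_playwright


async def process_event(url: str, url_num: int, context: BrowserContext) -> str | None:
    # A fresh page per event; the browser and its context are shared.
    page = await context.new_page()
    try:
        await page.goto(url)
        return page.url
    finally:
        await page.close()


async def scrape(events: list[str]) -> None:
    # One browser for the whole run instead of one launch per event.
    async with async_playwright() as p:
        browser = await p.chromium.launch()
        context = await browser.new_context(ignore_https_errors=True)
        for i, link in enumerate(events, start=1):
            print(i, await process_event(link, url_num=i, context=context))
        await browser.close()


asyncio.run(scrape(["https://example.com"]))

Besides saving startup cost, this moves teardown to one place: `await browser.close()` now appears once at the end of `scrape` rather than inside every `process_event`.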
@@ -4,7 +4,7 @@ from pathlib import Path
 from urllib.parse import urljoin
 
 import httpx
-from playwright.async_api import async_playwright
+from playwright.async_api import BrowserContext, async_playwright
 
 from .utils import Cache, Time, get_logger, leagues, network
 
@@ -68,10 +68,11 @@ async def refresh_api_cache(
         return r.json()
 
 
-async def process_event(url: str, url_num: int) -> str | None:
-    async with async_playwright() as p:
-        browser, context = await network.browser(p)
-
+async def process_event(
+    url: str,
+    url_num: int,
+    context: BrowserContext,
+) -> str | None:
     page = await context.new_page()
 
     captured: list[str] = []
@@ -121,7 +122,6 @@ async def process_event(url: str, url_num: int) -> str | None:
     finally:
         page.remove_listener("request", handler)
         await page.close()
-        await browser.close()
 
 
 async def get_events(
@@ -199,9 +199,16 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
+    async with async_playwright() as p:
+        browser, context = await network.browser(p)
+
         for i, ev in enumerate(events, start=1):
             url = await network.safe_process(
-                lambda: process_event(ev["link"], url_num=i),
+                lambda: process_event(
+                    ev["link"],
+                    url_num=i,
+                    context=context,
+                ),
                 url_num=i,
                 log=log,
             )
 
@@ -226,6 +233,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
            urls[key] = cached_urls[key] = entry
 
+        await browser.close()
+
 
     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
     else:
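`network.browser` itself is not touched by this commit; judging only from its call sites (`ignore_https_errors=True` above, `browser="brave"` in the two files below), it launches a Chromium-family browser and returns a `(browser, context)` pair. A hypothetical sketch under those assumptions, with the Brave executable path invented purely for illustration:

from playwright.async_api import Browser, BrowserContext, Playwright

BRAVE_PATH = "/usr/bin/brave-browser"  # assumed location, illustration only


async def browser(
    p: Playwright,
    browser: str = "chromium",
    ignore_https_errors: bool = False,
) -> tuple[Browser, BrowserContext]:
    # Brave is a Chromium build, so it can be driven via executable_path;
    # anything else falls back to Playwright's bundled Chromium.
    if browser == "brave":
        b = await p.chromium.launch(executable_path=BRAVE_PATH)
    else:
        b = await p.chromium.launch()
    ctx = await b.new_context(ignore_https_errors=ignore_https_errors)
    return b, ctx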
@@ -4,7 +4,7 @@ from pathlib import Path
 from urllib.parse import urljoin
 
 import httpx
-from playwright.async_api import async_playwright
+from playwright.async_api import BrowserContext, async_playwright
 from selectolax.parser import HTMLParser
 
 from .utils import Cache, Time, get_logger, leagues, network
@@ -34,10 +34,11 @@ MIRRORS = [
 ]
 
 
-async def process_event(url: str, url_num: int) -> str | None:
-    async with async_playwright() as p:
-        browser, context = await network.browser(p, browser="brave")
-
+async def process_event(
+    url: str,
+    url_num: int,
+    context: BrowserContext,
+) -> str | None:
     page = await context.new_page()
 
     captured: list[str] = []
@@ -83,7 +84,6 @@ async def process_event(url: str, url_num: int) -> str | None:
     finally:
         page.remove_listener("request", handler)
         await page.close()
-        await browser.close()
 
 
 async def get_events(
@@ -173,9 +173,16 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
+    async with async_playwright() as p:
+        browser, context = await network.browser(p, browser="brave")
+
         for i, ev in enumerate(events, start=1):
             url = await network.safe_process(
-                lambda: process_event(ev["link"], url_num=i),
+                lambda: process_event(
+                    ev["link"],
+                    url_num=i,
+                    context=context,
+                ),
                 url_num=i,
                 log=log,
             )
 
@@ -201,6 +208,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
            urls[key] = cached_urls[key] = entry
 
+        await browser.close()
+
 
     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
     else:
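`network.safe_process` is likewise visible only through its call sites: it takes a zero-argument async callable plus `url_num` and `log` keyword arguments and returns the captured URL, or `None` when processing fails. A hypothetical sketch of such a wrapper:

import logging
from collections.abc import Awaitable, Callable


async def safe_process(
    fn: Callable[[], Awaitable[str | None]],
    *,
    url_num: int,
    log: logging.Logger,
) -> str | None:
    # Shield the scrape loop: one bad event logs and yields None
    # instead of aborting the whole run.
    try:
        return await fn()
    except Exception:
        log.exception(f"URL {url_num}: processing failed")
        return None

Because `safe_process` awaits the callable immediately, the lambdas built in the loops above capture `i` and `context` safely; late binding would only matter if the callables were stored and run after the loop advanced.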
@@ -4,7 +4,7 @@ from functools import partial
 from pathlib import Path
 
 import httpx
-from playwright.async_api import async_playwright
+from playwright.async_api import BrowserContext, async_playwright
 from selectolax.parser import HTMLParser
 
 from .utils import Cache, Time, get_logger, leagues, network
@@ -20,10 +20,11 @@ HTML_CACHE = Cache(Path(__file__).parent / "caches" / "streamed_php.json", exp=8
 BASE_URL = "https://streamed.site/webmaster.php"
 
 
-async def process_event(url: str, url_num: int) -> str | None:
-    async with async_playwright() as p:
-        browser, context = await network.browser(p, browser="brave")
-
+async def process_event(
+    url: str,
+    url_num: int,
+    context: BrowserContext,
+) -> str | None:
     page = await context.new_page()
 
     captured: list[str] = []
@@ -69,7 +70,6 @@ async def process_event(url: str, url_num: int) -> str | None:
     finally:
         page.remove_listener("request", handler)
         await page.close()
-        await browser.close()
 
 
 async def refresh_html_cache(client: httpx.AsyncClient, url: str) -> dict[str, str]:
@@ -168,9 +168,16 @@ async def scrape(client: httpx.AsyncClient) -> None:
     log.info(f"Processing {len(events)} new URL(s)")
 
+    async with async_playwright() as p:
+        browser, context = await network.browser(p, browser="brave")
+
         for i, ev in enumerate(events, start=1):
             url = await network.safe_process(
-                lambda: process_event(ev["link"], url_num=i),
+                lambda: process_event(
+                    ev["link"],
+                    url_num=i,
+                    context=context,
+                ),
                 url_num=i,
                 log=log,
             )
 
@@ -192,6 +199,8 @@ async def scrape(client: httpx.AsyncClient) -> None:
            urls[key] = cached_urls[key] = entry
 
+        await browser.close()
+
 
     if new_count := len(cached_urls) - cached_count:
         log.info(f"Collected and cached {new_count} new event(s)")
     else: