Add scout MCP server for browser recon + msgspec workspace dep

- tools/scout/: browser automation MCP server using Pydoll (CDP, no WebDriver)
  - scout_visit, scout_elements (text-first), scout_click, scout_fill, scout_select
  - scout_scroll, scout_text, scout_screenshot (opt-in)
  - scout_har_start / scout_har_stop (asyncio task holds recording context open)
  - scout_analyze: HAR parsing with HarEntry/HarSummary msgspec structs
  - Standalone project (not workspace member — websockets conflict with prefect)
  - Runs via: uv run --directory tools/scout scout-server

- .mcp.json: registers scout as Claude Code MCP server (project scope)

- msgspec>=0.19 added to root project deps (workspace-wide struct/validation)

- coding_philosophy.md: document msgspec as approved dep, usage rules

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Deeman
2026-02-21 15:44:02 +01:00
parent c9e9562030
commit b167a0a9f4
10 changed files with 1934 additions and 1 deletions

9
.mcp.json Normal file
View File

@@ -0,0 +1,9 @@
{
"mcpServers": {
"scout": {
"type": "stdio",
"command": "uv",
"args": ["run", "--directory", "tools/scout", "scout-server"]
}
}
}

View File

@@ -201,6 +201,13 @@ active_users = [u for u in users if u.is_active()]
- Small, focused libraries
- Direct solutions
- Understanding what code does
**Approved dependencies (earn their place):**
- `msgspec` — struct types and validation at system boundaries (external APIs, user input,
inter-process data). Use `msgspec.Struct` instead of dataclasses when you need: fast
encode/decode, built-in validation, or typed containers for boundary data.
**Rule:** use Structs at boundaries (API responses, HAR entries, MCP tool I/O) —
keep internal plumbing as plain dicts/tuples.
</question_dependencies>
</architecture_principles>

View File

@@ -15,6 +15,7 @@ dependencies = [
"niquests>=3.15.2",
"hcloud>=2.8.0",
"prefect>=3.6.15",
"msgspec>=0.19",
]
[project.scripts]
@@ -43,7 +44,6 @@ sqlmesh_materia = {workspace = true }
cftc_cot = {workspace = true }
coffee_prices = {workspace = true }
ice_stocks = {workspace = true }
[tool.uv.workspace]
members = [
"extract/*",

View File

@@ -0,0 +1,20 @@
[project]
name = "scout"
version = "0.1.0"
description = "Browser recon MCP server — discover API endpoints via HAR recording"
requires-python = ">=3.13"
dependencies = [
"pydoll-python>=1.5",
"mcp[cli]>=1.0",
"msgspec>=0.19",
]
[project.scripts]
scout-server = "scout.server:main"
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = ["src/scout"]

View File

View File

@@ -0,0 +1,190 @@
"""HAR file analysis — filter static assets, surface API endpoints and downloads.
Parses HAR 1.2 JSON files produced by Pydoll's network recorder. Filters out
static assets (JS, CSS, images, fonts) and returns a structured summary of:
- API calls (JSON responses, any POST request)
- Data downloads (CSV, PDF, Excel)
Typical call:
summary = analyze_har_file("data/scout/recording.har")
print(format_summary(summary))
"""
import json
import pathlib
import msgspec
# File extensions that mark a request as a static asset (matched against the
# URL path with the query string stripped).
STATIC_EXTENSIONS = frozenset(
    {".js", ".css", ".png", ".jpg", ".jpeg", ".gif", ".svg", ".ico",
     ".woff", ".woff2", ".ttf", ".eot", ".map", ".webp", ".avif", ".apng"}
)
# Content-type prefixes treated as static; entries ending in "/" (image/,
# font/, ...) match whole MIME families via startswith in _is_static.
STATIC_CONTENT_TYPES = frozenset(
    {"text/html", "text/javascript", "application/javascript",
     "text/css", "image/", "font/", "audio/", "video/"}
)
# Content-type prefixes counted as data downloads (matched with startswith).
DOWNLOAD_CONTENT_TYPES = (
    "text/csv",
    "application/pdf",
    "application/vnd.ms-excel",
    "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    "application/octet-stream",
    "text/plain",
)
# POST bodies are truncated to this many characters in HarEntry.post_body.
POST_BODY_MAX_CHARS = 500
class HarEntry(msgspec.Struct):
    """A single interesting HTTP request/response from a HAR file."""
    method: str        # HTTP method, upper-cased ("GET", "POST", ...)
    url: str           # full request URL
    status: int        # response status code (0 when absent in the HAR)
    content_type: str  # response MIME type, lower-cased, parameters stripped
    size_bytes: int    # response body size, clamped to >= 0
    post_body: str = ""  # truncated request body; "" for bodyless requests
class HarSummary(msgspec.Struct):
    """Analysis result: static assets filtered out, interesting entries categorized."""
    api_calls: list[HarEntry]          # JSON/XML responses and POST requests
    downloads: list[HarEntry]          # CSV/PDF/Excel/octet-stream responses
    other_interesting: list[HarEntry]  # non-static entries fitting neither bucket
    total_entries: int                 # entry count before filtering
    filtered_static: int               # entries dropped as static assets
def _is_static(url: str, content_type: str) -> bool:
    """Return True if this entry looks like a static asset.

    Checks the URL path extension against STATIC_EXTENSIONS first, then falls
    back to a content-type prefix match against STATIC_CONTENT_TYPES.
    """
    # Strip query string AND fragment before extracting the extension;
    # previously "app.js#v2" slipped past the extension check.
    path = url.split("?")[0].split("#")[0].lower()
    ext = pathlib.PurePosixPath(path).suffix
    if ext in STATIC_EXTENSIONS:
        return True
    ct = content_type.lower().split(";")[0].strip()
    return any(ct.startswith(s) for s in STATIC_CONTENT_TYPES)
def _extract_entry(raw: dict) -> HarEntry | None:
    """Parse a raw HAR entry dict into a typed HarEntry.

    Returns None for static assets (see _is_static). Missing fields fall back
    to empty/zero values so malformed entries never raise.
    """
    request = raw.get("request", {})
    response = raw.get("response", {})
    url = request.get("url", "")
    method = request.get("method", "").upper()
    status = response.get("status", 0)
    content = response.get("content", {})
    content_type = content.get("mimeType", "").lower().split(";")[0].strip()
    size_bytes = max(content.get("size", 0), 0)  # some tools emit -1 for "unknown"
    if _is_static(url, content_type):
        return None
    # Extract POST body: prefer the raw text, fall back to name=value params.
    post_body = ""
    post_data = request.get("postData", {})
    if post_data:
        text = post_data.get("text", "")
        params = post_data.get("params", [])
        if text:
            post_body = text[:POST_BODY_MAX_CHARS]
        elif params:
            # .get() on BOTH keys: HAR params may omit "name" as well as
            # "value" — p["name"] raised KeyError on such entries.
            post_body = "&".join(
                f"{p.get('name', '')}={p.get('value', '')}" for p in params
            )[:POST_BODY_MAX_CHARS]
    return HarEntry(
        method=method,
        url=url,
        status=status,
        content_type=content_type,
        size_bytes=size_bytes,
        post_body=post_body,
    )
def analyze_har_file(har_path: str) -> HarSummary:
    """Parse HAR JSON, filter static assets, categorize interesting entries.

    Args:
        har_path: Path to a HAR 1.2 JSON file.

    Raises:
        ValueError: if the file contains no log entries.
    """
    data = json.loads(pathlib.Path(har_path).read_bytes())
    raw_entries = data.get("log", {}).get("entries", [])
    # Real validation, not assert — asserts are stripped under `python -O`.
    if not raw_entries:
        raise ValueError(f"No entries found in HAR file: {har_path}")
    total = len(raw_entries)
    filtered_static = 0
    api_calls: list[HarEntry] = []
    downloads: list[HarEntry] = []
    other_interesting: list[HarEntry] = []
    for raw in raw_entries:
        entry = _extract_entry(raw)
        if entry is None:
            filtered_static += 1
            continue
        ct = entry.content_type
        is_download = any(ct.startswith(t) for t in DOWNLOAD_CONTENT_TYPES)
        # "+json" covers structured suffixes like application/vnd.api+json.
        is_api = (
            ct in ("application/json", "application/xml")
            or ct.endswith("+json")
            or entry.method == "POST"
        )
        if is_download:
            downloads.append(entry)
        elif is_api:
            api_calls.append(entry)
        else:
            other_interesting.append(entry)
    return HarSummary(
        api_calls=api_calls,
        downloads=downloads,
        other_interesting=other_interesting,
        total_entries=total,
        filtered_static=filtered_static,
    )
def format_summary(summary: HarSummary) -> str:
    """Format HarSummary as human-readable text for MCP tool response.

    Order: header, API calls (with truncated POST bodies), downloads, then a
    capped list of "other" entries. Returns a single newline-joined string.
    """
    parts = [
        f"HAR Analysis: {summary.total_entries} total entries, "
        f"{summary.filtered_static} static assets filtered\n"
        f"Found: {len(summary.api_calls)} API calls, "
        f"{len(summary.downloads)} downloads, "
        f"{len(summary.other_interesting)} other\n",
    ]
    if summary.api_calls:
        parts.append("API Calls:")
        for e in summary.api_calls:
            parts.append(
                f"  {e.method:<6} {e.url}"
                f" [{e.status}, {e.content_type}, {e.size_bytes:,}B]"
            )
            # post_body is already truncated to POST_BODY_MAX_CHARS upstream.
            if e.post_body:
                parts.append(f"    Body: {e.post_body}")
        parts.append("")
    if summary.downloads:
        parts.append("Downloads:")
        for e in summary.downloads:
            parts.append(
                f"  {e.method:<6} {e.url}"
                f" [{e.status}, {e.content_type}, {e.size_bytes:,}B]"
            )
        parts.append("")
    if summary.other_interesting:
        parts.append("Other (non-static, non-JSON, non-download):")
        for e in summary.other_interesting[:10]:  # cap output
            parts.append(f"  {e.method:<6} {e.url} [{e.status}, {e.content_type}]")
        if len(summary.other_interesting) > 10:
            parts.append(f"  ... and {len(summary.other_interesting) - 10} more")
        parts.append("")
    if not summary.api_calls and not summary.downloads:
        parts.append("No API calls or downloads found after filtering static assets.")
    return "\n".join(parts)

View File

@@ -0,0 +1,396 @@
"""Pydoll browser session management for the scout MCP server.
Manages a single long-lived Chrome instance across multiple MCP tool calls.
The browser starts on the first scout_visit and stays alive until scout_close.
State is module-level (lives for the duration of the MCP server process).
HAR recording is managed via an asyncio.Task that holds the Pydoll context
manager open between scout_har_start and scout_har_stop calls.
Bot evasion:
- CDP-based (no chromedriver, navigator.webdriver stays false)
- Humanized mouse movement (Bezier curves) on all clicks
- Headed browser by default (no headless detection vectors)
"""
import asyncio
import logging
import pathlib
from datetime import datetime
import msgspec
from pydoll.browser.chromium import Chrome
logger = logging.getLogger("scout.browser")

# Module-level browser state — lives for the MCP server process lifetime.
# Using a plain dict so all fields are in one place and easy to reset.
_state: dict = {
    "browser": None,          # Chrome instance
    "tab": None,              # Active tab
    "har_task": None,         # asyncio.Task holding the recording context manager
    "har_stop_event": None,   # asyncio.Event signalled to stop recording
    "har_result": None,       # asyncio.Future resolving to HAR file path
}

OUTPUT_DIR = pathlib.Path("data/scout")  # screenshots and HAR files land here
CLICK_TIMEOUT_SECONDS = 10               # max wait when locating an element
NAVIGATION_WAIT_SECONDS = 2              # settle time after click-triggered navigation
ELEMENT_CAP = 60  # max elements per category to avoid huge responses
class PageElement(msgspec.Struct):
    """An interactive element found on the current page."""
    kind: str      # "link", "button", "form", "select", "input"
    text: str      # visible text or label (truncated)
    selector: str  # usable CSS selector or description
    href: str = ""    # for links
    action: str = ""  # for forms (action URL)
    method: str = ""  # for forms (GET/POST)
    # NOTE(review): msgspec Structs copy mutable defaults per instance, so the
    # shared-mutable-default pitfall of plain classes should not apply — confirm
    # against the pinned msgspec version.
    options: list[str] = []  # for selects (option texts)
class PageInfo(msgspec.Struct):
    """Result of a page visit or navigation action."""
    title: str          # document title after the action
    url: str            # page URL ("" when it could not be determined)
    element_count: int  # count of <a> anchors — rough page-complexity signal
async def _ensure_browser() -> None:
    """Launch Chrome if not already running. Idempotent."""
    if _state["tab"] is not None:
        return  # already up — nothing to do
    chrome = Chrome()
    active_tab = await chrome.start()
    _state["browser"] = chrome
    _state["tab"] = active_tab
    OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
    logger.info("Chrome launched")
async def visit(url: str) -> PageInfo:
    """Navigate to url. Opens browser on first call."""
    await _ensure_browser()
    tab = _state["tab"]
    await tab.go_to(url)
    await asyncio.sleep(1)  # give dynamic content a moment to settle
    page_title = await tab.title
    anchors = await tab.query("a", find_all=True)
    return PageInfo(
        title=page_title,
        url=url,
        element_count=len(anchors) if anchors else 0,
    )
async def get_elements(filter_type: str = "") -> list[PageElement]:
"""Enumerate interactive elements on the current page.
filter_type: "", "links", "buttons", "forms", "selects", "inputs"
Returns typed PageElement structs (not screenshots).
"""
assert _state["tab"] is not None, "No browser open — call scout_visit first"
tab = _state["tab"]
elements: list[PageElement] = []
# Links
if not filter_type or filter_type == "links":
nodes = await tab.query("a[href]", find_all=True) or []
for node in nodes[:ELEMENT_CAP]:
try:
text = (await node.text or "").strip()[:100]
href = (await node.get_attribute("href") or "").strip()
if text or href:
elements.append(PageElement(
kind="link",
text=text,
selector=f'a[href="{href}"]' if href else "a",
href=href,
))
except Exception:
continue
# Buttons
if not filter_type or filter_type == "buttons":
nodes = await tab.query(
"button, input[type=submit], input[type=button]", find_all=True
) or []
for node in nodes[:20]:
try:
text = (await node.text or "").strip()
if not text:
text = await node.get_attribute("value") or ""
text = text[:100]
cls = (await node.get_attribute("class") or "").strip()
sel = f"button.{cls.split()[0]}" if cls else "button"
elements.append(PageElement(kind="button", text=text, selector=sel))
except Exception:
continue
# Selects
if not filter_type or filter_type == "selects":
nodes = await tab.query("select", find_all=True) or []
for node in nodes[:10]:
try:
name = (
await node.get_attribute("name")
or await node.get_attribute("id")
or ""
).strip()
option_nodes = await node.query("option", find_all=True) or []
opts = []
for opt in option_nodes[:15]:
opt_text = (await opt.text or "").strip()
if opt_text:
opts.append(opt_text)
sel = f"select[name='{name}']" if name else "select"
elements.append(PageElement(
kind="select", text=name, selector=sel, options=opts
))
except Exception:
continue
# Forms
if not filter_type or filter_type == "forms":
nodes = await tab.query("form", find_all=True) or []
for node in nodes[:10]:
try:
action = (await node.get_attribute("action") or "").strip()
method = (await node.get_attribute("method") or "GET").upper()
elements.append(PageElement(
kind="form",
text=f"{method} {action}",
selector="form",
action=action,
method=method,
))
except Exception:
continue
# Inputs
if filter_type == "inputs":
nodes = await tab.query(
"input:not([type=hidden]):not([type=submit]):not([type=button])",
find_all=True,
) or []
for node in nodes[:20]:
try:
name = (await node.get_attribute("name") or "").strip()
input_type = (await node.get_attribute("type") or "text").strip()
placeholder = (await node.get_attribute("placeholder") or "").strip()
label = name or placeholder or input_type
sel = f"input[name='{name}']" if name else f"input[type='{input_type}']"
elements.append(PageElement(kind="input", text=label, selector=sel))
except Exception:
continue
return elements
def format_elements(elements: list[PageElement]) -> str:
    """Format a list of PageElement structs as human-readable text.

    Groups elements by kind, renders each group under a labelled header with
    per-group [index] prefixes; selectors shown are meant for scout_click.
    """
    if not elements:
        return "No interactive elements found."
    # Group by kind
    groups: dict[str, list[PageElement]] = {}
    for e in elements:
        groups.setdefault(e.kind, []).append(e)
    lines: list[str] = [f"Elements ({len(elements)} total):"]
    kind_labels = {
        "link": "Links", "button": "Buttons",
        "form": "Forms", "select": "Selects", "input": "Inputs",
    }
    # Fixed order keeps output deterministic regardless of discovery order.
    for kind in ["link", "button", "select", "form", "input"]:
        group = groups.get(kind, [])
        if not group:
            continue
        lines.append(f"\n{kind_labels.get(kind, kind.capitalize())} ({len(group)}):")
        for i, e in enumerate(group):
            if kind == "link":
                lines.append(f"  [{i}] {e.text!r:<40}{e.href}")
            elif kind == "select":
                # Show at most 5 option labels, then a count of the rest.
                opts = ", ".join(e.options[:5])
                if len(e.options) > 5:
                    opts += f", ... (+{len(e.options) - 5} more)"
                lines.append(f"  [{i}] {e.text!r} selector: {e.selector}")
                lines.append(f"      options: {opts}")
            elif kind == "form":
                lines.append(f"  [{i}] {e.text} selector: {e.selector}")
            else:
                lines.append(f"  [{i}] {e.text!r:<40} selector: {e.selector}")
    return "\n".join(lines)
async def click(selector: str) -> PageInfo:
    """Click an element. Use 'text=Foo' to click by visible text, else CSS selector."""
    assert _state["tab"] is not None, "No browser open — call scout_visit first"
    tab = _state["tab"]
    target = (
        await tab.find(text=selector[5:], timeout=CLICK_TIMEOUT_SECONDS)
        if selector.startswith("text=")
        else await tab.query(selector, timeout=CLICK_TIMEOUT_SECONDS)
    )
    assert target is not None, f"Element not found: {selector!r}"
    await target.click()
    await asyncio.sleep(NAVIGATION_WAIT_SECONDS)  # let any navigation settle
    page_title = await tab.title
    # current_url is not present on every Pydoll version — fall back to "".
    page_url = await tab.current_url if hasattr(tab, "current_url") else ""
    anchors = await tab.query("a", find_all=True) or []
    return PageInfo(title=page_title, url=page_url or "", element_count=len(anchors))
async def fill(selector: str, value: str) -> str:
    """Type a value into a form field."""
    assert _state["tab"] is not None, "No browser open — call scout_visit first"
    tab = _state["tab"]
    field = (
        await tab.find(text=selector[5:], timeout=CLICK_TIMEOUT_SECONDS)
        if selector.startswith("text=")
        else await tab.query(selector, timeout=CLICK_TIMEOUT_SECONDS)
    )
    assert field is not None, f"Element not found: {selector!r}"
    # insert_text is instant (no keystroke simulation)
    await field.insert_text(value)
    return f"Filled {selector!r} with {value!r}"
async def select_option(selector: str, value: str) -> str:
    """Select an option in a <select> element."""
    assert _state["tab"] is not None, "No browser open — call scout_visit first"
    dropdown = await _state["tab"].query(selector, timeout=CLICK_TIMEOUT_SECONDS)
    assert dropdown is not None, f"Select element not found: {selector!r}"
    await dropdown.select_option(value)
    return f"Selected {value!r} in {selector!r}"
async def scroll(direction: str, amount_px: int = 400) -> str:
    """Scroll the page up or down."""
    assert _state["tab"] is not None, "No browser open — call scout_visit first"
    # JS scroll — simple and reliable; a negative delta scrolls up.
    delta = amount_px if direction == "down" else -amount_px
    await _state["tab"].execute_script(f"window.scrollBy(0, {delta})")
    return f"Scrolled {direction} {amount_px}px"
async def get_text(selector: str = "") -> str:
"""Get visible text from the page or a specific element."""
assert _state["tab"] is not None, "No browser open — call scout_visit first"
tab = _state["tab"]
if selector:
element = await tab.query(selector, timeout=CLICK_TIMEOUT_SECONDS)
assert element is not None, f"Element not found: {selector!r}"
text = await element.text or ""
else:
# Get body text content
body = await tab.query("body", timeout=5)
text = await body.text if body else ""
# Truncate very long text to avoid overwhelming the response
return text[:3000] if text else "(no text content)"
async def screenshot(label: str = "") -> str:
"""Take a screenshot and save to data/scout/. Returns the file path."""
assert _state["tab"] is not None, "No browser open — call scout_visit first"
tab = _state["tab"]
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
name = f"{label}_{timestamp}" if label else timestamp
path = OUTPUT_DIR / f"screenshot_{name}.png"
await tab.take_screenshot(str(path), beyond_viewport=False)
assert path.exists(), f"Screenshot was not written: {path}"
return str(path)
# --- HAR recording (asyncio Task holds context manager open) ---
async def _har_recording_task(tab, har_path: pathlib.Path, stop_event: asyncio.Event, result_future: asyncio.Future) -> None:
    """Background task: enters recording context, waits for stop, saves HAR.

    Holds Pydoll's recording context manager open across MCP tool calls —
    har_start() spawns this task, har_stop() sets stop_event and awaits
    result_future.

    Args:
        tab: Active Pydoll tab whose network traffic is recorded.
        har_path: Destination path for the saved HAR file.
        stop_event: Set by har_stop() to end the recording.
        result_future: Resolved with the HAR path on success, or with the
            exception on failure.
    """
    try:
        async with tab.request.record() as capture:
            await stop_event.wait()
            # Save while still inside context manager (capture is valid here)
            capture.save(str(har_path))
            result_future.set_result(str(har_path))
    except Exception as e:
        # Surface recorder failures to the awaiting har_stop() call.
        result_future.set_exception(e)
async def har_start() -> str:
    """Start recording all network traffic. Use scout_har_stop to save.

    Raises:
        AssertionError: if no browser is open or a recording is already
            in progress.
    """
    assert _state["tab"] is not None, "No browser open — call scout_visit first"
    assert _state["har_task"] is None, "HAR recording already in progress"
    tab = _state["tab"]
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    har_path = OUTPUT_DIR / f"har_{timestamp}.har"
    stop_event = asyncio.Event()
    # get_running_loop(), not get_event_loop(): we are inside a coroutine,
    # and get_event_loop() there is deprecated since Python 3.10.
    result_future: asyncio.Future = asyncio.get_running_loop().create_future()
    _state["har_stop_event"] = stop_event
    _state["har_result"] = result_future
    _state["har_task"] = asyncio.create_task(
        _har_recording_task(tab, har_path, stop_event, result_future)
    )
    # Give the task time to enter the context manager before returning
    await asyncio.sleep(0.2)
    return f"Recording started — will save to {har_path}"
async def har_stop() -> str:
    """Stop recording and save HAR file. Returns the file path.

    The har_* state is cleared even when awaiting the result fails (timeout
    or recorder error), so a broken recording cannot wedge every future
    har_start call; the background task is cancelled on failure.
    """
    assert _state["har_stop_event"] is not None, "No HAR recording in progress"
    _state["har_stop_event"].set()
    try:
        har_path = await asyncio.wait_for(_state["har_result"], timeout=15.0)
    except BaseException:
        # Timeout/error: wait_for cancels the future; cancel the task too so
        # it does not linger trying to resolve an already-cancelled future.
        if _state["har_task"] is not None:
            _state["har_task"].cancel()
        raise
    finally:
        _state["har_task"] = None
        _state["har_stop_event"] = None
        _state["har_result"] = None
    assert pathlib.Path(har_path).exists(), f"HAR file not written: {har_path}"
    size_kb = pathlib.Path(har_path).stat().st_size // 1024
    return f"HAR saved: {har_path} ({size_kb}KB)"
async def close() -> str:
    """Close the browser and clean up all state."""
    # Flush any in-flight HAR recording before tearing the browser down.
    if _state["har_stop_event"] is not None:
        try:
            await har_stop()
        except Exception:
            pass  # best-effort — closing proceeds regardless
    active_browser = _state["browser"]
    if active_browser is not None:
        try:
            await active_browser.stop()
        except Exception:
            pass  # browser may already be gone
    # Reset every state slot in one sweep.
    for key in ("browser", "tab", "har_task", "har_stop_event", "har_result"):
        _state[key] = None
    return "Browser closed"

View File

@@ -0,0 +1,170 @@
"""Scout MCP server — browser recon tools for Claude Code.
Exposes browser automation as MCP tools. The server runs as a Claude Code
child process (stdio transport) — starts when Claude Code starts, dies when
Claude Code exits. No daemon, no port, no systemd.
The browser session is stateful across tool calls: scout_visit opens Chrome,
subsequent tools operate on the same tab, scout_close shuts down.
Text-first: tools return structured text (element lists, page titles, HAR
summaries). Screenshots are an explicit opt-in via scout_screenshot.
Usage (via .mcp.json):
    uv run --directory tools/scout scout-server
"""
import logging
import sys
from mcp.server.fastmcp import FastMCP
from scout import analyze, browser
# Log to stderr: stdout carries the MCP stdio protocol and must stay clean.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(name)s %(levelname)s %(message)s",
    handlers=[logging.StreamHandler(sys.stderr)],
)

# Single FastMCP app; the tools below register against it via @mcp.tool().
mcp = FastMCP("scout")
@mcp.tool()
async def scout_visit(url: str) -> str:
    """Visit a URL in the browser. Opens Chrome on the first call.
    Returns: page title, URL, and element count.
    """
    # Thin wrapper: browser.visit owns the session; we only format the reply.
    info = await browser.visit(url)
    return f"Title: {info.title}\nURL: {info.url}\nElements detected: {info.element_count}"
@mcp.tool()
async def scout_elements(filter: str = "") -> str:
    """List interactive elements on the current page.
    Args:
        filter: Optional category — "links", "buttons", "forms", "selects",
            "inputs", or "" for all.
    Returns: structured text list with selectors for use in scout_click.
    """
    # NOTE: `filter` shadows the builtin, but renaming it would change the
    # parameter name exposed in the MCP tool schema — leave as-is.
    elements = await browser.get_elements(filter)
    return browser.format_elements(elements)
@mcp.tool()
async def scout_click(selector: str) -> str:
    """Click an element on the current page.
    Args:
        selector: "text=Foo" to click by visible text, or a CSS selector
            like "a[href*=COFFEE]" or "button.download-btn".
    Returns: new page title and URL if navigation occurred.
    """
    # browser.click waits a couple of seconds for any navigation to settle.
    info = await browser.click(selector)
    return f"Clicked {selector!r}\nTitle: {info.title}\nURL: {info.url}\nElements: {info.element_count}"
@mcp.tool()
async def scout_fill(selector: str, value: str) -> str:
    """Type a value into a form field.
    Args:
        selector: CSS selector or "text=Label" for the input field.
        value: The text to type.
    """
    # Delegates to browser.fill (insert_text — no keystroke simulation).
    return await browser.fill(selector, value)
@mcp.tool()
async def scout_select(selector: str, value: str) -> str:
    """Select an option in a <select> dropdown.
    Args:
        selector: CSS selector for the <select> element.
        value: The option value or text to select.
    """
    # Delegates to browser.select_option on the shared tab.
    return await browser.select_option(selector, value)
@mcp.tool()
async def scout_scroll(direction: str = "down", amount_px: int = 400) -> str:
    """Scroll the page up or down.
    Args:
        direction: "down" or "up".
        amount_px: Number of pixels to scroll (default 400).

    Raises:
        ValueError: if direction is not "down" or "up".
    """
    # Validate with a real exception: assert is stripped under `python -O`,
    # and ValueError surfaces as a proper MCP tool error to the client.
    if direction not in ("down", "up"):
        raise ValueError(f"direction must be 'down' or 'up', got {direction!r}")
    return await browser.scroll(direction, amount_px)
@mcp.tool()
async def scout_text(selector: str = "") -> str:
    """Get visible text from the page or a specific element.
    Args:
        selector: CSS selector for a specific element, or "" for full page body text.
    Text is truncated to 3000 chars.
    """
    # Delegates to browser.get_text; truncation happens there.
    return await browser.get_text(selector)
@mcp.tool()
async def scout_screenshot(label: str = "") -> str:
    """Take a screenshot and save to data/scout/. Use Read tool to view it.
    Args:
        label: Optional label included in the filename.
    Returns: file path to the saved PNG.
    """
    # Screenshots are opt-in by design — all other tools return text only.
    path = await browser.screenshot(label)
    return f"Screenshot saved: {path}\nUse the Read tool to view it."
@mcp.tool()
async def scout_har_start() -> str:
    """Start recording all network traffic (HAR format).
    Call scout_har_stop when done navigating. HAR spans all tool calls
    between start and stop — visit, click, fill, etc.
    """
    # browser.har_start spawns a background task holding the recorder open.
    return await browser.har_start()
@mcp.tool()
async def scout_har_stop() -> str:
    """Stop network recording and save the HAR file.
    Returns the HAR file path. Pass to scout_analyze to extract API endpoints.
    """
    # Signals the recording task to save and waits for the resulting path.
    return await browser.har_stop()
@mcp.tool()
async def scout_analyze(har_path: str) -> str:
    """Analyze a HAR file for API endpoints, POST requests, and data downloads.
    Filters out static assets (JS, CSS, images, fonts) and summarizes:
    - API calls (JSON responses, POST requests)
    - Downloads (CSV, PDF, Excel)
    Args:
        har_path: Path to the HAR file returned by scout_har_stop.
    """
    # Pure file analysis — does not need the browser to be open.
    summary = analyze.analyze_har_file(har_path)
    return analyze.format_summary(summary)
@mcp.tool()
async def scout_close() -> str:
    """Close the browser and clean up. Stops any active HAR recording first."""
    # Safe to call even when no browser is open — browser.close is best-effort.
    return await browser.close()
def main() -> None:
    """Entry point for the `scout-server` console script — serves over stdio."""
    mcp.run()

1107
tools/scout/uv.lock generated Normal file

File diff suppressed because it is too large Load Diff

34
uv.lock generated
View File

@@ -1540,6 +1540,7 @@ version = "0.1.0"
source = { editable = "." }
dependencies = [
{ name = "hcloud" },
{ name = "msgspec" },
{ name = "niquests" },
{ name = "prefect" },
{ name = "python-dotenv" },
@@ -1565,6 +1566,7 @@ exploration = [
[package.metadata]
requires-dist = [
{ name = "hcloud", specifier = ">=2.8.0" },
{ name = "msgspec", specifier = ">=0.19" },
{ name = "niquests", specifier = ">=3.15.2" },
{ name = "prefect", specifier = ">=3.6.15" },
{ name = "python-dotenv", specifier = ">=1.1.0" },
@@ -1606,6 +1608,38 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]
[[package]]
name = "msgspec"
version = "0.20.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ea/9c/bfbd12955a49180cbd234c5d29ec6f74fe641698f0cd9df154a854fc8a15/msgspec-0.20.0.tar.gz", hash = "sha256:692349e588fde322875f8d3025ac01689fead5901e7fb18d6870a44519d62a29", size = 317862, upload-time = "2025-11-24T03:56:28.934Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8a/d1/b902d38b6e5ba3bdddbec469bba388d647f960aeed7b5b3623a8debe8a76/msgspec-0.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c1ff8db03be7598b50dd4b4a478d6fe93faae3bd54f4f17aa004d0e46c14c46", size = 196463, upload-time = "2025-11-24T03:55:43.405Z" },
{ url = "https://files.pythonhosted.org/packages/57/b6/eff0305961a1d9447ec2b02f8c73c8946f22564d302a504185b730c9a761/msgspec-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f6532369ece217fd37c5ebcfd7e981f2615628c21121b7b2df9d3adcf2fd69b8", size = 188650, upload-time = "2025-11-24T03:55:44.761Z" },
{ url = "https://files.pythonhosted.org/packages/99/93/f2ec1ae1de51d3fdee998a1ede6b2c089453a2ee82b5c1b361ed9095064a/msgspec-0.20.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9a1697da2f85a751ac3cc6a97fceb8e937fc670947183fb2268edaf4016d1ee", size = 218834, upload-time = "2025-11-24T03:55:46.441Z" },
{ url = "https://files.pythonhosted.org/packages/28/83/36557b04cfdc317ed8a525c4993b23e43a8fbcddaddd78619112ca07138c/msgspec-0.20.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7fac7e9c92eddcd24c19d9e5f6249760941485dff97802461ae7c995a2450111", size = 224917, upload-time = "2025-11-24T03:55:48.06Z" },
{ url = "https://files.pythonhosted.org/packages/8f/56/362037a1ed5be0b88aced59272442c4b40065c659700f4b195a7f4d0ac88/msgspec-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f953a66f2a3eb8d5ea64768445e2bb301d97609db052628c3e1bcb7d87192a9f", size = 222821, upload-time = "2025-11-24T03:55:49.388Z" },
{ url = "https://files.pythonhosted.org/packages/92/75/fa2370ec341cedf663731ab7042e177b3742645c5dd4f64dc96bd9f18a6b/msgspec-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:247af0313ae64a066d3aea7ba98840f6681ccbf5c90ba9c7d17f3e39dbba679c", size = 227227, upload-time = "2025-11-24T03:55:51.125Z" },
{ url = "https://files.pythonhosted.org/packages/f1/25/5e8080fe0117f799b1b68008dc29a65862077296b92550632de015128579/msgspec-0.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:67d5e4dfad52832017018d30a462604c80561aa62a9d548fc2bd4e430b66a352", size = 189966, upload-time = "2025-11-24T03:55:52.458Z" },
{ url = "https://files.pythonhosted.org/packages/79/b6/63363422153937d40e1cb349c5081338401f8529a5a4e216865decd981bf/msgspec-0.20.0-cp313-cp313-win_arm64.whl", hash = "sha256:91a52578226708b63a9a13de287b1ec3ed1123e4a088b198143860c087770458", size = 175378, upload-time = "2025-11-24T03:55:53.721Z" },
{ url = "https://files.pythonhosted.org/packages/bb/18/62dc13ab0260c7d741dda8dc7f481495b93ac9168cd887dda5929880eef8/msgspec-0.20.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:eead16538db1b3f7ec6e3ed1f6f7c5dec67e90f76e76b610e1ffb5671815633a", size = 196407, upload-time = "2025-11-24T03:55:55.001Z" },
{ url = "https://files.pythonhosted.org/packages/dd/1d/b9949e4ad6953e9f9a142c7997b2f7390c81e03e93570c7c33caf65d27e1/msgspec-0.20.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:703c3bb47bf47801627fb1438f106adbfa2998fe586696d1324586a375fca238", size = 188889, upload-time = "2025-11-24T03:55:56.311Z" },
{ url = "https://files.pythonhosted.org/packages/1e/19/f8bb2dc0f1bfe46cc7d2b6b61c5e9b5a46c62298e8f4d03bbe499c926180/msgspec-0.20.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6cdb227dc585fb109305cee0fd304c2896f02af93ecf50a9c84ee54ee67dbb42", size = 219691, upload-time = "2025-11-24T03:55:57.908Z" },
{ url = "https://files.pythonhosted.org/packages/b8/8e/6b17e43f6eb9369d9858ee32c97959fcd515628a1df376af96c11606cf70/msgspec-0.20.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27d35044dd8818ac1bd0fedb2feb4fbdff4e3508dd7c5d14316a12a2d96a0de0", size = 224918, upload-time = "2025-11-24T03:55:59.322Z" },
{ url = "https://files.pythonhosted.org/packages/1c/db/0e833a177db1a4484797adba7f429d4242585980b90882cc38709e1b62df/msgspec-0.20.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b4296393a29ee42dd25947981c65506fd4ad39beaf816f614146fa0c5a6c91ae", size = 223436, upload-time = "2025-11-24T03:56:00.716Z" },
{ url = "https://files.pythonhosted.org/packages/c3/30/d2ee787f4c918fd2b123441d49a7707ae9015e0e8e1ab51aa7967a97b90e/msgspec-0.20.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:205fbdadd0d8d861d71c8f3399fe1a82a2caf4467bc8ff9a626df34c12176980", size = 227190, upload-time = "2025-11-24T03:56:02.371Z" },
{ url = "https://files.pythonhosted.org/packages/ff/37/9c4b58ff11d890d788e700b827db2366f4d11b3313bf136780da7017278b/msgspec-0.20.0-cp314-cp314-win_amd64.whl", hash = "sha256:7dfebc94fe7d3feec6bc6c9df4f7e9eccc1160bb5b811fbf3e3a56899e398a6b", size = 193950, upload-time = "2025-11-24T03:56:03.668Z" },
{ url = "https://files.pythonhosted.org/packages/e9/4e/cab707bf2fa57408e2934e5197fc3560079db34a1e3cd2675ff2e47e07de/msgspec-0.20.0-cp314-cp314-win_arm64.whl", hash = "sha256:2ad6ae36e4a602b24b4bf4eaf8ab5a441fec03e1f1b5931beca8ebda68f53fc0", size = 179018, upload-time = "2025-11-24T03:56:05.038Z" },
{ url = "https://files.pythonhosted.org/packages/4c/06/3da3fc9aaa55618a8f43eb9052453cfe01f82930bca3af8cea63a89f3a11/msgspec-0.20.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f84703e0e6ef025663dd1de828ca028774797b8155e070e795c548f76dde65d5", size = 200389, upload-time = "2025-11-24T03:56:06.375Z" },
{ url = "https://files.pythonhosted.org/packages/83/3b/cc4270a5ceab40dfe1d1745856951b0a24fd16ac8539a66ed3004a60c91e/msgspec-0.20.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7c83fc24dd09cf1275934ff300e3951b3adc5573f0657a643515cc16c7dee131", size = 193198, upload-time = "2025-11-24T03:56:07.742Z" },
{ url = "https://files.pythonhosted.org/packages/cd/ae/4c7905ac53830c8e3c06fdd60e3cdcfedc0bbc993872d1549b84ea21a1bd/msgspec-0.20.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f13ccb1c335a124e80c4562573b9b90f01ea9521a1a87f7576c2e281d547f56", size = 225973, upload-time = "2025-11-24T03:56:09.18Z" },
{ url = "https://files.pythonhosted.org/packages/d9/da/032abac1de4d0678d99eaeadb1323bd9d247f4711c012404ba77ed6f15ca/msgspec-0.20.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17c2b5ca19f19306fc83c96d85e606d2cc107e0caeea85066b5389f664e04846", size = 229509, upload-time = "2025-11-24T03:56:10.898Z" },
{ url = "https://files.pythonhosted.org/packages/69/52/fdc7bdb7057a166f309e0b44929e584319e625aaba4771b60912a9321ccd/msgspec-0.20.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d931709355edabf66c2dd1a756b2d658593e79882bc81aae5964969d5a291b63", size = 230434, upload-time = "2025-11-24T03:56:12.48Z" },
{ url = "https://files.pythonhosted.org/packages/cb/fe/1dfd5f512b26b53043884e4f34710c73e294e7cc54278c3fe28380e42c37/msgspec-0.20.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:565f915d2e540e8a0c93a01ff67f50aebe1f7e22798c6a25873f9fda8d1325f8", size = 231758, upload-time = "2025-11-24T03:56:13.765Z" },
{ url = "https://files.pythonhosted.org/packages/97/f6/9ba7121b8e0c4e0beee49575d1dbc804e2e72467692f0428cf39ceba1ea5/msgspec-0.20.0-cp314-cp314t-win_amd64.whl", hash = "sha256:726f3e6c3c323f283f6021ebb6c8ccf58d7cd7baa67b93d73bfbe9a15c34ab8d", size = 206540, upload-time = "2025-11-24T03:56:15.029Z" },
{ url = "https://files.pythonhosted.org/packages/c8/3e/c5187de84bb2c2ca334ab163fcacf19a23ebb1d876c837f81a1b324a15bf/msgspec-0.20.0-cp314-cp314t-win_arm64.whl", hash = "sha256:93f23528edc51d9f686808a361728e903d6f2be55c901d6f5c92e44c6d546bfc", size = 183011, upload-time = "2025-11-24T03:56:16.442Z" },
]
[[package]]
name = "multitasking"
version = "0.0.12"