diff --git a/.claude/settings.json b/.claude/settings.json index c72c6b73..aa06f43d 100644 --- a/.claude/settings.json +++ b/.claude/settings.json @@ -1,3 +1,7 @@ { - "enabledPlugins": {} + "enabledPlugins": { + "frontend-design@claude-plugins-official": true, + "context7@claude-plugins-official": true, + "playwright@claude-plugins-official": true + } } diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 00000000..7234c068 --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,18 @@ +{ + "permissions": { + "allow": [ + "Bash(codex exec:*)", + "WebSearch", + "WebFetch(domain:polygon.io)", + "WebFetch(domain:massive.com)", + "Bash(git checkout:*)", + "Bash(git add:*)", + "Bash(git commit -m ':*)", + "Bash(git push:*)", + "Bash(gh pr:*)", + "Bash(uv run:*)", + "Bash(git pull:*)", + "Bash(gh api:*)" + ] + } +} diff --git a/CLAUDE.md b/CLAUDE.md index db347335..006cedf1 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -2,6 +2,6 @@ All project documentation is in the `planning` directory. -The key document is PLAN.md included in full here: +The key document is PLAN.md included in full below; the market data component has been completed and is summarized in the file `planning/MARKET_DATA_DESIGN.md` with more details in the `planning/archive` folder. Consult these docs only when required. The remainder of the platform is still to be developed. -@planning/PLAN.md +@planning/PLAN.md \ No newline at end of file diff --git a/backend/CLAUDE.md b/backend/CLAUDE.md new file mode 100644 index 00000000..64af5639 --- /dev/null +++ b/backend/CLAUDE.md @@ -0,0 +1,59 @@ +# Backend — Developer Guide + +## Project Setup + +```bash +cd backend +uv sync --extra dev # Install all dependencies including test/lint tools +``` + +## Market Data API + +The market data subsystem lives in `app/market/`. 
Use these imports: + +```python +from app.market import PriceCache, PriceUpdate, MarketDataSource, create_market_data_source +``` + +### Core Types + +- **`PriceUpdate`** — Immutable dataclass: `ticker`, `price`, `prev_price`, `open_price`, `timestamp`, plus properties `change`, `change_percent`, `direction` ("up"/"down"/"flat"), and `to_dict()` for JSON serialization. `open_price` is the session-start baseline (never changes). `to_dict()` returns an ISO 8601 timestamp string. + +- **`PriceCache`** — Thread-safe in-memory store. Key methods: + - `update(ticker, price, timestamp=None, open_price=None) -> PriceUpdate` + - `get(ticker) -> PriceUpdate | None` + - `get_price(ticker) -> float | None` + - `get_all() -> dict[str, PriceUpdate]` + - `remove(ticker)` + - `version` property — monotonic counter, increments on every update (for SSE change detection) + +- **`MarketDataSource`** — Abstract interface implemented by `SimulatorDataSource` and `MassiveDataSource`. Lifecycle: `start(tickers)` -> `add_ticker()` / `remove_ticker()` -> `stop()`. + +- **`create_market_data_source(cache)`** — Factory. Returns `MassiveDataSource` if `MASSIVE_API_KEY` is set, otherwise `SimulatorDataSource`. + +### SSE Streaming + +```python +from app.market import create_stream_router + +router = create_stream_router(price_cache) # Returns FastAPI APIRouter +# Endpoint: GET /api/stream/prices (text/event-stream) +``` + +### Seed Data + +Default tickers: AAPL, GOOGL, MSFT, AMZN, TSLA, NVDA, META, JPM, V, NFLX. Seed prices and per-ticker volatility/drift params are in `app/market/seed_prices.py`. 
+ +## Running Tests + +```bash +uv run --extra dev pytest -v # All tests +uv run --extra dev pytest --cov=app # With coverage +uv run --extra dev ruff check app/ tests/ # Lint +``` + +## Demo + +```bash +uv run market_data_demo.py # Live terminal dashboard with simulated prices +``` diff --git a/backend/app/market/cache.py b/backend/app/market/cache.py index 4d021577..6b64680e 100644 --- a/backend/app/market/cache.py +++ b/backend/app/market/cache.py @@ -20,21 +20,35 @@ def __init__(self) -> None: self._lock = Lock() self._version: int = 0 # Monotonically increasing; bumped on every update - def update(self, ticker: str, price: float, timestamp: float | None = None) -> PriceUpdate: + def update( + self, + ticker: str, + price: float, + timestamp: float | None = None, + open_price: float | None = None, + ) -> PriceUpdate: """Record a new price for a ticker. Returns the created PriceUpdate. - Automatically computes direction and change from the previous price. - If this is the first update for the ticker, previous_price == price (direction='flat'). + open_price is only used on the first update for a ticker; ignored on + subsequent calls so the session-start baseline is never overwritten. + If not provided on the first update, price is used as the baseline. 
""" with self._lock: ts = timestamp or time.time() prev = self._prices.get(ticker) - previous_price = prev.price if prev else price + + if prev: + prev_price = prev.price + effective_open = prev.open_price # Never overwrite + else: + prev_price = price + effective_open = open_price if open_price is not None else price update = PriceUpdate( ticker=ticker, price=round(price, 2), - previous_price=round(previous_price, 2), + prev_price=round(prev_price, 2), + open_price=round(effective_open, 2), timestamp=ts, ) self._prices[ticker] = update diff --git a/backend/app/market/factory.py b/backend/app/market/factory.py index 00360e94..d1a1ac54 100644 --- a/backend/app/market/factory.py +++ b/backend/app/market/factory.py @@ -7,8 +7,6 @@ from .cache import PriceCache from .interface import MarketDataSource -from .massive_client import MassiveDataSource -from .simulator import SimulatorDataSource logger = logging.getLogger(__name__) @@ -19,13 +17,20 @@ def create_market_data_source(price_cache: PriceCache) -> MarketDataSource: - MASSIVE_API_KEY set and non-empty → MassiveDataSource (real market data) - Otherwise → SimulatorDataSource (GBM simulation) + Imports are lazy so the `massive` package is only required when + MASSIVE_API_KEY is actually set. + Returns an unstarted source. Caller must await source.start(tickers). 
""" api_key = os.environ.get("MASSIVE_API_KEY", "").strip() if api_key: + from .massive_client import MassiveDataSource # noqa: PLC0415 + logger.info("Market data source: Massive API (real data)") return MassiveDataSource(api_key=api_key, price_cache=price_cache) else: + from .simulator import SimulatorDataSource # noqa: PLC0415 + logger.info("Market data source: GBM Simulator") return SimulatorDataSource(price_cache=price_cache) diff --git a/backend/app/market/massive_client.py b/backend/app/market/massive_client.py index 00bc7b2a..ea44435f 100644 --- a/backend/app/market/massive_client.py +++ b/backend/app/market/massive_client.py @@ -4,9 +4,7 @@ import asyncio import logging - -from massive import RESTClient -from massive.rest.models import SnapshotMarketType +from typing import Any from .cache import PriceCache from .interface import MarketDataSource @@ -23,6 +21,9 @@ class MassiveDataSource(MarketDataSource): Rate limits: - Free tier: 5 req/min → poll every 15s (default) - Paid tiers: higher limits → poll every 2-5s + + The `massive` package is imported lazily inside start() so it is only + required when MASSIVE_API_KEY is actually set. """ def __init__( @@ -36,9 +37,13 @@ def __init__( self._interval = poll_interval self._tickers: list[str] = [] self._task: asyncio.Task | None = None - self._client: RESTClient | None = None + self._client: Any = None async def start(self, tickers: list[str]) -> None: + # Lazy import: only required when MASSIVE_API_KEY is set. + # Students without a Massive API key never need this package installed. 
+ from massive import RESTClient # noqa: PLC0415 + self._client = RESTClient(api_key=self._api_key) self._tickers = list(tickers) @@ -101,10 +106,17 @@ async def _poll_once(self) -> None: price = snap.last_trade.price # Massive timestamps are Unix milliseconds → convert to seconds timestamp = snap.last_trade.timestamp / 1000.0 + # Use day.open as open_price; fall back to prev_day.close pre-market + open_price = None + if snap.day and snap.day.open: + open_price = snap.day.open + elif snap.prev_day and snap.prev_day.close: + open_price = snap.prev_day.close self._cache.update( ticker=snap.ticker, price=price, timestamp=timestamp, + open_price=open_price, ) processed += 1 except (AttributeError, TypeError) as e: @@ -122,6 +134,8 @@ async def _poll_once(self) -> None: def _fetch_snapshots(self) -> list: """Synchronous call to the Massive REST API. Runs in a thread.""" + from massive.rest.models import SnapshotMarketType # noqa: PLC0415 + return self._client.get_snapshot_all( market_type=SnapshotMarketType.STOCKS, tickers=self._tickers, diff --git a/backend/app/market/models.py b/backend/app/market/models.py index de81b1db..35182d14 100644 --- a/backend/app/market/models.py +++ b/backend/app/market/models.py @@ -4,6 +4,7 @@ import time from dataclasses import dataclass, field +from datetime import datetime, timezone @dataclass(frozen=True, slots=True) @@ -12,37 +13,45 @@ class PriceUpdate: ticker: str price: float - previous_price: float + prev_price: float # Price from the previous update + open_price: float # Session-start seed price — set once, never overwritten timestamp: float = field(default_factory=time.time) # Unix seconds @property def change(self) -> float: """Absolute price change from previous update.""" - return round(self.price - self.previous_price, 4) + return round(self.price - self.prev_price, 4) @property def change_percent(self) -> float: """Percentage change from previous update.""" - if self.previous_price == 0: + if self.prev_price == 0: return 
0.0 - return round((self.price - self.previous_price) / self.previous_price * 100, 4) + return round((self.price - self.prev_price) / self.prev_price * 100, 4) @property def direction(self) -> str: """'up', 'down', or 'flat'.""" - if self.price > self.previous_price: + if self.price > self.prev_price: return "up" - elif self.price < self.previous_price: + elif self.price < self.prev_price: return "down" return "flat" def to_dict(self) -> dict: - """Serialize for JSON / SSE transmission.""" + """Serialize for JSON / SSE transmission. + + timestamp is formatted as ISO 8601 UTC string per PLAN.md §6. + """ + ts_iso = datetime.fromtimestamp(self.timestamp, tz=timezone.utc).isoformat().replace( + "+00:00", "Z" + ) return { "ticker": self.ticker, "price": self.price, - "previous_price": self.previous_price, - "timestamp": self.timestamp, + "prev_price": self.prev_price, + "open_price": self.open_price, + "timestamp": ts_iso, "change": self.change, "change_percent": self.change_percent, "direction": self.direction, diff --git a/backend/app/market/seed_prices.py b/backend/app/market/seed_prices.py index 69586df0..34b0effa 100644 --- a/backend/app/market/seed_prices.py +++ b/backend/app/market/seed_prices.py @@ -33,6 +33,9 @@ # Default parameters for tickers not in the list above (dynamically added) DEFAULT_PARAMS: dict[str, float] = {"sigma": 0.25, "mu": 0.05} +# Default seed price for unknown tickers (per PLAN.md §6) +DEFAULT_SEED_PRICE: float = 100.00 + # Correlation groups for the simulator's Cholesky decomposition # Tickers in the same group have higher intra-group correlation CORRELATION_GROUPS: dict[str, set[str]] = { @@ -41,7 +44,7 @@ } # Correlation coefficients -INTRA_TECH_CORR = 0.6 # Tech stocks move together +INTRA_TECH_CORR = 0.6 # Tech stocks move together INTRA_FINANCE_CORR = 0.5 # Finance stocks move together CROSS_GROUP_CORR = 0.3 # Between sectors / unknown tickers -TSLA_CORR = 0.3 # TSLA does its own thing +TSLA_CORR = 0.25 # TSLA does its own thing 
(issue #9) diff --git a/backend/app/market/simulator.py b/backend/app/market/simulator.py index b6803f59..6c46e5fa 100644 --- a/backend/app/market/simulator.py +++ b/backend/app/market/simulator.py @@ -15,6 +15,7 @@ CORRELATION_GROUPS, CROSS_GROUP_CORR, DEFAULT_PARAMS, + DEFAULT_SEED_PRICE, INTRA_FINANCE_CORR, INTRA_TECH_CORR, SEED_PRICES, @@ -148,7 +149,7 @@ def _add_ticker_internal(self, ticker: str) -> None: if ticker in self._prices: return self._tickers.append(ticker) - self._prices[ticker] = SEED_PRICES.get(ticker, random.uniform(50.0, 300.0)) + self._prices[ticker] = SEED_PRICES.get(ticker, DEFAULT_SEED_PRICE) self._params[ticker] = TICKER_PARAMS.get(ticker, dict(DEFAULT_PARAMS)) def _rebuild_cholesky(self) -> None: diff --git a/backend/app/market/stream.py b/backend/app/market/stream.py index 7fd974b7..136d76f8 100644 --- a/backend/app/market/stream.py +++ b/backend/app/market/stream.py @@ -14,26 +14,28 @@ logger = logging.getLogger(__name__) -router = APIRouter(prefix="/api/stream", tags=["streaming"]) - def create_stream_router(price_cache: PriceCache) -> APIRouter: """Create the SSE streaming router with a reference to the price cache. - This factory pattern lets us inject the PriceCache without globals. + Returns a fresh APIRouter each call so the function is safe to call + multiple times (e.g. in tests) without registering duplicate routes. """ + router = APIRouter(prefix="/api/stream", tags=["streaming"]) @router.get("/prices") async def stream_prices(request: Request) -> StreamingResponse: """SSE endpoint for live price updates. - Streams all tracked ticker prices every ~500ms. The client connects - with EventSource and receives events in the format: + Streams one event per ticker every ~500ms. 
Each event is a JSON + object for a single ticker (per PLAN.md §6): - data: {"AAPL": {"ticker": "AAPL", "price": 190.50, ...}, ...} + data: {"ticker":"AAPL","price":190.50,"prev_price":190.42, + "open_price":190.00,"timestamp":"2026-04-10T12:00:00.500Z", + "direction":"up"} Includes a retry directive so the browser auto-reconnects on - disconnection (EventSource built-in behavior). + disconnection (EventSource built-in behaviour). """ return StreamingResponse( _generate_events(price_cache, request), @@ -55,8 +57,8 @@ async def _generate_events( ) -> AsyncGenerator[str, None]: """Async generator that yields SSE-formatted price events. - Sends all prices every `interval` seconds. Stops when the client - disconnects (detected via request.is_disconnected()). + Sends one event per ticker every `interval` seconds. Stops when the + client disconnects (detected via request.is_disconnected()). """ # Tell the client to retry after 1 second if the connection drops yield "retry: 1000\n\n" @@ -76,10 +78,8 @@ async def _generate_events( if current_version != last_version: last_version = current_version prices = price_cache.get_all() - - if prices: - data = {ticker: update.to_dict() for ticker, update in prices.items()} - payload = json.dumps(data) + for update in prices.values(): + payload = json.dumps(update.to_dict()) yield f"data: {payload}\n\n" await asyncio.sleep(interval) diff --git a/backend/market_data_demo.py b/backend/market_data_demo.py new file mode 100644 index 00000000..7414416c --- /dev/null +++ b/backend/market_data_demo.py @@ -0,0 +1,272 @@ +"""FinAlly Market Data Simulator Demo. + +Run with: uv run market_data_demo.py + +Displays a live-updating terminal dashboard of simulated stock prices +using the GBM simulator and Rich library. 
+""" + +from __future__ import annotations + +import asyncio +import time +from collections import deque + +from rich.console import Console +from rich.layout import Layout +from rich.live import Live +from rich.panel import Panel +from rich.table import Table +from rich.text import Text + +from app.market.cache import PriceCache +from app.market.seed_prices import SEED_PRICES +from app.market.simulator import SimulatorDataSource + +# Sparkline characters, low to high +SPARK_CHARS = "▁▂▃▄▅▆▇█" + +# Ordered ticker list matching the default watchlist +TICKERS = ["AAPL", "GOOGL", "MSFT", "AMZN", "TSLA", "NVDA", "META", "JPM", "V", "NFLX"] + +DURATION = 60 # seconds + + +def sparkline(values: list[float]) -> str: + """Render a sequence of values as a unicode sparkline.""" + if len(values) < 2: + return "" + lo, hi = min(values), max(values) + spread = hi - lo + if spread == 0: + return SPARK_CHARS[3] * len(values) + n = len(SPARK_CHARS) - 1 + return "".join(SPARK_CHARS[int((v - lo) / spread * n)] for v in values) + + +def format_price(price: float) -> str: + """Format a price with comma separator.""" + if price >= 1000: + return f"{price:,.2f}" + return f"{price:.2f}" + + +def build_table( + cache: PriceCache, + history: dict[str, deque], + elapsed: float, +) -> Table: + """Build the price table.""" + table = Table( + title=None, + expand=True, + border_style="bright_black", + header_style="bold bright_white", + pad_edge=True, + padding=(0, 1), + ) + table.add_column("Ticker", style="bold bright_white", width=8) + table.add_column("Price", justify="right", width=10) + table.add_column("Change", justify="right", width=9) + table.add_column("Chg %", justify="right", width=8) + table.add_column("", width=3) # arrow + table.add_column("Sparkline", width=42, no_wrap=True) + + for ticker in TICKERS: + update = cache.get(ticker) + if update is None: + table.add_row(ticker, "---", "---", "---", "", "") + continue + + # Direction styling + if update.direction == "up": + color = 
"green" + arrow = "[bold green]\u25b2[/]" + elif update.direction == "down": + color = "red" + arrow = "[bold red]\u25bc[/]" + else: + color = "bright_black" + arrow = "[bright_black]\u2500[/]" + + price_str = f"[{color}]${format_price(update.price)}[/]" + change_str = f"[{color}]{update.change:+.2f}[/]" + pct_str = f"[{color}]{update.change_percent:+.2f}%[/]" + + # Sparkline from history + vals = list(history.get(ticker, [])) + spark_str = f"[bright_cyan]{sparkline(vals)}[/]" if len(vals) > 1 else "" + + table.add_row(ticker, price_str, change_str, pct_str, arrow, spark_str) + + return table + + +def build_event_log(events: deque) -> Panel: + """Build the event log panel.""" + text = Text() + for evt in events: + text.append(evt) + text.append("\n") + if not events: + text.append("Watching for notable moves (>1% change)...", style="bright_black italic") + return Panel( + text, + title="[bold bright_yellow]Recent Events[/]", + border_style="bright_black", + height=8, + ) + + +def build_dashboard( + cache: PriceCache, + history: dict[str, deque], + events: deque, + start_time: float, +) -> Layout: + """Build the full dashboard layout.""" + elapsed = time.time() - start_time + remaining = max(0, DURATION - elapsed) + + layout = Layout() + layout.split_column( + Layout(name="header", size=3), + Layout(name="body"), + Layout(name="footer", size=10), + ) + + # Header + header_text = Text.assemble( + (" FinAlly ", "bold bright_yellow"), + ("Market Data Simulator", "bold bright_white"), + (" | ", "bright_black"), + (f"{elapsed:5.1f}s elapsed", "bright_cyan"), + (" | ", "bright_black"), + (f"{remaining:4.1f}s remaining", "bright_cyan"), + (" | ", "bright_black"), + (f"{len(cache)} tickers", "bright_white"), + (" | ", "bright_black"), + ("Ctrl+C to exit", "bright_black italic"), + ) + layout["header"].update(Panel(header_text, border_style="bright_yellow")) + + # Body: price table + layout["body"].update( + Panel( + build_table(cache, history, elapsed), + title="[bold 
bright_white]Live Prices[/]", + border_style="bright_black", + ) + ) + + # Footer: event log + layout["footer"].update(build_event_log(events)) + + return layout + + +def print_summary(cache: PriceCache) -> None: + """Print final summary comparing to seed prices.""" + console = Console() + console.print() + console.print("[bold bright_yellow] FinAlly[/] [bold]Session Summary[/]") + console.print() + + table = Table(border_style="bright_black", header_style="bold bright_white", expand=False) + table.add_column("Ticker", style="bold bright_white", width=8) + table.add_column("Seed Price", justify="right", width=12) + table.add_column("Final Price", justify="right", width=12) + table.add_column("Session Change", justify="right", width=14) + + for ticker in TICKERS: + seed = SEED_PRICES.get(ticker, 0) + update = cache.get(ticker) + if update is None: + continue + final = update.price + session_change = ((final - seed) / seed) * 100 if seed else 0 + + if session_change > 0: + color = "green" + elif session_change < 0: + color = "red" + else: + color = "bright_black" + + table.add_row( + ticker, + f"${format_price(seed)}", + f"[{color}]${format_price(final)}[/]", + f"[{color}]{session_change:+.2f}%[/]", + ) + + console.print(table) + console.print() + + +async def run() -> None: + """Main demo loop.""" + cache = PriceCache() + source = SimulatorDataSource(price_cache=cache, update_interval=0.5) + + # Per-ticker price history for sparklines + history: dict[str, deque] = {t: deque(maxlen=40) for t in TICKERS} + + # Recent event log + events: deque = deque(maxlen=12) + + await source.start(TICKERS) + start_time = time.time() + + # Seed initial history points + for ticker in TICKERS: + update = cache.get(ticker) + if update: + history[ticker].append(update.price) + + try: + with Live( + build_dashboard(cache, history, events, start_time), + refresh_per_second=4, + screen=True, + ) as live: + last_version = cache.version + while time.time() - start_time < DURATION: + await 
asyncio.sleep(0.25) + + # Check for updates + if cache.version == last_version: + continue + last_version = cache.version + + # Record history & detect events + for ticker in TICKERS: + update = cache.get(ticker) + if update is None: + continue + history[ticker].append(update.price) + + # Log notable moves + if abs(update.change_percent) > 1.0: + direction = "\u25b2" if update.direction == "up" else "\u25bc" + color = "green" if update.direction == "up" else "red" + timestamp = time.strftime("%H:%M:%S") + events.appendleft( + f"[bright_black]{timestamp}[/] " + f"[bold {color}]{direction} {ticker}[/] " + f"[{color}]{update.change_percent:+.2f}%[/] " + f"${format_price(update.price)}" + ) + + live.update(build_dashboard(cache, history, events, start_time)) + + except KeyboardInterrupt: + pass + finally: + await source.stop() + + print_summary(cache) + + +if __name__ == "__main__": + asyncio.run(run()) diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 26c70870..8c3306f4 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -8,10 +8,11 @@ dependencies = [ "fastapi>=0.115.0", "uvicorn[standard]>=0.32.0", "numpy>=2.0.0", - "massive>=1.0.0", + "rich>=13.0.0", ] [project.optional-dependencies] +massive = ["massive>=1.0.0"] dev = [ "pytest>=8.3.0", "pytest-asyncio>=0.24.0", diff --git a/backend/tests/market/test_cache.py b/backend/tests/market/test_cache.py index b5ab3d55..6f456e14 100644 --- a/backend/tests/market/test_cache.py +++ b/backend/tests/market/test_cache.py @@ -7,7 +7,6 @@ class TestPriceCache: """Unit tests for the PriceCache.""" def test_update_and_get(self): - """Test updating and getting a price.""" cache = PriceCache() update = cache.update("AAPL", 190.50) assert update.ticker == "AAPL" @@ -15,14 +14,32 @@ def test_update_and_get(self): assert cache.get("AAPL") == update def test_first_update_is_flat(self): - """Test that the first update has flat direction.""" cache = PriceCache() update = cache.update("AAPL", 190.50) assert 
update.direction == "flat" - assert update.previous_price == 190.50 + assert update.prev_price == 190.50 + + def test_first_update_open_price_defaults_to_price(self): + """When no open_price provided, open_price equals the first price.""" + cache = PriceCache() + update = cache.update("AAPL", 190.50) + assert update.open_price == 190.50 + + def test_first_update_open_price_explicit(self): + """Explicitly provided open_price is stored on first update.""" + cache = PriceCache() + update = cache.update("AAPL", 192.00, open_price=190.00) + assert update.open_price == 190.00 + + def test_open_price_never_overwritten(self): + """open_price is fixed at session start regardless of subsequent updates.""" + cache = PriceCache() + cache.update("AAPL", 190.00, open_price=185.00) + cache.update("AAPL", 195.00, open_price=999.00) # open_price arg ignored + cache.update("AAPL", 188.00) + assert cache.get("AAPL").open_price == 185.00 def test_direction_up(self): - """Test price update with upward direction.""" cache = PriceCache() cache.update("AAPL", 190.00) update = cache.update("AAPL", 191.00) @@ -30,27 +47,29 @@ def test_direction_up(self): assert update.change == 1.00 def test_direction_down(self): - """Test price update with downward direction.""" cache = PriceCache() cache.update("AAPL", 190.00) update = cache.update("AAPL", 189.00) assert update.direction == "down" assert update.change == -1.00 + def test_prev_price_tracks_last_price(self): + cache = PriceCache() + cache.update("AAPL", 190.00) + update = cache.update("AAPL", 191.00) + assert update.prev_price == 190.00 + def test_remove(self): - """Test removing a ticker from cache.""" cache = PriceCache() cache.update("AAPL", 190.00) cache.remove("AAPL") assert cache.get("AAPL") is None def test_remove_nonexistent(self): - """Test removing a ticker that doesn't exist.""" cache = PriceCache() cache.remove("AAPL") # Should not raise def test_get_all(self): - """Test getting all prices.""" cache = PriceCache() 
cache.update("AAPL", 190.00) cache.update("GOOGL", 175.00) @@ -58,7 +77,6 @@ def test_get_all(self): assert set(all_prices.keys()) == {"AAPL", "GOOGL"} def test_version_increments(self): - """Test that version counter increments.""" cache = PriceCache() v0 = cache.version cache.update("AAPL", 190.00) @@ -67,14 +85,12 @@ def test_version_increments(self): assert cache.version == v0 + 2 def test_get_price_convenience(self): - """Test the convenience get_price method.""" cache = PriceCache() cache.update("AAPL", 190.50) assert cache.get_price("AAPL") == 190.50 assert cache.get_price("NOPE") is None def test_len(self): - """Test __len__ method.""" cache = PriceCache() assert len(cache) == 0 cache.update("AAPL", 190.00) @@ -83,21 +99,18 @@ def test_len(self): assert len(cache) == 2 def test_contains(self): - """Test __contains__ method.""" cache = PriceCache() cache.update("AAPL", 190.00) assert "AAPL" in cache assert "GOOGL" not in cache def test_custom_timestamp(self): - """Test updating with a custom timestamp.""" cache = PriceCache() custom_ts = 1234567890.0 update = cache.update("AAPL", 190.50, timestamp=custom_ts) assert update.timestamp == custom_ts def test_price_rounding(self): - """Test that prices are rounded to 2 decimal places.""" cache = PriceCache() update = cache.update("AAPL", 190.12345) assert update.price == 190.12 diff --git a/backend/tests/market/test_massive.py b/backend/tests/market/test_massive.py index cdd7dbd2..c1c11651 100644 --- a/backend/tests/market/test_massive.py +++ b/backend/tests/market/test_massive.py @@ -171,8 +171,8 @@ async def test_stop_cancels_task(self): cache = PriceCache() source = MassiveDataSource(api_key="test-key", price_cache=cache, poll_interval=10.0) - # Mock the client and start - with patch("app.market.massive_client.RESTClient"): + # RESTClient is a lazy import inside start(); patch it at the source module + with patch("massive.RESTClient"): with patch.object(source, "_fetch_snapshots", return_value=[]): await 
source.start(["AAPL"]) @@ -191,7 +191,7 @@ async def test_start_immediate_poll(self): mock_snapshots = [_make_snapshot("AAPL", 190.50, 1707580800000)] - with patch("app.market.massive_client.RESTClient"): + with patch("massive.RESTClient"): with patch.object(source, "_fetch_snapshots", return_value=mock_snapshots): await source.start(["AAPL"]) diff --git a/backend/tests/market/test_models.py b/backend/tests/market/test_models.py index 21600dfd..461ff614 100644 --- a/backend/tests/market/test_models.py +++ b/backend/tests/market/test_models.py @@ -9,69 +9,93 @@ class TestPriceUpdate: """Unit tests for the PriceUpdate model.""" def test_price_update_creation(self): - """Test basic PriceUpdate creation.""" - update = PriceUpdate(ticker="AAPL", price=190.50, previous_price=190.00, timestamp=1234567890.0) + update = PriceUpdate( + ticker="AAPL", price=190.50, prev_price=190.00, open_price=189.00, timestamp=1234567890.0 + ) assert update.ticker == "AAPL" assert update.price == 190.50 - assert update.previous_price == 190.00 + assert update.prev_price == 190.00 + assert update.open_price == 189.00 assert update.timestamp == 1234567890.0 def test_change_calculation(self): - """Test price change calculation.""" - update = PriceUpdate(ticker="AAPL", price=190.50, previous_price=190.00, timestamp=1234567890.0) + update = PriceUpdate( + ticker="AAPL", price=190.50, prev_price=190.00, open_price=190.00, timestamp=1234567890.0 + ) assert update.change == 0.50 def test_change_negative(self): - """Test negative price change.""" - update = PriceUpdate(ticker="AAPL", price=189.50, previous_price=190.00, timestamp=1234567890.0) + update = PriceUpdate( + ticker="AAPL", price=189.50, prev_price=190.00, open_price=190.00, timestamp=1234567890.0 + ) assert update.change == -0.50 def test_change_percent_up(self): - """Test percentage change calculation (up).""" - update = PriceUpdate(ticker="AAPL", price=190.00, previous_price=100.00, timestamp=1234567890.0) + update = PriceUpdate( + 
ticker="AAPL", price=190.00, prev_price=100.00, open_price=100.00, timestamp=1234567890.0 + ) assert update.change_percent == 90.0 def test_change_percent_down(self): - """Test percentage change calculation (down).""" - update = PriceUpdate(ticker="AAPL", price=100.00, previous_price=200.00, timestamp=1234567890.0) + update = PriceUpdate( + ticker="AAPL", price=100.00, prev_price=200.00, open_price=200.00, timestamp=1234567890.0 + ) assert update.change_percent == -50.0 def test_change_percent_zero_previous(self): - """Test percentage change with zero previous price.""" - update = PriceUpdate(ticker="AAPL", price=100.00, previous_price=0.00, timestamp=1234567890.0) + update = PriceUpdate( + ticker="AAPL", price=100.00, prev_price=0.00, open_price=0.00, timestamp=1234567890.0 + ) assert update.change_percent == 0.0 def test_direction_up(self): - """Test direction calculation (up).""" - update = PriceUpdate(ticker="AAPL", price=191.00, previous_price=190.00, timestamp=1234567890.0) + update = PriceUpdate( + ticker="AAPL", price=191.00, prev_price=190.00, open_price=190.00, timestamp=1234567890.0 + ) assert update.direction == "up" def test_direction_down(self): - """Test direction calculation (down).""" - update = PriceUpdate(ticker="AAPL", price=189.00, previous_price=190.00, timestamp=1234567890.0) + update = PriceUpdate( + ticker="AAPL", price=189.00, prev_price=190.00, open_price=190.00, timestamp=1234567890.0 + ) assert update.direction == "down" def test_direction_flat(self): - """Test direction calculation (flat).""" - update = PriceUpdate(ticker="AAPL", price=190.00, previous_price=190.00, timestamp=1234567890.0) + update = PriceUpdate( + ticker="AAPL", price=190.00, prev_price=190.00, open_price=190.00, timestamp=1234567890.0 + ) assert update.direction == "flat" - def test_to_dict(self): - """Test serialization to dictionary.""" - update = PriceUpdate(ticker="AAPL", price=190.50, previous_price=190.00, timestamp=1234567890.0) + def 
test_to_dict_fields(self): + update = PriceUpdate( + ticker="AAPL", price=190.50, prev_price=190.00, open_price=189.00, timestamp=1234567890.0 + ) result = update.to_dict() assert result["ticker"] == "AAPL" assert result["price"] == 190.50 - assert result["previous_price"] == 190.00 - assert result["timestamp"] == 1234567890.0 + assert result["prev_price"] == 190.00 + assert result["open_price"] == 189.00 assert result["change"] == 0.50 assert result["change_percent"] == 0.2632 # (0.50 / 190.00) * 100 assert result["direction"] == "up" - def test_immutability(self): - """Test that PriceUpdate is immutable.""" - update = PriceUpdate(ticker="AAPL", price=190.50, previous_price=190.00, timestamp=1234567890.0) + def test_to_dict_timestamp_is_iso_string(self): + """to_dict() must return an ISO 8601 UTC string, not a Unix float.""" + update = PriceUpdate( + ticker="AAPL", price=190.00, prev_price=190.00, open_price=190.00, timestamp=1234567890.0 + ) + result = update.to_dict() + ts = result["timestamp"] + assert isinstance(ts, str) + assert ts.endswith("Z") + assert "T" in ts + # 1234567890 UTC = 2009-02-13T23:31:30Z + assert ts == "2009-02-13T23:31:30Z" + def test_immutability(self): + update = PriceUpdate( + ticker="AAPL", price=190.50, prev_price=190.00, open_price=190.00, timestamp=1234567890.0 + ) with pytest.raises(AttributeError): - update.price = 200.00 # Should raise error + update.price = 200.00 diff --git a/backend/tests/market/test_simulator.py b/backend/tests/market/test_simulator.py index 1845ec16..81a02237 100644 --- a/backend/tests/market/test_simulator.py +++ b/backend/tests/market/test_simulator.py @@ -52,12 +52,11 @@ def test_remove_nonexistent_is_noop(self): sim = GBMSimulator(tickers=["AAPL"]) sim.remove_ticker("NOPE") # Should not raise - def test_unknown_ticker_gets_random_seed_price(self): - """Test that unknown tickers get random seed prices.""" + def test_unknown_ticker_gets_default_seed_price(self): + """Unknown tickers start at 
DEFAULT_SEED_PRICE ($100) per PLAN.md §6.""" + from app.market.seed_prices import DEFAULT_SEED_PRICE sim = GBMSimulator(tickers=["ZZZZ"]) - price = sim.get_price("ZZZZ") - assert price is not None - assert 50.0 <= price <= 300.0 + assert sim.get_price("ZZZZ") == DEFAULT_SEED_PRICE def test_empty_step(self): """Test stepping with no tickers.""" @@ -105,11 +104,11 @@ def test_pairwise_correlation_finance_stocks(self): assert corr == 0.5 def test_pairwise_correlation_tsla(self): - """Test that TSLA has lower correlation with everything.""" + """TSLA has the lowest correlation — 0.25 per design docs.""" corr = GBMSimulator._pairwise_correlation("TSLA", "AAPL") - assert corr == 0.3 + assert corr == 0.25 corr = GBMSimulator._pairwise_correlation("TSLA", "JPM") - assert corr == 0.3 + assert corr == 0.25 def test_pairwise_correlation_cross_sector(self): """Test cross-sector correlation.""" diff --git a/backend/tests/market/test_stream.py b/backend/tests/market/test_stream.py new file mode 100644 index 00000000..70869d2e --- /dev/null +++ b/backend/tests/market/test_stream.py @@ -0,0 +1,171 @@ +"""Tests for the SSE streaming endpoint.""" + +from __future__ import annotations + +import json +from unittest.mock import MagicMock + +import pytest + +from app.market.cache import PriceCache +from app.market.stream import _generate_events, create_stream_router + + +def _make_request(disconnect_after: int = 2) -> MagicMock: + """Build a mock Request that disconnects after `disconnect_after` checks.""" + request = MagicMock() + request.client.host = "127.0.0.1" + call_count = 0 + + async def is_disconnected() -> bool: + nonlocal call_count + call_count += 1 + return call_count > disconnect_after + + request.is_disconnected = is_disconnected + return request + + +async def _collect(gen, max_items: int = 20) -> list[str]: + """Collect up to max_items from an async generator.""" + items = [] + async for item in gen: + items.append(item) + if len(items) >= max_items: + break + return 
items + + +class TestCreateStreamRouter: + + def test_returns_router_with_prices_route(self): + """create_stream_router returns a router that has /api/stream/prices.""" + cache = PriceCache() + router = create_stream_router(cache) + routes = [r.path for r in router.routes] + assert "/api/stream/prices" in routes + + def test_returns_fresh_router_each_call(self): + """Calling create_stream_router twice returns independent routers.""" + cache = PriceCache() + r1 = create_stream_router(cache) + r2 = create_stream_router(cache) + assert r1 is not r2 + + def test_no_duplicate_routes_on_multiple_calls(self): + """Multiple calls do not accumulate duplicate routes.""" + cache = PriceCache() + r1 = create_stream_router(cache) + r2 = create_stream_router(cache) + assert len(r1.routes) == len(r2.routes) == 1 + + +@pytest.mark.asyncio +class TestGenerateEvents: + + async def test_first_event_is_retry_directive(self): + """Generator opens with a retry: directive for EventSource reconnection.""" + cache = PriceCache() + request = _make_request(disconnect_after=1) + events = await _collect(_generate_events(cache, request, interval=0.01)) + assert events[0] == "retry: 1000\n\n" + + async def test_yields_one_event_per_ticker(self): + """Each ticker gets its own SSE event (not batched).""" + cache = PriceCache() + cache.update("AAPL", 190.50, open_price=190.00) + cache.update("GOOGL", 175.25, open_price=175.00) + + request = _make_request(disconnect_after=3) + events = await _collect(_generate_events(cache, request, interval=0.01)) + + data_events = [e for e in events if e.startswith("data:")] + assert len(data_events) == 2 + tickers = {json.loads(e[len("data: "):-2])["ticker"] for e in data_events} + assert tickers == {"AAPL", "GOOGL"} + + async def test_event_contains_required_fields(self): + """Each SSE event has all fields required by PLAN.md §6.""" + cache = PriceCache() + cache.update("AAPL", 190.50, open_price=190.00) + + request = _make_request(disconnect_after=2) + events = 
await _collect(_generate_events(cache, request, interval=0.01)) + + data_events = [e for e in events if e.startswith("data:")] + assert data_events, "Expected at least one data event" + + payload = json.loads(data_events[0][len("data: "):-2]) + for field in ("ticker", "price", "prev_price", "open_price", "timestamp", "direction"): + assert field in payload, f"Missing field: {field}" + + async def test_timestamp_is_iso_string(self): + """SSE timestamp must be ISO 8601, not a Unix float.""" + cache = PriceCache() + cache.update("AAPL", 190.50, open_price=190.00) + + request = _make_request(disconnect_after=2) + events = await _collect(_generate_events(cache, request, interval=0.01)) + + data_events = [e for e in events if e.startswith("data:")] + payload = json.loads(data_events[0][len("data: "):-2]) + ts = payload["timestamp"] + assert isinstance(ts, str) + assert "T" in ts + assert ts.endswith("Z") + + async def test_open_price_in_event(self): + """open_price is present in SSE events so frontend can compute daily change %.""" + cache = PriceCache() + cache.update("AAPL", 192.00, open_price=190.00) + + request = _make_request(disconnect_after=2) + events = await _collect(_generate_events(cache, request, interval=0.01)) + + data_events = [e for e in events if e.startswith("data:")] + payload = json.loads(data_events[0][len("data: "):-2]) + assert payload["open_price"] == 190.00 + + async def test_stops_on_disconnect(self): + """Generator terminates when request.is_disconnected() returns True.""" + cache = PriceCache() + cache.update("AAPL", 190.00, open_price=190.00) + + request = _make_request(disconnect_after=1) + events = await _collect(_generate_events(cache, request, interval=0.01), max_items=100) + # Should stop; not infinite + assert len(events) < 100 + + async def test_skips_send_when_cache_unchanged(self): + """Version-based deduplication: no data event when cache has not changed.""" + cache = PriceCache() + cache.update("AAPL", 190.00, open_price=190.00) + 
+ # Force disconnect after 4 is_disconnected calls but use a request + # that we can count events on — with fast interval, if no cache update + # happens, the second pass should not yield a new data event. + call_count = 0 + + async def is_disconnected(): + nonlocal call_count + call_count += 1 + return call_count > 4 + + request = MagicMock() + request.client.host = "test" + request.is_disconnected = is_disconnected + + events = await _collect(_generate_events(cache, request, interval=0.01), max_items=50) + data_events = [e for e in events if e.startswith("data:")] + # Only one pass should produce data events (cache version only changed once) + assert len(data_events) == 1 + + async def test_empty_cache_yields_no_data_events(self): + """No data events when cache is empty.""" + cache = PriceCache() # empty + + request = _make_request(disconnect_after=2) + events = await _collect(_generate_events(cache, request, interval=0.01)) + + data_events = [e for e in events if e.startswith("data:")] + assert data_events == [] diff --git a/backend/uv.lock b/backend/uv.lock new file mode 100644 index 00000000..53116471 --- /dev/null +++ b/backend/uv.lock @@ -0,0 +1,815 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "annotated-types" +version = 
"0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, +] + +[[package]] +name = "certifi" +version = "2026.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, + { url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, + { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" }, + { url = "https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" }, + { url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" }, + { url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" }, + { url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" }, + { url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" }, + { url = "https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, upload-time = "2026-02-09T12:57:26.125Z" }, + { url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" }, + { url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time = "2026-02-09T12:57:30.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" }, + { url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" }, + { url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" }, + { url 
= "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" }, + { url = "https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" }, + { url = "https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, upload-time = "2026-02-09T12:57:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, upload-time = "2026-02-09T12:57:56.012Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" }, + { url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = "2026-02-09T12:58:02.736Z" }, + { url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" }, + { url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" }, + { url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" }, + { url = "https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" }, + { url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" }, + { url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" }, + { url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" }, + { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" }, + { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" }, + { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" }, + { url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = "2026-02-09T12:58:29.441Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" }, + { url 
= "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" }, + { url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" }, + { url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" }, + { url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" }, + { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" }, + { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" }, + { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" }, + { url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" }, + { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, +] + +[[package]] +name = "fastapi" +version = "0.128.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/fc/af386750b3fd8d8828167e4c82b787a8eeca2eca5c5429c9db8bb7c70e04/fastapi-0.128.7.tar.gz", hash = 
"sha256:783c273416995486c155ad2c0e2b45905dedfaf20b9ef8d9f6a9124670639a24", size = 375325, upload-time = "2026-02-10T12:26:40.968Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/1a/f983b45661c79c31be575c570d46c437a5409b67a939c1b3d8d6b3ed7a7f/fastapi-0.128.7-py3-none-any.whl", hash = "sha256:6bd9bd31cb7047465f2d3fa3ba3f33b0870b17d4eaf7cdb36d1576ab060ad662", size = 103630, upload-time = "2026-02-10T12:26:39.414Z" }, +] + +[[package]] +name = "finally-backend" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "fastapi" }, + { name = "numpy" }, + { name = "rich" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[package.optional-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "ruff" }, +] +massive = [ + { name = "massive" }, +] + +[package.metadata] +requires-dist = [ + { name = "fastapi", specifier = ">=0.115.0" }, + { name = "massive", marker = "extra == 'massive'", specifier = ">=1.0.0" }, + { name = "numpy", specifier = ">=2.0.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.3.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.24.0" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=5.0.0" }, + { name = "rich", specifier = ">=13.0.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.7.0" }, + { name = "uvicorn", extras = ["standard"], specifier = ">=0.32.0" }, +] +provides-extras = ["massive", "dev"] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httptools" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time 
= "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, + { url = "https://files.pythonhosted.org/packages/09/8f/c77b1fcbfd262d422f12da02feb0d218fa228d52485b77b953832105bb90/httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3", size = 202889, upload-time = "2025-10-10T03:54:47.089Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1a/22887f53602feaa066354867bc49a68fc295c2293433177ee90870a7d517/httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca", size = 108180, upload-time = "2025-10-10T03:54:48.052Z" }, + { url = "https://files.pythonhosted.org/packages/32/6a/6aaa91937f0010d288d3d124ca2946d48d60c3a5ee7ca62afe870e3ea011/httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c", size = 478596, upload-time = "2025-10-10T03:54:48.919Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/70/023d7ce117993107be88d2cbca566a7c1323ccbaf0af7eabf2064fe356f6/httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66", size = 473268, upload-time = "2025-10-10T03:54:49.993Z" }, + { url = "https://files.pythonhosted.org/packages/32/4d/9dd616c38da088e3f436e9a616e1d0cc66544b8cdac405cc4e81c8679fc7/httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346", size = 455517, upload-time = "2025-10-10T03:54:51.066Z" }, + { url = "https://files.pythonhosted.org/packages/1d/3a/a6c595c310b7df958e739aae88724e24f9246a514d909547778d776799be/httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650", size = 458337, upload-time = "2025-10-10T03:54:52.196Z" }, + { url = "https://files.pythonhosted.org/packages/fd/82/88e8d6d2c51edc1cc391b6e044c6c435b6aebe97b1abc33db1b0b24cd582/httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6", size = 85743, upload-time = "2025-10-10T03:54:53.448Z" }, + { url = "https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270", size = 203619, upload-time = "2025-10-10T03:54:54.321Z" }, + { url = "https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3", size = 108714, upload-time = "2025-10-10T03:54:55.163Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1", size = 472909, upload-time = "2025-10-10T03:54:56.056Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4a/a548bdfae6369c0d078bab5769f7b66f17f1bfaa6fa28f81d6be6959066b/httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b", size = 470831, upload-time = "2025-10-10T03:54:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/14df99e1c43bd132eec921c2e7e11cda7852f65619bc0fc5bdc2d0cb126c/httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60", size = 452631, upload-time = "2025-10-10T03:54:58.219Z" }, + { url = "https://files.pythonhosted.org/packages/22/d2/b7e131f7be8d854d48cb6d048113c30f9a46dca0c9a8b08fcb3fcd588cdc/httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca", size = 452910, upload-time = "2025-10-10T03:54:59.366Z" }, + { url = "https://files.pythonhosted.org/packages/53/cf/878f3b91e4e6e011eff6d1fa9ca39f7eb17d19c9d7971b04873734112f30/httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96", size = 88205, upload-time = "2025-10-10T03:55:00.389Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = 
"2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "massive" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, + { name = "websockets" }, +] +sdist = 
{ url = "https://files.pythonhosted.org/packages/27/fe/eec0d88e20269d837a0e319963d944f2c62cb275a8cd664863e2174d6b4f/massive-2.2.0.tar.gz", hash = "sha256:5a5c7b73fc1bbd3754c985ff20bc3c1db3fd9b2c64ddd5145a837a2e2f4bd5fc", size = 46463, upload-time = "2026-02-05T19:02:48.698Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/45/700942c1114c654d185f3e467b536f958c92ca3eb186bf0cb8a0c9db393a/massive-2.2.0-py3-none-any.whl", hash = "sha256:009e63b709b063bd9633a033608fb3aca6368510df909d8728a23e60bdb21c89", size = 64035, upload-time = "2026-02-05T19:02:49.807Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "numpy" +version = "2.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/fd/0005efbd0af48e55eb3c7208af93f2862d4b1a56cd78e84309a2d959208d/numpy-2.4.2.tar.gz", hash = "sha256:659a6107e31a83c4e33f763942275fd278b21d095094044eb35569e86a21ddae", size = 20723651, upload-time = "2026-01-31T23:13:10.135Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6e/6f394c9c77668153e14d4da83bcc247beb5952f6ead7699a1a2992613bea/numpy-2.4.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:21982668592194c609de53ba4933a7471880ccbaadcc52352694a59ecc860b3a", size = 16667963, upload-time = "2026-01-31T23:10:52.147Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/f8/55483431f2b2fd015ae6ed4fe62288823ce908437ed49db5a03d15151678/numpy-2.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40397bda92382fcec844066efb11f13e1c9a3e2a8e8f318fb72ed8b6db9f60f1", size = 14693571, upload-time = "2026-01-31T23:10:54.789Z" }, + { url = "https://files.pythonhosted.org/packages/2f/20/18026832b1845cdc82248208dd929ca14c9d8f2bac391f67440707fff27c/numpy-2.4.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b3a24467af63c67829bfaa61eecf18d5432d4f11992688537be59ecd6ad32f5e", size = 5203469, upload-time = "2026-01-31T23:10:57.343Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/2eb97c8a77daaba34eaa3fa7241a14ac5f51c46a6bd5911361b644c4a1e2/numpy-2.4.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:805cc8de9fd6e7a22da5aed858e0ab16be5a4db6c873dde1d7451c541553aa27", size = 6550820, upload-time = "2026-01-31T23:10:59.429Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/b97fdfd12dc75b02c44e26c6638241cc004d4079a0321a69c62f51470c4c/numpy-2.4.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d82351358ffbcdcd7b686b90742a9b86632d6c1c051016484fa0b326a0a1548", size = 15663067, upload-time = "2026-01-31T23:11:01.291Z" }, + { url = "https://files.pythonhosted.org/packages/f5/c6/a18e59f3f0b8071cc85cbc8d80cd02d68aa9710170b2553a117203d46936/numpy-2.4.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e35d3e0144137d9fdae62912e869136164534d64a169f86438bc9561b6ad49f", size = 16619782, upload-time = "2026-01-31T23:11:03.669Z" }, + { url = "https://files.pythonhosted.org/packages/b7/83/9751502164601a79e18847309f5ceec0b1446d7b6aa12305759b72cf98b2/numpy-2.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adb6ed2ad29b9e15321d167d152ee909ec73395901b70936f029c3bc6d7f4460", size = 17013128, upload-time = "2026-01-31T23:11:05.913Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/c4/c4066322256ec740acc1c8923a10047818691d2f8aec254798f3dd90f5f2/numpy-2.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8906e71fd8afcb76580404e2a950caef2685df3d2a57fe82a86ac8d33cc007ba", size = 18345324, upload-time = "2026-01-31T23:11:08.248Z" }, + { url = "https://files.pythonhosted.org/packages/ab/af/6157aa6da728fa4525a755bfad486ae7e3f76d4c1864138003eb84328497/numpy-2.4.2-cp312-cp312-win32.whl", hash = "sha256:ec055f6dae239a6299cace477b479cca2fc125c5675482daf1dd886933a1076f", size = 5960282, upload-time = "2026-01-31T23:11:10.497Z" }, + { url = "https://files.pythonhosted.org/packages/92/0f/7ceaaeaacb40567071e94dbf2c9480c0ae453d5bb4f52bea3892c39dc83c/numpy-2.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:209fae046e62d0ce6435fcfe3b1a10537e858249b3d9b05829e2a05218296a85", size = 12314210, upload-time = "2026-01-31T23:11:12.176Z" }, + { url = "https://files.pythonhosted.org/packages/2f/a3/56c5c604fae6dd40fa2ed3040d005fca97e91bd320d232ac9931d77ba13c/numpy-2.4.2-cp312-cp312-win_arm64.whl", hash = "sha256:fbde1b0c6e81d56f5dccd95dd4a711d9b95df1ae4009a60887e56b27e8d903fa", size = 10220171, upload-time = "2026-01-31T23:11:14.684Z" }, + { url = "https://files.pythonhosted.org/packages/a1/22/815b9fe25d1d7ae7d492152adbc7226d3eff731dffc38fe970589fcaaa38/numpy-2.4.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25f2059807faea4b077a2b6837391b5d830864b3543627f381821c646f31a63c", size = 16663696, upload-time = "2026-01-31T23:11:17.516Z" }, + { url = "https://files.pythonhosted.org/packages/09/f0/817d03a03f93ba9c6c8993de509277d84e69f9453601915e4a69554102a1/numpy-2.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bd3a7a9f5847d2fb8c2c6d1c862fa109c31a9abeca1a3c2bd5a64572955b2979", size = 14688322, upload-time = "2026-01-31T23:11:19.883Z" }, + { url = "https://files.pythonhosted.org/packages/da/b4/f805ab79293c728b9a99438775ce51885fd4f31b76178767cfc718701a39/numpy-2.4.2-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:8e4549f8a3c6d13d55041925e912bfd834285ef1dd64d6bc7d542583355e2e98", size = 5198157, upload-time = "2026-01-31T23:11:22.375Z" }, + { url = "https://files.pythonhosted.org/packages/74/09/826e4289844eccdcd64aac27d13b0fd3f32039915dd5b9ba01baae1f436c/numpy-2.4.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:aea4f66ff44dfddf8c2cffd66ba6538c5ec67d389285292fe428cb2c738c8aef", size = 6546330, upload-time = "2026-01-31T23:11:23.958Z" }, + { url = "https://files.pythonhosted.org/packages/19/fb/cbfdbfa3057a10aea5422c558ac57538e6acc87ec1669e666d32ac198da7/numpy-2.4.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3cd545784805de05aafe1dde61752ea49a359ccba9760c1e5d1c88a93bbf2b7", size = 15660968, upload-time = "2026-01-31T23:11:25.713Z" }, + { url = "https://files.pythonhosted.org/packages/04/dc/46066ce18d01645541f0186877377b9371b8fa8017fa8262002b4ef22612/numpy-2.4.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0d9b7c93578baafcbc5f0b83eaf17b79d345c6f36917ba0c67f45226911d499", size = 16607311, upload-time = "2026-01-31T23:11:28.117Z" }, + { url = "https://files.pythonhosted.org/packages/14/d9/4b5adfc39a43fa6bf918c6d544bc60c05236cc2f6339847fc5b35e6cb5b0/numpy-2.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f74f0f7779cc7ae07d1810aab8ac6b1464c3eafb9e283a40da7309d5e6e48fbb", size = 17012850, upload-time = "2026-01-31T23:11:30.888Z" }, + { url = "https://files.pythonhosted.org/packages/b7/20/adb6e6adde6d0130046e6fdfb7675cc62bc2f6b7b02239a09eb58435753d/numpy-2.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7ac672d699bf36275c035e16b65539931347d68b70667d28984c9fb34e07fa7", size = 18334210, upload-time = "2026-01-31T23:11:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/78/0e/0a73b3dff26803a8c02baa76398015ea2a5434d9b8265a7898a6028c1591/numpy-2.4.2-cp313-cp313-win32.whl", hash = "sha256:8e9afaeb0beff068b4d9cd20d322ba0ee1cecfb0b08db145e4ab4dd44a6b5110", size = 5958199, 
upload-time = "2026-01-31T23:11:35.385Z" }, + { url = "https://files.pythonhosted.org/packages/43/bc/6352f343522fcb2c04dbaf94cb30cca6fd32c1a750c06ad6231b4293708c/numpy-2.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:7df2de1e4fba69a51c06c28f5a3de36731eb9639feb8e1cf7e4a7b0daf4cf622", size = 12310848, upload-time = "2026-01-31T23:11:38.001Z" }, + { url = "https://files.pythonhosted.org/packages/6e/8d/6da186483e308da5da1cc6918ce913dcfe14ffde98e710bfeff2a6158d4e/numpy-2.4.2-cp313-cp313-win_arm64.whl", hash = "sha256:0fece1d1f0a89c16b03442eae5c56dc0be0c7883b5d388e0c03f53019a4bfd71", size = 10221082, upload-time = "2026-01-31T23:11:40.392Z" }, + { url = "https://files.pythonhosted.org/packages/25/a1/9510aa43555b44781968935c7548a8926274f815de42ad3997e9e83680dd/numpy-2.4.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5633c0da313330fd20c484c78cdd3f9b175b55e1a766c4a174230c6b70ad8262", size = 14815866, upload-time = "2026-01-31T23:11:42.495Z" }, + { url = "https://files.pythonhosted.org/packages/36/30/6bbb5e76631a5ae46e7923dd16ca9d3f1c93cfa8d4ed79a129814a9d8db3/numpy-2.4.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d9f64d786b3b1dd742c946c42d15b07497ed14af1a1f3ce840cce27daa0ce913", size = 5325631, upload-time = "2026-01-31T23:11:44.7Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/3a490938800c1923b567b3a15cd17896e68052e2145d8662aaf3e1ffc58f/numpy-2.4.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:b21041e8cb6a1eb5312dd1d2f80a94d91efffb7a06b70597d44f1bd2dfc315ab", size = 6646254, upload-time = "2026-01-31T23:11:46.341Z" }, + { url = "https://files.pythonhosted.org/packages/d3/e9/fac0890149898a9b609caa5af7455a948b544746e4b8fe7c212c8edd71f8/numpy-2.4.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:00ab83c56211a1d7c07c25e3217ea6695e50a3e2f255053686b081dc0b091a82", size = 15720138, upload-time = "2026-01-31T23:11:48.082Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/5c/08887c54e68e1e28df53709f1893ce92932cc6f01f7c3d4dc952f61ffd4e/numpy-2.4.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fb882da679409066b4603579619341c6d6898fc83a8995199d5249f986e8e8f", size = 16655398, upload-time = "2026-01-31T23:11:50.293Z" }, + { url = "https://files.pythonhosted.org/packages/4d/89/253db0fa0e66e9129c745e4ef25631dc37d5f1314dad2b53e907b8538e6d/numpy-2.4.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66cb9422236317f9d44b67b4d18f44efe6e9c7f8794ac0462978513359461554", size = 17079064, upload-time = "2026-01-31T23:11:52.927Z" }, + { url = "https://files.pythonhosted.org/packages/2a/d5/cbade46ce97c59c6c3da525e8d95b7abe8a42974a1dc5c1d489c10433e88/numpy-2.4.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0f01dcf33e73d80bd8dc0f20a71303abbafa26a19e23f6b68d1aa9990af90257", size = 18379680, upload-time = "2026-01-31T23:11:55.22Z" }, + { url = "https://files.pythonhosted.org/packages/40/62/48f99ae172a4b63d981babe683685030e8a3df4f246c893ea5c6ef99f018/numpy-2.4.2-cp313-cp313t-win32.whl", hash = "sha256:52b913ec40ff7ae845687b0b34d8d93b60cb66dcee06996dd5c99f2fc9328657", size = 6082433, upload-time = "2026-01-31T23:11:58.096Z" }, + { url = "https://files.pythonhosted.org/packages/07/38/e054a61cfe48ad9f1ed0d188e78b7e26859d0b60ef21cd9de4897cdb5326/numpy-2.4.2-cp313-cp313t-win_amd64.whl", hash = "sha256:5eea80d908b2c1f91486eb95b3fb6fab187e569ec9752ab7d9333d2e66bf2d6b", size = 12451181, upload-time = "2026-01-31T23:11:59.782Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a4/a05c3a6418575e185dd84d0b9680b6bb2e2dc3e4202f036b7b4e22d6e9dc/numpy-2.4.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fd49860271d52127d61197bb50b64f58454e9f578cb4b2c001a6de8b1f50b0b1", size = 10290756, upload-time = "2026-01-31T23:12:02.438Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/88/b7df6050bf18fdcfb7046286c6535cabbdd2064a3440fca3f069d319c16e/numpy-2.4.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:444be170853f1f9d528428eceb55f12918e4fda5d8805480f36a002f1415e09b", size = 16663092, upload-time = "2026-01-31T23:12:04.521Z" }, + { url = "https://files.pythonhosted.org/packages/25/7a/1fee4329abc705a469a4afe6e69b1ef7e915117747886327104a8493a955/numpy-2.4.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d1240d50adff70c2a88217698ca844723068533f3f5c5fa6ee2e3220e3bdb000", size = 14698770, upload-time = "2026-01-31T23:12:06.96Z" }, + { url = "https://files.pythonhosted.org/packages/fb/0b/f9e49ba6c923678ad5bc38181c08ac5e53b7a5754dbca8e581aa1a56b1ff/numpy-2.4.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:7cdde6de52fb6664b00b056341265441192d1291c130e99183ec0d4b110ff8b1", size = 5208562, upload-time = "2026-01-31T23:12:09.632Z" }, + { url = "https://files.pythonhosted.org/packages/7d/12/d7de8f6f53f9bb76997e5e4c069eda2051e3fe134e9181671c4391677bb2/numpy-2.4.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:cda077c2e5b780200b6b3e09d0b42205a3d1c68f30c6dceb90401c13bff8fe74", size = 6543710, upload-time = "2026-01-31T23:12:11.969Z" }, + { url = "https://files.pythonhosted.org/packages/09/63/c66418c2e0268a31a4cf8a8b512685748200f8e8e8ec6c507ce14e773529/numpy-2.4.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d30291931c915b2ab5717c2974bb95ee891a1cf22ebc16a8006bd59cd210d40a", size = 15677205, upload-time = "2026-01-31T23:12:14.33Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6c/7f237821c9642fb2a04d2f1e88b4295677144ca93285fd76eff3bcba858d/numpy-2.4.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bba37bc29d4d85761deed3954a1bc62be7cf462b9510b51d367b769a8c8df325", size = 16611738, upload-time = "2026-01-31T23:12:16.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/a7/39c4cdda9f019b609b5c473899d87abff092fc908cfe4d1ecb2fcff453b0/numpy-2.4.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b2f0073ed0868db1dcd86e052d37279eef185b9c8db5bf61f30f46adac63c909", size = 17028888, upload-time = "2026-01-31T23:12:19.306Z" }, + { url = "https://files.pythonhosted.org/packages/da/b3/e84bb64bdfea967cc10950d71090ec2d84b49bc691df0025dddb7c26e8e3/numpy-2.4.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7f54844851cdb630ceb623dcec4db3240d1ac13d4990532446761baede94996a", size = 18339556, upload-time = "2026-01-31T23:12:21.816Z" }, + { url = "https://files.pythonhosted.org/packages/88/f5/954a291bc1192a27081706862ac62bb5920fbecfbaa302f64682aa90beed/numpy-2.4.2-cp314-cp314-win32.whl", hash = "sha256:12e26134a0331d8dbd9351620f037ec470b7c75929cb8a1537f6bfe411152a1a", size = 6006899, upload-time = "2026-01-31T23:12:24.14Z" }, + { url = "https://files.pythonhosted.org/packages/05/cb/eff72a91b2efdd1bc98b3b8759f6a1654aa87612fc86e3d87d6fe4f948c4/numpy-2.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:068cdb2d0d644cdb45670810894f6a0600797a69c05f1ac478e8d31670b8ee75", size = 12443072, upload-time = "2026-01-31T23:12:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/37/75/62726948db36a56428fce4ba80a115716dc4fad6a3a4352487f8bb950966/numpy-2.4.2-cp314-cp314-win_arm64.whl", hash = "sha256:6ed0be1ee58eef41231a5c943d7d1375f093142702d5723ca2eb07db9b934b05", size = 10494886, upload-time = "2026-01-31T23:12:28.488Z" }, + { url = "https://files.pythonhosted.org/packages/36/2f/ee93744f1e0661dc267e4b21940870cabfae187c092e1433b77b09b50ac4/numpy-2.4.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:98f16a80e917003a12c0580f97b5f875853ebc33e2eaa4bccfc8201ac6869308", size = 14818567, upload-time = "2026-01-31T23:12:30.709Z" }, + { url = "https://files.pythonhosted.org/packages/a7/24/6535212add7d76ff938d8bdc654f53f88d35cddedf807a599e180dcb8e66/numpy-2.4.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = 
"sha256:20abd069b9cda45874498b245c8015b18ace6de8546bf50dfa8cea1696ed06ef", size = 5328372, upload-time = "2026-01-31T23:12:32.962Z" }, + { url = "https://files.pythonhosted.org/packages/5e/9d/c48f0a035725f925634bf6b8994253b43f2047f6778a54147d7e213bc5a7/numpy-2.4.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e98c97502435b53741540a5717a6749ac2ada901056c7db951d33e11c885cc7d", size = 6649306, upload-time = "2026-01-31T23:12:34.797Z" }, + { url = "https://files.pythonhosted.org/packages/81/05/7c73a9574cd4a53a25907bad38b59ac83919c0ddc8234ec157f344d57d9a/numpy-2.4.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da6cad4e82cb893db4b69105c604d805e0c3ce11501a55b5e9f9083b47d2ffe8", size = 15722394, upload-time = "2026-01-31T23:12:36.565Z" }, + { url = "https://files.pythonhosted.org/packages/35/fa/4de10089f21fc7d18442c4a767ab156b25c2a6eaf187c0db6d9ecdaeb43f/numpy-2.4.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e4424677ce4b47fe73c8b5556d876571f7c6945d264201180db2dc34f676ab5", size = 16653343, upload-time = "2026-01-31T23:12:39.188Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f9/d33e4ffc857f3763a57aa85650f2e82486832d7492280ac21ba9efda80da/numpy-2.4.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2b8f157c8a6f20eb657e240f8985cc135598b2b46985c5bccbde7616dc9c6b1e", size = 17078045, upload-time = "2026-01-31T23:12:42.041Z" }, + { url = "https://files.pythonhosted.org/packages/c8/b8/54bdb43b6225badbea6389fa038c4ef868c44f5890f95dd530a218706da3/numpy-2.4.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5daf6f3914a733336dab21a05cdec343144600e964d2fcdabaac0c0269874b2a", size = 18380024, upload-time = "2026-01-31T23:12:44.331Z" }, + { url = "https://files.pythonhosted.org/packages/a5/55/6e1a61ded7af8df04016d81b5b02daa59f2ea9252ee0397cb9f631efe9e5/numpy-2.4.2-cp314-cp314t-win32.whl", hash = "sha256:8c50dd1fc8826f5b26a5ee4d77ca55d88a895f4e4819c7ecc2a9f5905047a443", size = 6153937, 
upload-time = "2026-01-31T23:12:47.229Z" }, + { url = "https://files.pythonhosted.org/packages/45/aa/fa6118d1ed6d776b0983f3ceac9b1a5558e80df9365b1c3aa6d42bf9eee4/numpy-2.4.2-cp314-cp314t-win_amd64.whl", hash = "sha256:fcf92bee92742edd401ba41135185866f7026c502617f422eb432cfeca4fe236", size = 12631844, upload-time = "2026-01-31T23:12:48.997Z" }, + { url = "https://files.pythonhosted.org/packages/32/0a/2ec5deea6dcd158f254a7b372fb09cfba5719419c8d66343bab35237b3fb/numpy-2.4.2-cp314-cp314t-win_arm64.whl", hash = "sha256:1f92f53998a17265194018d1cc321b2e96e900ca52d54c7c77837b71b9465181", size = 10565379, upload-time = "2026-01-31T23:12:51.345Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, 
upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = 
"2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = 
"2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, 
upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url 
= "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, 
upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = 
"sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" 
}, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = 
"2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "rich" +version = "14.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/99/a4cab2acbb884f80e558b0771e97e21e939c5dfb460f488d19df485e8298/rich-14.3.2.tar.gz", hash = "sha256:e712f11c1a562a11843306f5ed999475f09ac31ffb64281f73ab29ffdda8b3b8", size = 230143, upload-time = "2026-02-01T16:20:47.908Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69", size = 309963, upload-time = "2026-02-01T16:20:46.078Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/39/5cee96809fbca590abea6b46c6d1c586b49663d1d2830a751cc8fc42c666/ruff-0.15.0.tar.gz", hash = "sha256:6bdea47cdbea30d40f8f8d7d69c0854ba7c15420ec75a26f463290949d7f7e9a", size = 4524893, upload-time = "2026-02-03T17:53:35.357Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/88/3fd1b0aa4b6330d6aaa63a285bc96c9f71970351579152d231ed90914586/ruff-0.15.0-py3-none-linux_armv6l.whl", hash = "sha256:aac4ebaa612a82b23d45964586f24ae9bc23ca101919f5590bdb368d74ad5455", size = 10354332, upload-time = "2026-02-03T17:52:54.892Z" }, + { url = "https://files.pythonhosted.org/packages/72/f6/62e173fbb7eb75cc29fe2576a1e20f0a46f671a2587b5f604bfb0eaf5f6f/ruff-0.15.0-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:dcd4be7cc75cfbbca24a98d04d0b9b36a270d0833241f776b788d59f4142b14d", size = 10767189, upload-time = "2026-02-03T17:53:19.778Z" }, + { url = "https://files.pythonhosted.org/packages/99/e4/968ae17b676d1d2ff101d56dc69cf333e3a4c985e1ec23803df84fc7bf9e/ruff-0.15.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d747e3319b2bce179c7c1eaad3d884dc0a199b5f4d5187620530adf9105268ce", size = 10075384, upload-time = "2026-02-03T17:53:29.241Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bf/9843c6044ab9e20af879c751487e61333ca79a2c8c3058b15722386b8cae/ruff-0.15.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:650bd9c56ae03102c51a5e4b554d74d825ff3abe4db22b90fd32d816c2e90621", size = 10481363, upload-time = "2026-02-03T17:52:43.332Z" }, + { url = "https://files.pythonhosted.org/packages/55/d9/4ada5ccf4cd1f532db1c8d44b6f664f2208d3d93acbeec18f82315e15193/ruff-0.15.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6664b7eac559e3048223a2da77769c2f92b43a6dfd4720cef42654299a599c9", size = 10187736, upload-time = "2026-02-03T17:53:00.522Z" }, + { url = "https://files.pythonhosted.org/packages/86/e2/f25eaecd446af7bb132af0a1d5b135a62971a41f5366ff41d06d25e77a91/ruff-0.15.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f811f97b0f092b35320d1556f3353bf238763420ade5d9e62ebd2b73f2ff179", size = 10968415, upload-time = "2026-02-03T17:53:15.705Z" }, + { url = "https://files.pythonhosted.org/packages/e7/dc/f06a8558d06333bf79b497d29a50c3a673d9251214e0d7ec78f90b30aa79/ruff-0.15.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:761ec0a66680fab6454236635a39abaf14198818c8cdf691e036f4bc0f406b2d", size = 11809643, upload-time = "2026-02-03T17:53:23.031Z" }, + { url = "https://files.pythonhosted.org/packages/dd/45/0ece8db2c474ad7df13af3a6d50f76e22a09d078af63078f005057ca59eb/ruff-0.15.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:940f11c2604d317e797b289f4f9f3fa5555ffe4fb574b55ed006c3d9b6f0eb78", size = 11234787, upload-time = "2026-02-03T17:52:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/8a/d9/0e3a81467a120fd265658d127db648e4d3acfe3e4f6f5d4ea79fac47e587/ruff-0.15.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcbca3d40558789126da91d7ef9a7c87772ee107033db7191edefa34e2c7f1b4", size = 11112797, upload-time = "2026-02-03T17:52:49.274Z" }, + { url = "https://files.pythonhosted.org/packages/b2/cb/8c0b3b0c692683f8ff31351dfb6241047fa873a4481a76df4335a8bff716/ruff-0.15.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9a121a96db1d75fa3eb39c4539e607f628920dd72ff1f7c5ee4f1b768ac62d6e", size = 11033133, upload-time = "2026-02-03T17:53:33.105Z" }, + { url = "https://files.pythonhosted.org/packages/f8/5e/23b87370cf0f9081a8c89a753e69a4e8778805b8802ccfe175cc410e50b9/ruff-0.15.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5298d518e493061f2eabd4abd067c7e4fb89e2f63291c94332e35631c07c3662", size = 10442646, upload-time = "2026-02-03T17:53:06.278Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9a/3c94de5ce642830167e6d00b5c75aacd73e6347b4c7fc6828699b150a5ee/ruff-0.15.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afb6e603d6375ff0d6b0cee563fa21ab570fd15e65c852cb24922cef25050cf1", size = 10195750, upload-time = "2026-02-03T17:53:26.084Z" }, + { url = "https://files.pythonhosted.org/packages/30/15/e396325080d600b436acc970848d69df9c13977942fb62bb8722d729bee8/ruff-0.15.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77e515f6b15f828b94dc17d2b4ace334c9ddb7d9468c54b2f9ed2b9c1593ef16", size = 10676120, upload-time = "2026-02-03T17:53:09.363Z" }, + { url = "https://files.pythonhosted.org/packages/8d/c9/229a23d52a2983de1ad0fb0ee37d36e0257e6f28bfd6b498ee2c76361874/ruff-0.15.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6f6e80850a01eb13b3e42ee0ebdf6e4497151b48c35051aab51c101266d187a3", size = 11201636, upload-time = 
"2026-02-03T17:52:57.281Z" }, + { url = "https://files.pythonhosted.org/packages/6f/b0/69adf22f4e24f3677208adb715c578266842e6e6a3cc77483f48dd999ede/ruff-0.15.0-py3-none-win32.whl", hash = "sha256:238a717ef803e501b6d51e0bdd0d2c6e8513fe9eec14002445134d3907cd46c3", size = 10465945, upload-time = "2026-02-03T17:53:12.591Z" }, + { url = "https://files.pythonhosted.org/packages/51/ad/f813b6e2c97e9b4598be25e94a9147b9af7e60523b0cb5d94d307c15229d/ruff-0.15.0-py3-none-win_amd64.whl", hash = "sha256:dd5e4d3301dc01de614da3cdffc33d4b1b96fb89e45721f1598e5532ccf78b18", size = 11564657, upload-time = "2026-02-03T17:52:51.893Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b0/2d823f6e77ebe560f4e397d078487e8d52c1516b331e3521bc75db4272ca/ruff-0.15.0-py3-none-win_arm64.whl", hash = "sha256:c480d632cc0ca3f0727acac8b7d053542d9e114a462a145d0b00e7cd658c515a", size = 10865753, upload-time = "2026-02-03T17:53:03.014Z" }, +] + +[[package]] +name = "starlette" +version = "0.52.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = 
"sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, + 
{ url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, + { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" }, + { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" }, + { url = "https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" }, + { url = "https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = "2025-10-16T22:16:39.375Z" }, + { url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" }, + { url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" }, + { url = "https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142", size = 1362067, upload-time = "2025-10-16T22:16:44.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74", size = 752423, upload-time = "2025-10-16T22:16:45.968Z" }, + { url = "https://files.pythonhosted.org/packages/a3/94/94af78c156f88da4b3a733773ad5ba0b164393e357cc4bd0ab2e2677a7d6/uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35", size = 4272437, upload-time = "2025-10-16T22:16:47.451Z" }, + { url = "https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25", size = 4292101, upload-time = "2025-10-16T22:16:49.318Z" }, + { url = "https://files.pythonhosted.org/packages/02/62/67d382dfcb25d0a98ce73c11ed1a6fba5037a1a1d533dcbb7cab033a2636/uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6", size = 4114158, upload-time = "2025-10-16T22:16:50.517Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/f1171b4a882a5d13c8b7576f348acfe6074d72eaf52cccef752f748d4a9f/uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079", size = 4177360, upload-time = "2025-10-16T22:16:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/79/7b/b01414f31546caf0919da80ad57cbfe24c56b151d12af68cee1b04922ca8/uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289", size = 1454790, upload-time = "2025-10-16T22:16:54.355Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/31/0bb232318dd838cad3fa8fb0c68c8b40e1145b32025581975e18b11fab40/uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3", size = 796783, upload-time = "2025-10-16T22:16:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/42/38/c9b09f3271a7a723a5de69f8e237ab8e7803183131bc57c890db0b6bb872/uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c", size = 4647548, upload-time = "2025-10-16T22:16:57.008Z" }, + { url = "https://files.pythonhosted.org/packages/c1/37/945b4ca0ac27e3dc4952642d4c900edd030b3da6c9634875af6e13ae80e5/uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21", size = 4467065, upload-time = "2025-10-16T22:16:58.206Z" }, + { url = "https://files.pythonhosted.org/packages/97/cc/48d232f33d60e2e2e0b42f4e73455b146b76ebe216487e862700457fbf3c/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88", size = 4328384, upload-time = "2025-10-16T22:16:59.36Z" }, + { url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = 
"sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, 
upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, +] + +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, 
upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = 
"2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] diff --git a/planning/MARKET_DATA_DESIGN.md b/planning/MARKET_DATA_DESIGN.md index 0d2cfd5f..6d39cfd3 100644 --- a/planning/MARKET_DATA_DESIGN.md +++ b/planning/MARKET_DATA_DESIGN.md @@ -1,8 +1,6 @@ -# Market Data Backend — Detailed Design +# Market Data Backend — Implementation Design -Implementation-ready design for the FinAlly market data subsystem. Covers the unified interface, in-memory price cache, GBM simulator, Massive API client, SSE streaming endpoint, and FastAPI lifecycle integration. - -Everything in this document lives under `backend/app/market/`. +Complete implementation guide for the FinAlly market data subsystem. Everything in this document lives under `backend/app/market/`. --- @@ -12,7 +10,7 @@ Everything in this document lives under `backend/app/market/`. 2. [Data Model — `models.py`](#2-data-model) 3. [Price Cache — `cache.py`](#3-price-cache) 4. [Abstract Interface — `interface.py`](#4-abstract-interface) -5. [Seed Prices & Ticker Parameters — `seed_prices.py`](#5-seed-prices--ticker-parameters) +5. [Seed Data — `seed_data.py`](#5-seed-data) 6. [GBM Simulator — `simulator.py`](#6-gbm-simulator) 7. [Massive API Client — `massive_client.py`](#7-massive-api-client) 8. [Factory — `factory.py`](#8-factory) @@ -31,18 +29,18 @@ Everything in this document lives under `backend/app/market/`. 
backend/ app/ market/ - __init__.py # Re-exports: PriceUpdate, PriceCache, MarketDataSource, create_market_data_source - models.py # PriceUpdate dataclass - cache.py # PriceCache (thread-safe in-memory store) - interface.py # MarketDataSource ABC - seed_prices.py # SEED_PRICES, TICKER_PARAMS, DEFAULT_PARAMS, CORRELATION_GROUPS - simulator.py # GBMSimulator + SimulatorDataSource - massive_client.py # MassiveDataSource - factory.py # create_market_data_source() - stream.py # SSE endpoint (FastAPI router) + __init__.py # Re-exports public API + models.py # PriceUpdate dataclass + cache.py # PriceCache (thread-safe in-memory store) + interface.py # MarketDataSource ABC + seed_data.py # SEED_PRICES, TICKER_PARAMS, correlation constants + simulator.py # GBMSimulator + SimulatorDataSource + massive_client.py # MassiveDataSource (polygon-api-client) + factory.py # create_market_data_source() + stream.py # SSE FastAPI router ``` -Each file has a single responsibility. The `__init__.py` re-exports the public API so that the rest of the backend imports from `app.market` without reaching into submodules. +Each file has a single responsibility. `__init__.py` re-exports the public API so the rest of the backend imports from `app.market` without reaching into submodules. --- @@ -50,13 +48,11 @@ Each file has a single responsibility. The `__init__.py` re-exports the public A **File: `backend/app/market/models.py`** -`PriceUpdate` is the only data structure that leaves the market data layer. Every downstream consumer — SSE streaming, portfolio valuation, trade execution — works exclusively with this type. +`PriceUpdate` is the **only** data structure that leaves the market data layer. Every downstream consumer — SSE streaming, portfolio valuation, trade execution, watchlist API — works with this type. 
```python from __future__ import annotations - -import time -from dataclasses import dataclass, field +from dataclasses import dataclass @dataclass(frozen=True, slots=True) @@ -65,49 +61,33 @@ class PriceUpdate: ticker: str price: float - previous_price: float - timestamp: float = field(default_factory=time.time) # Unix seconds - - @property - def change(self) -> float: - """Absolute price change from previous update.""" - return round(self.price - self.previous_price, 4) - - @property - def change_percent(self) -> float: - """Percentage change from previous update.""" - if self.previous_price == 0: - return 0.0 - return round((self.price - self.previous_price) / self.previous_price * 100, 4) - - @property - def direction(self) -> str: - """'up', 'down', or 'flat'.""" - if self.price > self.previous_price: - return "up" - elif self.price < self.previous_price: - return "down" - return "flat" - - def to_dict(self) -> dict: - """Serialize for JSON / SSE transmission.""" + prev_price: float # Price from the previous update + open_price: float # Session-start seed price — set once, never overwritten + timestamp: float # Unix seconds + direction: str # "up", "down", or "flat" + + def to_sse_dict(self) -> dict: + """Serialize to the SSE event wire format.""" + from datetime import datetime, timezone + ts_iso = datetime.fromtimestamp( + self.timestamp, tz=timezone.utc + ).isoformat().replace("+00:00", "Z") return { "ticker": self.ticker, "price": self.price, - "previous_price": self.previous_price, - "timestamp": self.timestamp, - "change": self.change, - "change_percent": self.change_percent, + "prev_price": self.prev_price, + "open_price": self.open_price, + "timestamp": ts_iso, "direction": self.direction, } ``` ### Design decisions -- **`frozen=True`**: Price updates are immutable value objects. Once created they never change, which makes them safe to share across async tasks without copying. 
-- **`slots=True`**: Minor memory optimization — we create many of these per second. -- **Computed properties** (`change`, `direction`, `change_percent`): Derived from `price` and `previous_price` so they can never be inconsistent. No risk of a stale `direction` field. -- **`to_dict()`**: Single serialization point used by both the SSE endpoint and REST API responses. +- **`frozen=True`**: Immutable value objects — safe to share across async tasks without copying. +- **`slots=True`**: Minor memory optimization; many instances created per second. +- **`open_price`**: Set once when the ticker first enters the cache; never overwritten. This is the baseline for `(price - open_price) / open_price * 100` daily change % on the frontend. +- **`direction`**: Computed and stored (not a property) so it is consistent with `prev_price` at creation time. --- @@ -115,106 +95,120 @@ class PriceUpdate: **File: `backend/app/market/cache.py`** -The price cache is the central data hub. Data sources write to it; SSE streaming and portfolio valuation read from it. It must be thread-safe because the simulator/poller may run in a thread pool executor while SSE reads happen on the async event loop. +The shared in-memory hub. Data sources write to it; SSE streaming, portfolio valuation, and trade execution read from it. ```python from __future__ import annotations -import asyncio import time from threading import Lock -from typing import Callable from .models import PriceUpdate class PriceCache: - """Thread-safe in-memory cache of the latest price for each ticker. + """Thread-safe cache of the latest price per ticker. - Writers: SimulatorDataSource or MassiveDataSource (one at a time). + Writers: SimulatorDataSource or MassiveDataSource (one active at a time). Readers: SSE streaming endpoint, portfolio valuation, trade execution. 
""" def __init__(self) -> None: - self._prices: dict[str, PriceUpdate] = {} + self._data: dict[str, PriceUpdate] = {} self._lock = Lock() - self._version: int = 0 # Monotonically increasing; bumped on every update - - def update(self, ticker: str, price: float, timestamp: float | None = None) -> PriceUpdate: - """Record a new price for a ticker. Returns the created PriceUpdate. + self._version: int = 0 # Incremented on every write — used by SSE for change detection - Automatically computes direction and change from the previous price. - If this is the first update for the ticker, previous_price == price (direction='flat'). + def update( + self, + ticker: str, + price: float, + timestamp: float | None = None, + open_price: float | None = None, + ) -> PriceUpdate: + """Record a new price. Returns the resulting PriceUpdate. + + open_price is only used on the *first* update for a ticker; ignored + on subsequent calls. If not provided on first update, price is used. """ with self._lock: ts = timestamp or time.time() - prev = self._prices.get(ticker) - previous_price = prev.price if prev else price + existing = self._data.get(ticker) + + if existing: + prev_price = existing.price + effective_open = existing.open_price # Never overwrite + else: + prev_price = price + effective_open = open_price if open_price is not None else price + + if price > prev_price: + direction = "up" + elif price < prev_price: + direction = "down" + else: + direction = "flat" update = PriceUpdate( ticker=ticker, price=round(price, 2), - previous_price=round(previous_price, 2), + prev_price=round(prev_price, 2), + open_price=round(effective_open, 2), timestamp=ts, + direction=direction, ) - self._prices[ticker] = update + self._data[ticker] = update self._version += 1 return update def get(self, ticker: str) -> PriceUpdate | None: - """Get the latest price for a single ticker, or None if unknown.""" with self._lock: - return self._prices.get(ticker) - - def get_all(self) -> dict[str, PriceUpdate]: 
- """Snapshot of all current prices. Returns a shallow copy.""" - with self._lock: - return dict(self._prices) + return self._data.get(ticker) def get_price(self, ticker: str) -> float | None: - """Convenience: get just the price float, or None.""" + """Convenience: return just the price float, or None.""" update = self.get(ticker) return update.price if update else None + def get_all(self) -> dict[str, PriceUpdate]: + """Shallow copy of all current prices.""" + with self._lock: + return dict(self._data) + def remove(self, ticker: str) -> None: - """Remove a ticker from the cache (e.g., when removed from watchlist).""" + """Remove a ticker — called when it leaves the watchlist.""" with self._lock: - self._prices.pop(ticker, None) + self._data.pop(ticker, None) @property def version(self) -> int: - """Current version counter. Useful for SSE change detection.""" return self._version def __len__(self) -> int: with self._lock: - return len(self._prices) + return len(self._data) def __contains__(self, ticker: str) -> bool: with self._lock: - return ticker in self._prices + return ticker in self._data ``` -### Why a version counter? +### Why `threading.Lock` not `asyncio.Lock` + +The Massive client calls the synchronous `polygon` SDK inside `asyncio.to_thread()`, which runs in a real OS thread. `asyncio.Lock` only works within the async event loop — it doesn't protect against concurrent OS threads. `threading.Lock` works correctly from both sync threads and the async event loop. -The SSE streaming loop polls the cache every ~500ms. Without a version counter, it would serialize and send all prices every tick even if nothing changed (e.g., Massive API only updates every 15s). The version counter lets the SSE loop skip sends when nothing is new: +### Version counter + +The SSE loop polls the cache every ~500ms. 
The version counter lets it skip serialization when nothing has changed (important for the Massive poller, which only updates every 15s): ```python last_version = -1 while True: if price_cache.version != last_version: last_version = price_cache.version - yield format_sse(price_cache.get_all()) + # serialize and yield await asyncio.sleep(0.5) ``` -### Thread safety rationale - -The `threading.Lock` is used instead of `asyncio.Lock` because: -- The Massive client's synchronous `get_snapshot_all()` runs in `asyncio.to_thread()`, which operates in a real OS thread — `asyncio.Lock` would not protect against that. -- The GBM simulator's `step()` is CPU-bound and could also be offloaded to a thread for fairness. -- `threading.Lock` works correctly from both sync threads and the async event loop. - --- ## 4. Abstract Interface @@ -223,7 +217,6 @@ The `threading.Lock` is used instead of `asyncio.Lock` because: ```python from __future__ import annotations - from abc import ABC, abstractmethod @@ -231,115 +224,95 @@ class MarketDataSource(ABC): """Contract for market data providers. Implementations push price updates into a shared PriceCache on their own - schedule. Downstream code never calls the data source directly for prices — - it reads from the cache. + schedule. Downstream code never calls the source for prices — it reads + from the cache. Lifecycle: source = create_market_data_source(cache) await source.start(["AAPL", "GOOGL", ...]) - # ... app runs ... await source.add_ticker("TSLA") await source.remove_ticker("GOOGL") - # ... app shutting down ... await source.stop() """ @abstractmethod async def start(self, tickers: list[str]) -> None: - """Begin producing price updates for the given tickers. - - Starts a background task that periodically writes to the PriceCache. - Must be called exactly once. Calling start() twice is undefined behavior. - """ + """Begin producing price updates. 
Must be called exactly once.""" @abstractmethod async def stop(self) -> None: - """Stop the background task and release resources. - - Safe to call multiple times. After stop(), the source will not write - to the cache again. - """ + """Stop the background task. Safe to call multiple times.""" @abstractmethod async def add_ticker(self, ticker: str) -> None: - """Add a ticker to the active set. No-op if already present. - - The next update cycle will include this ticker. - """ + """Add a ticker. No-op if already present.""" @abstractmethod async def remove_ticker(self, ticker: str) -> None: - """Remove a ticker from the active set. No-op if not present. - - Also removes the ticker from the PriceCache. - """ + """Remove a ticker and purge it from the cache. No-op if absent.""" @abstractmethod def get_tickers(self) -> list[str]: - """Return the current list of actively tracked tickers.""" + """Return the current active ticker list.""" ``` -### Why the source writes to the cache instead of returning prices +### Push model rationale -This push model decouples timing. The simulator ticks at 500ms, Massive polls at 15s, but SSE always reads from the cache at its own 500ms cadence. There is no need for the SSE layer to know which data source is active or what its update interval is. +The source writes to the cache rather than returning prices. This decouples timing: the simulator ticks at 500ms, Massive polls at 15s, but SSE always reads the cache at its own 500ms cadence. The SSE layer never needs to know which source is active. --- -## 5. Seed Prices & Ticker Parameters +## 5. Seed Data -**File: `backend/app/market/seed_prices.py`** +**File: `backend/app/market/seed_data.py`** -Constants only — no logic, no imports beyond stdlib. This file is shared by both the simulator (for initial prices and GBM parameters) and potentially by the Massive client (as fallback prices if the API hasn't responded yet). +Constants only — no logic, no imports. 
```python -"""Seed prices and per-ticker parameters for the market simulator.""" +"""Seed prices and GBM parameters for the market simulator.""" -# Realistic starting prices for the default watchlist (as of project creation) +# Realistic starting prices for the 10 default watchlist tickers SEED_PRICES: dict[str, float] = { - "AAPL": 190.00, + "AAPL": 190.00, "GOOGL": 175.00, - "MSFT": 420.00, - "AMZN": 185.00, - "TSLA": 250.00, - "NVDA": 800.00, - "META": 500.00, - "JPM": 195.00, - "V": 280.00, - "NFLX": 600.00, + "MSFT": 420.00, + "AMZN": 185.00, + "TSLA": 250.00, + "NVDA": 800.00, + "META": 500.00, + "JPM": 195.00, + "V": 280.00, + "NFLX": 600.00, } # Per-ticker GBM parameters -# sigma: annualized volatility (higher = more price movement) -# mu: annualized drift / expected return +# sigma: annualized volatility mu: annualized drift TICKER_PARAMS: dict[str, dict[str, float]] = { "AAPL": {"sigma": 0.22, "mu": 0.05}, "GOOGL": {"sigma": 0.25, "mu": 0.05}, "MSFT": {"sigma": 0.20, "mu": 0.05}, "AMZN": {"sigma": 0.28, "mu": 0.05}, - "TSLA": {"sigma": 0.50, "mu": 0.03}, # High volatility - "NVDA": {"sigma": 0.40, "mu": 0.08}, # High volatility, strong drift + "TSLA": {"sigma": 0.50, "mu": 0.03}, # High vol, low drift + "NVDA": {"sigma": 0.40, "mu": 0.08}, # High vol, strong drift "META": {"sigma": 0.30, "mu": 0.05}, - "JPM": {"sigma": 0.18, "mu": 0.04}, # Low volatility (bank) - "V": {"sigma": 0.17, "mu": 0.04}, # Low volatility (payments) + "JPM": {"sigma": 0.18, "mu": 0.04}, # Low vol (bank) + "V": {"sigma": 0.17, "mu": 0.04}, # Low vol (payments) "NFLX": {"sigma": 0.35, "mu": 0.05}, } -# Default parameters for tickers not in the list above (dynamically added) +# Fallback for tickers not in TICKER_PARAMS (dynamically added tickers) DEFAULT_PARAMS: dict[str, float] = {"sigma": 0.25, "mu": 0.05} +DEFAULT_SEED_PRICE: float = 100.00 # Per PLAN.md: unknown tickers start at $100 -# Correlation groups for the simulator's Cholesky decomposition -# Tickers in the same group 
have higher intra-group correlation -CORRELATION_GROUPS: dict[str, set[str]] = { - "tech": {"AAPL", "GOOGL", "MSFT", "AMZN", "META", "NVDA", "NFLX"}, - "finance": {"JPM", "V"}, -} +# Sector groups for Cholesky correlation matrix +TECH_TICKERS: frozenset[str] = frozenset({"AAPL", "GOOGL", "MSFT", "AMZN", "META", "NVDA", "NFLX"}) +FINANCE_TICKERS: frozenset[str] = frozenset({"JPM", "V"}) -# Correlation coefficients -INTRA_TECH_CORR = 0.6 # Tech stocks move together -INTRA_FINANCE_CORR = 0.5 # Finance stocks move together -CROSS_GROUP_CORR = 0.3 # Between sectors -TSLA_CORR = 0.3 # TSLA does its own thing -DEFAULT_CORR = 0.3 # Unknown tickers +INTRA_TECH_CORR: float = 0.60 +INTRA_FINANCE_CORR: float = 0.50 +CROSS_GROUP_CORR: float = 0.30 +TSLA_CORR: float = 0.25 # TSLA does its own thing +DEFAULT_CORR: float = 0.30 ``` --- @@ -348,11 +321,28 @@ DEFAULT_CORR = 0.3 # Unknown tickers **File: `backend/app/market/simulator.py`** -This file contains two classes: -- `GBMSimulator`: Pure math engine. Stateful — holds current prices and advances them one step at a time. -- `SimulatorDataSource`: The `MarketDataSource` implementation that wraps `GBMSimulator` in an async loop and writes to the `PriceCache`. +Two classes: `GBMSimulator` (pure math engine) and `SimulatorDataSource` (async wrapper implementing `MarketDataSource`). 
+ +### GBM Math + +At each time step a price evolves as: + +``` +S(t+dt) = S(t) * exp((mu - sigma²/2) * dt + sigma * sqrt(dt) * Z) +``` + +**Deriving `dt` for 500ms ticks:** +``` +dt = 0.5s / (252 days/yr * 6.5 hr/day * 3600 s/hr) + = 0.5 / 5,896,800 ≈ 8.48e-8 +``` + +Correlated random draws use Cholesky decomposition of a correlation matrix: +``` +Z_correlated = L @ Z_independent where L = cholesky(C) +``` -### 6.1 GBMSimulator — The Math Engine +### 6.1 GBMSimulator ```python from __future__ import annotations @@ -366,122 +356,91 @@ import numpy as np from .cache import PriceCache from .interface import MarketDataSource -from .seed_prices import ( - CORRELATION_GROUPS, +from .seed_data import ( CROSS_GROUP_CORR, DEFAULT_CORR, DEFAULT_PARAMS, + DEFAULT_SEED_PRICE, + FINANCE_TICKERS, INTRA_FINANCE_CORR, INTRA_TECH_CORR, SEED_PRICES, + TECH_TICKERS, TICKER_PARAMS, TSLA_CORR, ) logger = logging.getLogger(__name__) +# 500ms as fraction of a trading year (252 days * 6.5h * 3600s) +_TRADING_SECONDS_PER_YEAR = 252 * 6.5 * 3600 +_DT = 0.5 / _TRADING_SECONDS_PER_YEAR # ~8.48e-8 +_EVENT_PROB = 0.001 # ~0.1% chance of shock per tick per ticker -class GBMSimulator: - """Geometric Brownian Motion simulator for correlated stock prices. - - Math: - S(t+dt) = S(t) * exp((mu - sigma^2/2) * dt + sigma * sqrt(dt) * Z) - Where: - S(t) = current price - mu = annualized drift (expected return) - sigma = annualized volatility - dt = time step as fraction of a trading year - Z = correlated standard normal random variable +class GBMSimulator: + """Geometric Brownian Motion price simulator for a dynamic ticker set. - The tiny dt (~8.5e-8 for 500ms ticks over 252 trading days * 6.5h/day) - produces sub-cent moves per tick that accumulate naturally over time. + Prices can never go negative (exp() is always positive). Correlated moves + are generated via Cholesky decomposition of a sector-based correlation matrix. 
""" - # 500ms expressed as a fraction of a trading year - # 252 trading days * 6.5 hours/day * 3600 seconds/hour = 5,896,800 seconds - TRADING_SECONDS_PER_YEAR = 252 * 6.5 * 3600 # 5,896,800 - DEFAULT_DT = 0.5 / TRADING_SECONDS_PER_YEAR # ~8.48e-8 - def __init__( self, tickers: list[str], - dt: float = DEFAULT_DT, - event_probability: float = 0.001, + dt: float = _DT, + event_probability: float = _EVENT_PROB, ) -> None: self._dt = dt self._event_prob = event_probability - - # Per-ticker state self._tickers: list[str] = [] self._prices: dict[str, float] = {} self._params: dict[str, dict[str, float]] = {} - - # Cholesky decomposition of the correlation matrix (for correlated moves) self._cholesky: np.ndarray | None = None - # Initialize all starting tickers for ticker in tickers: - self._add_ticker_internal(ticker) + self._add(ticker) self._rebuild_cholesky() # --- Public API --- def step(self) -> dict[str, float]: - """Advance all tickers by one time step. Returns {ticker: new_price}. - - This is the hot path — called every 500ms. Keep it fast. - """ + """Advance all tickers one time step. 
Returns {ticker: new_price}.""" n = len(self._tickers) if n == 0: return {} - # Generate n independent standard normal draws - z_independent = np.random.standard_normal(n) - - # Apply Cholesky to get correlated draws - if self._cholesky is not None: - z_correlated = self._cholesky @ z_independent - else: - z_correlated = z_independent + z_raw = np.random.standard_normal(n) + z = (self._cholesky @ z_raw) if self._cholesky is not None else z_raw result: dict[str, float] = {} for i, ticker in enumerate(self._tickers): - params = self._params[ticker] - mu = params["mu"] - sigma = params["sigma"] + mu = self._params[ticker]["mu"] + sigma = self._params[ticker]["sigma"] - # GBM: S(t+dt) = S(t) * exp((mu - 0.5*sigma^2)*dt + sigma*sqrt(dt)*Z) drift = (mu - 0.5 * sigma ** 2) * self._dt - diffusion = sigma * math.sqrt(self._dt) * z_correlated[i] + diffusion = sigma * math.sqrt(self._dt) * float(z[i]) self._prices[ticker] *= math.exp(drift + diffusion) - # Random event: ~0.1% chance per tick per ticker - # With 10 tickers at 2 ticks/sec, expect an event ~every 50 seconds + # Random event shock: ~1 event per 50s across 10 tickers at 2 ticks/s if random.random() < self._event_prob: - shock_magnitude = random.uniform(0.02, 0.05) - shock_sign = random.choice([-1, 1]) - self._prices[ticker] *= 1 + shock_magnitude * shock_sign - logger.debug( - "Random event on %s: %.1f%% %s", - ticker, - shock_magnitude * 100, - "up" if shock_sign > 0 else "down", - ) + shock = random.uniform(0.02, 0.05) * random.choice([-1, 1]) + self._prices[ticker] *= (1 + shock) + logger.debug("Random event: %s shock %.1f%%", ticker, shock * 100) result[ticker] = round(self._prices[ticker], 2) return result def add_ticker(self, ticker: str) -> None: - """Add a ticker to the simulation. Rebuilds the correlation matrix.""" + """Add a ticker. 
Rebuilds the Cholesky matrix.""" if ticker in self._prices: return - self._add_ticker_internal(ticker) + self._add(ticker) self._rebuild_cholesky() def remove_ticker(self, ticker: str) -> None: - """Remove a ticker from the simulation. Rebuilds the correlation matrix.""" + """Remove a ticker. Rebuilds the Cholesky matrix.""" if ticker not in self._prices: return self._tickers.remove(ticker) @@ -490,80 +449,66 @@ class GBMSimulator: self._rebuild_cholesky() def get_price(self, ticker: str) -> float | None: - """Current price for a ticker, or None if not tracked.""" return self._prices.get(ticker) + def get_tickers(self) -> list[str]: + return list(self._tickers) + # --- Internals --- - def _add_ticker_internal(self, ticker: str) -> None: - """Add a ticker without rebuilding Cholesky (for batch initialization).""" - if ticker in self._prices: - return + def _add(self, ticker: str) -> None: + """Add without rebuilding Cholesky — for batch init.""" self._tickers.append(ticker) - self._prices[ticker] = SEED_PRICES.get(ticker, random.uniform(50.0, 300.0)) - self._params[ticker] = TICKER_PARAMS.get(ticker, dict(DEFAULT_PARAMS)) + self._prices[ticker] = SEED_PRICES.get(ticker, DEFAULT_SEED_PRICE) + self._params[ticker] = dict(TICKER_PARAMS.get(ticker, DEFAULT_PARAMS)) def _rebuild_cholesky(self) -> None: - """Rebuild the Cholesky decomposition of the ticker correlation matrix. - - Called whenever tickers are added or removed. O(n^2) but n < 50. - """ + """Recompute Cholesky factor of the n×n correlation matrix. 
O(n²).""" n = len(self._tickers) if n <= 1: self._cholesky = None return - # Build the correlation matrix corr = np.eye(n) for i in range(n): for j in range(i + 1, n): - rho = self._pairwise_correlation(self._tickers[i], self._tickers[j]) + rho = _pairwise_corr(self._tickers[i], self._tickers[j]) corr[i, j] = rho corr[j, i] = rho self._cholesky = np.linalg.cholesky(corr) - @staticmethod - def _pairwise_correlation(t1: str, t2: str) -> float: - """Determine correlation between two tickers based on sector grouping. - - Correlation structure: - - Same tech sector: 0.6 - - Same finance sector: 0.5 - - TSLA with anything: 0.3 (it does its own thing) - - Cross-sector: 0.3 - - Unknown tickers: 0.3 - """ - tech = CORRELATION_GROUPS["tech"] - finance = CORRELATION_GROUPS["finance"] - - # TSLA is in tech set but behaves independently - if t1 == "TSLA" or t2 == "TSLA": - return TSLA_CORR - - if t1 in tech and t2 in tech: - return INTRA_TECH_CORR - if t1 in finance and t2 in finance: - return INTRA_FINANCE_CORR - return CROSS_GROUP_CORR +def _pairwise_corr(t1: str, t2: str) -> float: + """Sector-based pairwise correlation.""" + if t1 == "TSLA" or t2 == "TSLA": + return TSLA_CORR + if t1 in TECH_TICKERS and t2 in TECH_TICKERS: + return INTRA_TECH_CORR + if t1 in FINANCE_TICKERS and t2 in FINANCE_TICKERS: + return INTRA_FINANCE_CORR + return CROSS_GROUP_CORR ``` -### 6.2 SimulatorDataSource — Async Wrapper +### 6.2 SimulatorDataSource ```python +UPDATE_INTERVAL = 0.5 # seconds + + class SimulatorDataSource(MarketDataSource): - """MarketDataSource backed by the GBM simulator. + """MarketDataSource backed by GBMSimulator. - Runs a background asyncio task that calls GBMSimulator.step() every - `update_interval` seconds and writes results to the PriceCache. + Runs one asyncio background task that calls GBMSimulator.step() every + UPDATE_INTERVAL seconds and writes results to the PriceCache. + The SSE endpoint reads from the same cache — there is no second timer. 
""" def __init__( self, price_cache: PriceCache, - update_interval: float = 0.5, - event_probability: float = 0.001, + update_interval: float = UPDATE_INTERVAL, + event_probability: float = _EVENT_PROB, ) -> None: self._cache = price_cache self._interval = update_interval @@ -572,11 +517,8 @@ class SimulatorDataSource(MarketDataSource): self._task: asyncio.Task | None = None async def start(self, tickers: list[str]) -> None: - self._sim = GBMSimulator( - tickers=tickers, - event_probability=self._event_prob, - ) - # Seed the cache with initial prices so SSE has data immediately + self._sim = GBMSimulator(tickers=tickers, event_probability=self._event_prob) + # Seed cache immediately so SSE has data on its very first tick for ticker in tickers: price = self._sim.get_price(ticker) if price is not None: @@ -597,23 +539,21 @@ class SimulatorDataSource(MarketDataSource): async def add_ticker(self, ticker: str) -> None: if self._sim: self._sim.add_ticker(ticker) - # Seed cache immediately so the ticker has a price right away price = self._sim.get_price(ticker) if price is not None: self._cache.update(ticker=ticker, price=price) - logger.info("Simulator: added ticker %s", ticker) + logger.info("Simulator: added %s", ticker) async def remove_ticker(self, ticker: str) -> None: if self._sim: self._sim.remove_ticker(ticker) self._cache.remove(ticker) - logger.info("Simulator: removed ticker %s", ticker) + logger.info("Simulator: removed %s", ticker) def get_tickers(self) -> list[str]: - return list(self._sim._tickers) if self._sim else [] + return self._sim.get_tickers() if self._sim else [] async def _run_loop(self) -> None: - """Core loop: step the simulation, write to cache, sleep.""" while True: try: if self._sim: @@ -627,9 +567,13 @@ class SimulatorDataSource(MarketDataSource): ### Key behaviors -- **Immediate seeding**: When `start()` is called, the cache is populated with seed prices *before* the loop begins. 
This means the SSE endpoint has data to send on its very first tick, with no blank-screen delay. -- **Graceful cancellation**: `stop()` cancels the task and awaits it, catching `CancelledError`. This ensures clean shutdown during FastAPI lifespan teardown. -- **Exception resilience**: The loop catches exceptions per-step so a single bad tick doesn't kill the entire data feed. +| Behavior | Detail | +|----------|--------| +| Immediate seeding | `start()` populates the cache before the loop begins — no blank-screen delay | +| Prices never negative | `exp()` is always positive | +| Graceful shutdown | `stop()` cancels and awaits the task, catching `CancelledError` | +| Exception resilience | Loop catches per-step exceptions — a bad tick doesn't kill the feed | +| Cholesky rebuild | O(n²), negligible for n < 50 tickers | --- @@ -637,7 +581,9 @@ class SimulatorDataSource(MarketDataSource): **File: `backend/app/market/massive_client.py`** -Polls the Massive (formerly Polygon.io) REST API snapshot endpoint on a configurable interval. The synchronous Massive client runs in `asyncio.to_thread()` to avoid blocking the event loop. +Polls the Massive (Polygon.io) REST API snapshot endpoint. The synchronous `polygon` SDK runs in `asyncio.to_thread()` to avoid blocking the event loop. + +**Package**: `polygon-api-client` (install with `uv add polygon-api-client`) ```python from __future__ import annotations @@ -651,40 +597,37 @@ from .interface import MarketDataSource logger = logging.getLogger(__name__) +POLL_INTERVAL_FREE_TIER = 15.0 # 5 req/min free tier → poll every 15s + class MassiveDataSource(MarketDataSource): """MarketDataSource backed by the Massive (Polygon.io) REST API. Polls GET /v2/snapshot/locale/us/markets/stocks/tickers for all watched tickers in a single API call, then writes results to the PriceCache. 
- - Rate limits: - - Free tier: 5 req/min → poll every 15s (default) - - Paid tiers: higher limits → poll every 2-5s """ def __init__( self, api_key: str, price_cache: PriceCache, - poll_interval: float = 15.0, + poll_interval: float = POLL_INTERVAL_FREE_TIER, ) -> None: self._api_key = api_key self._cache = price_cache self._interval = poll_interval self._tickers: list[str] = [] self._task: asyncio.Task | None = None - self._client: Any = None # Lazy import to avoid hard dependency + self._client: Any = None async def start(self, tickers: list[str]) -> None: - # Lazy import: only import massive when actually using real market data. - # This means the massive package is not required when using the simulator. - from massive import RESTClient - + # Lazy import: only required when MASSIVE_API_KEY is set. + # Students without an API key never need this package installed. + from polygon import RESTClient self._client = RESTClient(api_key=self._api_key) self._tickers = list(tickers) - # Do an immediate first poll so the cache has data right away + # Immediate first poll so the cache has data before the loop starts await self._poll_once() self._task = asyncio.create_task(self._poll_loop(), name="massive-poller") @@ -709,33 +652,27 @@ class MassiveDataSource(MarketDataSource): ticker = ticker.upper().strip() if ticker not in self._tickers: self._tickers.append(ticker) - logger.info("Massive: added ticker %s (will appear on next poll)", ticker) + logger.info("Massive: added %s (will appear on next poll)", ticker) async def remove_ticker(self, ticker: str) -> None: ticker = ticker.upper().strip() self._tickers = [t for t in self._tickers if t != ticker] self._cache.remove(ticker) - logger.info("Massive: removed ticker %s", ticker) + logger.info("Massive: removed %s", ticker) def get_tickers(self) -> list[str]: return list(self._tickers) - # --- Internal --- - async def _poll_loop(self) -> None: - """Poll on interval. 
First poll already happened in start().""" + """Sleep first, then poll. (First poll already happened in start().)""" while True: await asyncio.sleep(self._interval) await self._poll_once() async def _poll_once(self) -> None: - """Execute one poll cycle: fetch snapshots, update cache.""" if not self._tickers or not self._client: return - try: - # The Massive RESTClient is synchronous — run in a thread to - # avoid blocking the event loop. snapshots = await asyncio.to_thread(self._fetch_snapshots) processed = 0 for snap in snapshots: @@ -743,53 +680,48 @@ class MassiveDataSource(MarketDataSource): price = snap.last_trade.price # Massive timestamps are Unix milliseconds → convert to seconds timestamp = snap.last_trade.timestamp / 1000.0 + # Use day.open as open_price; fall back to prev_day.close pre-market + open_price = None + if snap.day and snap.day.open: + open_price = snap.day.open + elif snap.prev_day and snap.prev_day.close: + open_price = snap.prev_day.close self._cache.update( ticker=snap.ticker, price=price, timestamp=timestamp, + open_price=open_price, ) processed += 1 except (AttributeError, TypeError) as e: - logger.warning( - "Skipping snapshot for %s: %s", - getattr(snap, "ticker", "???"), - e, - ) - logger.debug("Massive poll: updated %d/%d tickers", processed, len(self._tickers)) - + logger.warning("Skipping snapshot for %s: %s", getattr(snap, "ticker", "?"), e) + logger.debug("Massive poll: %d/%d tickers updated", processed, len(self._tickers)) except Exception as e: logger.error("Massive poll failed: %s", e) - # Don't re-raise — the loop will retry on the next interval. - # Common failures: 401 (bad key), 429 (rate limit), network errors. + # Don't re-raise — the loop retries on the next interval def _fetch_snapshots(self) -> list: - """Synchronous call to the Massive REST API. 
Runs in a thread.""" - from massive.rest.models import SnapshotMarketType - - return self._client.get_snapshot_all( - market_type=SnapshotMarketType.STOCKS, - tickers=self._tickers, - ) + """Synchronous SDK call. Runs in a thread via asyncio.to_thread().""" + return self._client.get_snapshot_all("stocks", tickers=self._tickers) ``` -### Error handling philosophy - -The Massive poller is intentionally resilient: +### Error handling -| Error | Behavior | -|-------|----------| -| **401 Unauthorized** | Logged as error. Poller keeps running (user might fix `.env` and restart). | -| **429 Rate Limited** | Logged as error. Next poll retries after `poll_interval` seconds. | -| **Network timeout** | Logged as error. Retries automatically on next cycle. | -| **Malformed snapshot** | Individual ticker skipped with warning. Other tickers still processed. | -| **All tickers fail** | Cache retains last-known prices. SSE keeps streaming stale data (better than no data). | +| Scenario | Behavior | +|----------|----------| +| 401 Unauthorized | Logged as error; poller keeps running (fix `.env`, restart) | +| 429 Rate Limited | Logged as error; retries after `poll_interval` seconds | +| Network timeout | Logged as error; retries automatically next cycle | +| Malformed snapshot | Individual ticker skipped with warning; others still processed | +| All tickers fail | Cache retains last-known prices; SSE streams stale data | -### Lazy import strategy +### open_price handling -`from massive import RESTClient` happens inside `start()`, not at module import time. This means: -- The `massive` package is only required when `MASSIVE_API_KEY` is set. -- Students who don't have a Massive API key don't need the package installed at all. -- The simulator path has zero external dependencies beyond `numpy`. 
+``` +day.open set → use it (normal trading hours) +day.open missing → fall back to prev_day.close (pre-market) +both missing → pass None; PriceCache sets open_price = first observed price +``` --- @@ -810,33 +742,28 @@ logger = logging.getLogger(__name__) def create_market_data_source(price_cache: PriceCache) -> MarketDataSource: - """Create the appropriate market data source based on environment variables. + """Select simulator or Massive based on MASSIVE_API_KEY env var. - - MASSIVE_API_KEY set and non-empty → MassiveDataSource (real market data) - - Otherwise → SimulatorDataSource (GBM simulation) - - Returns an unstarted source. Caller must await source.start(tickers). + Returns an unstarted source. Caller must: await source.start(tickers). """ api_key = os.environ.get("MASSIVE_API_KEY", "").strip() if api_key: from .massive_client import MassiveDataSource - - logger.info("Market data source: Massive API (real data)") + logger.info("Market data: Massive API (real data)") return MassiveDataSource(api_key=api_key, price_cache=price_cache) else: from .simulator import SimulatorDataSource - - logger.info("Market data source: GBM Simulator") + logger.info("Market data: GBM Simulator") return SimulatorDataSource(price_cache=price_cache) ``` -### Usage at app startup +### Usage ```python price_cache = PriceCache() source = create_market_data_source(price_cache) -await source.start(initial_tickers) # e.g., ["AAPL", "GOOGL", ...] +await source.start(["AAPL", "GOOGL", "MSFT", ...]) # tickers from DB watchlist ``` --- @@ -845,7 +772,7 @@ await source.start(initial_tickers) # e.g., ["AAPL", "GOOGL", ...] **File: `backend/app/market/stream.py`** -The SSE endpoint is a FastAPI route that holds open a long-lived HTTP connection and pushes price updates to the client as `text/event-stream`. +Long-lived HTTP connection pushing price updates as `text/event-stream`. The SSE loop reads from the cache on its own 500ms cadence — decoupled from the data source's update rate. 
```python from __future__ import annotations @@ -853,7 +780,6 @@ from __future__ import annotations import asyncio import json import logging -import time from fastapi import APIRouter, Request from fastapi.responses import StreamingResponse @@ -866,22 +792,17 @@ router = APIRouter(prefix="/api/stream", tags=["streaming"]) def create_stream_router(price_cache: PriceCache) -> APIRouter: - """Create the SSE streaming router with a reference to the price cache. - - This factory pattern lets us inject the PriceCache without globals. - """ + """Factory so the router has access to the PriceCache without globals.""" @router.get("/prices") async def stream_prices(request: Request) -> StreamingResponse: - """SSE endpoint for live price updates. - - Streams all tracked ticker prices every ~500ms. The client connects - with EventSource and receives events in the format: - - data: {"AAPL": {"ticker": "AAPL", "price": 190.50, ...}, ...} + """SSE endpoint. Client connects with EventSource, receives all + tracked ticker prices every ~500ms. - Includes a retry directive so the browser auto-reconnects on - disconnection (EventSource built-in behavior). + Wire format per event: + data: {"ticker":"AAPL","price":190.50,"prev_price":190.42, + "open_price":190.00,"timestamp":"2026-04-10T12:00:00.500Z", + "direction":"up"} """ return StreamingResponse( _generate_events(price_cache, request), @@ -900,13 +821,9 @@ async def _generate_events( price_cache: PriceCache, request: Request, interval: float = 0.5, -) -> None: - """Async generator that yields SSE-formatted price events. - - Sends all prices every `interval` seconds. Stops when the client - disconnects (detected via request.is_disconnected()). - """ - # Tell the client to retry after 1 second if the connection drops +): + """Yield one SSE event per ticker per interval. 
One event = one JSON object.""" + # Tell the browser to retry after 1 second on disconnect yield "retry: 1000\n\n" last_version = -1 @@ -915,7 +832,6 @@ async def _generate_events( try: while True: - # Check for client disconnect if await request.is_disconnected(): logger.info("SSE client disconnected: %s", client_ip) break @@ -924,85 +840,66 @@ async def _generate_events( if current_version != last_version: last_version = current_version prices = price_cache.get_all() - - if prices: - data = { - ticker: update.to_dict() - for ticker, update in prices.items() - } - payload = json.dumps(data) + for update in prices.values(): + payload = json.dumps(update.to_sse_dict()) yield f"data: {payload}\n\n" await asyncio.sleep(interval) except asyncio.CancelledError: - logger.info("SSE stream cancelled for: %s", client_ip) + logger.info("SSE stream cancelled: %s", client_ip) ``` -### SSE wire format - -Each event the client receives looks like this: +### SSE wire format (one event per ticker) ``` -data: {"AAPL":{"ticker":"AAPL","price":190.50,"previous_price":190.42,"timestamp":1707580800.5,"change":0.08,"change_percent":0.042,"direction":"up"},"GOOGL":{"ticker":"GOOGL","price":175.12,...}} +data: {"ticker":"AAPL","price":190.50,"prev_price":190.42,"open_price":190.00,"timestamp":"2026-04-10T12:00:00.500Z","direction":"up"} + +data: {"ticker":"GOOGL","price":175.12,"prev_price":175.08,"open_price":175.00,"timestamp":"2026-04-10T12:00:00.500Z","direction":"up"} ``` -The client parses this with: +### Frontend consumption ```javascript const eventSource = new EventSource('/api/stream/prices'); eventSource.onmessage = (event) => { - const prices = JSON.parse(event.data); - // prices is { "AAPL": { ticker, price, previous_price, ... }, ... 
} + const update = JSON.parse(event.data); + // update: { ticker, price, prev_price, open_price, timestamp, direction } + // Flash the price cell, update sparkline, compute daily change % + const dailyChange = (update.price - update.open_price) / update.open_price * 100; +}; +eventSource.onerror = () => { + // EventSource auto-reconnects after `retry` ms — no manual handling needed }; ``` -### Why poll-and-push instead of event-driven? - -The SSE endpoint polls the cache on a fixed interval rather than being notified by the data source. This is simpler and produces predictable, evenly-spaced updates for the frontend. The frontend accumulates these into sparkline charts — regular spacing is important for clean visualization. - --- ## 10. FastAPI Lifecycle Integration -The market data system starts and stops with the FastAPI application using the `lifespan` context manager pattern. - **In `backend/app/main.py`:** ```python from contextlib import asynccontextmanager - from fastapi import FastAPI - -from app.market.cache import PriceCache -from app.market.factory import create_market_data_source -from app.market.interface import MarketDataSource -from app.market.stream import create_stream_router +from app.market import PriceCache, MarketDataSource, create_market_data_source, create_stream_router @asynccontextmanager async def lifespan(app: FastAPI): - """Manage startup and shutdown of background services.""" - # --- STARTUP --- - - # 1. Create the shared price cache price_cache = PriceCache() app.state.price_cache = price_cache - # 2. Create and start the market data source source = create_market_data_source(price_cache) app.state.market_source = source - # 3. Load initial tickers from the database watchlist - initial_tickers = await load_watchlist_tickers() # reads from SQLite + initial_tickers = await load_watchlist_tickers() # reads default 10 from SQLite await source.start(initial_tickers) - # 4. 
Register the SSE streaming router - stream_router = create_stream_router(price_cache) - app.include_router(stream_router) + app.include_router(create_stream_router(price_cache)) - yield # App is running + yield # App running # --- SHUTDOWN --- await source.stop() @@ -1011,23 +908,18 @@ async def lifespan(app: FastAPI): app = FastAPI(title="FinAlly", lifespan=lifespan) -# Dependency for injecting the price cache into route handlers +# FastAPI dependencies for injecting market state into route handlers def get_price_cache() -> PriceCache: return app.state.price_cache - def get_market_source() -> MarketDataSource: return app.state.market_source ``` -### Accessing market data from other routes - -Other parts of the backend (trade execution, portfolio valuation, watchlist management) access the price cache and data source via FastAPI's dependency injection: +### Using market state in other routes ```python -from fastapi import APIRouter, Depends - -router = APIRouter(prefix="/api") +from fastapi import Depends @router.post("/portfolio/trade") async def execute_trade( @@ -1036,20 +928,17 @@ async def execute_trade( ): current_price = price_cache.get_price(trade.ticker) if current_price is None: - raise HTTPException(404, f"No price available for {trade.ticker}") - # ... execute trade at current_price ... + raise HTTPException(400, f"No price available for {trade.ticker}. Try again shortly.") + # ... execute at current_price ... @router.post("/watchlist") async def add_to_watchlist( payload: WatchlistAdd, source: MarketDataSource = Depends(get_market_source), - price_cache: PriceCache = Depends(get_price_cache), ): - # Add to database ... - # Then tell the data source to start tracking it + await db.insert_watchlist(payload.ticker) await source.add_ticker(payload.ticker) - # ... @router.delete("/watchlist/{ticker}") @@ -1057,149 +946,120 @@ async def remove_from_watchlist( ticker: str, source: MarketDataSource = Depends(get_market_source), ): - # Remove from database ... 
- # Then stop tracking - await source.remove_ticker(ticker) - # ... + await db.delete_watchlist(ticker) + position = await db.get_position(ticker) + if position is None or position.quantity == 0: + await source.remove_ticker(ticker) ``` --- ## 11. Watchlist Coordination -When the watchlist changes (via REST API or LLM chat), the market data source must be notified so it tracks the right set of tickers. - -### Flow: Adding a Ticker +### Flow: Adding a ticker ``` -User (or LLM) → POST /api/watchlist {ticker: "PYPL"} - → Insert into watchlist table (SQLite) +POST /api/watchlist {"ticker": "PYPL"} + → INSERT into watchlist table → await source.add_ticker("PYPL") - Simulator: adds to GBMSimulator, rebuilds Cholesky, seeds cache - Massive: appends to ticker list, appears on next poll - → Return success (ticker + current price if available) + Simulator: adds to GBMSimulator ($100 seed), rebuilds Cholesky, seeds cache + Massive: appends to ticker list, included on next poll + → Return watchlist entry with price (null if Massive hasn't polled yet) ``` -### Flow: Removing a Ticker +### Flow: Removing a ticker ``` -User (or LLM) → DELETE /api/watchlist/PYPL - → Delete from watchlist table (SQLite) - → await source.remove_ticker("PYPL") - Simulator: removes from GBMSimulator, rebuilds Cholesky, removes from cache - Massive: removes from ticker list, removes from cache +DELETE /api/watchlist/PYPL + → DELETE from watchlist table + → (check: does user hold PYPL?) + If no position → await source.remove_ticker("PYPL") + simulator stops tracking it, cache entry purged + If open position → keep tracking for portfolio valuation → Return success ``` -### Edge case: Ticker has an open position +### Edge case: open position -If the user removes a ticker from the watchlist but still holds shares, the ticker should remain in the data source so portfolio valuation stays accurate. 
The watchlist route should check for this: - -```python -@router.delete("/watchlist/{ticker}") -async def remove_from_watchlist( - ticker: str, - source: MarketDataSource = Depends(get_market_source), -): - # Remove from watchlist table - await db.delete_watchlist_entry(ticker) - - # Only stop tracking if no open position - position = await db.get_position(ticker) - if position is None or position.quantity == 0: - await source.remove_ticker(ticker) - - return {"status": "ok"} -``` +Removing a ticker from the watchlist while holding shares must not stop price tracking — portfolio valuation needs a current price for unrealized P&L. The route checks for an open position before calling `remove_ticker`. --- ## 12. Testing Strategy -### 12.1 Unit Tests for GBMSimulator - -**File: `backend/tests/market/test_simulator.py`** +### Unit tests: `GBMSimulator` ```python -import math +# backend/tests/market/test_simulator.py import pytest from app.market.simulator import GBMSimulator -from app.market.seed_prices import SEED_PRICES +from app.market.seed_data import SEED_PRICES, DEFAULT_SEED_PRICE class TestGBMSimulator: - """Unit tests for the GBM price simulator.""" def test_step_returns_all_tickers(self): - sim = GBMSimulator(tickers=["AAPL", "GOOGL"]) + sim = GBMSimulator(["AAPL", "GOOGL"]) result = sim.step() assert set(result.keys()) == {"AAPL", "GOOGL"} - def test_prices_are_positive(self): - """GBM prices can never go negative (exp() is always positive).""" - sim = GBMSimulator(tickers=["AAPL"]) + def test_prices_always_positive(self): + sim = GBMSimulator(["AAPL"]) for _ in range(10_000): - prices = sim.step() - assert prices["AAPL"] > 0 + assert sim.step()["AAPL"] > 0 - def test_initial_prices_match_seeds(self): - sim = GBMSimulator(tickers=["AAPL"]) - # Before any step, price should be the seed price + def test_initial_price_matches_seed(self): + sim = GBMSimulator(["AAPL"]) assert sim.get_price("AAPL") == SEED_PRICES["AAPL"] + def 
test_unknown_ticker_uses_default_seed(self): + sim = GBMSimulator(["ZZZZ"]) + assert sim.get_price("ZZZZ") == DEFAULT_SEED_PRICE + def test_add_ticker(self): - sim = GBMSimulator(tickers=["AAPL"]) + sim = GBMSimulator(["AAPL"]) sim.add_ticker("TSLA") - result = sim.step() - assert "TSLA" in result + assert "TSLA" in sim.step() def test_remove_ticker(self): - sim = GBMSimulator(tickers=["AAPL", "GOOGL"]) + sim = GBMSimulator(["AAPL", "GOOGL"]) sim.remove_ticker("GOOGL") result = sim.step() assert "GOOGL" not in result assert "AAPL" in result def test_add_duplicate_is_noop(self): - sim = GBMSimulator(tickers=["AAPL"]) + sim = GBMSimulator(["AAPL"]) sim.add_ticker("AAPL") assert len(sim._tickers) == 1 - def test_remove_nonexistent_is_noop(self): - sim = GBMSimulator(tickers=["AAPL"]) + def test_remove_absent_is_noop(self): + sim = GBMSimulator(["AAPL"]) sim.remove_ticker("NOPE") # Should not raise - def test_unknown_ticker_gets_random_seed_price(self): - sim = GBMSimulator(tickers=["ZZZZ"]) - price = sim.get_price("ZZZZ") - assert 50.0 <= price <= 300.0 - def test_empty_step(self): - sim = GBMSimulator(tickers=[]) - result = sim.step() - assert result == {} + sim = GBMSimulator([]) + assert sim.step() == {} - def test_prices_change_over_time(self): - """After many steps, prices should have drifted from their seeds.""" - sim = GBMSimulator(tickers=["AAPL"]) + def test_cholesky_none_for_single_ticker(self): + sim = GBMSimulator(["AAPL"]) + assert sim._cholesky is None + + def test_cholesky_built_for_two_tickers(self): + sim = GBMSimulator(["AAPL", "GOOGL"]) + assert sim._cholesky is not None + + def test_prices_drift_over_time(self): + sim = GBMSimulator(["AAPL"]) for _ in range(1000): sim.step() - # Price should have changed (extremely unlikely to be exactly the seed) assert sim.get_price("AAPL") != SEED_PRICES["AAPL"] - - def test_cholesky_rebuilds_on_add(self): - sim = GBMSimulator(tickers=["AAPL"]) - assert sim._cholesky is None # Only 1 ticker, no correlation 
matrix - sim.add_ticker("GOOGL") - assert sim._cholesky is not None # Now 2 tickers, matrix exists ``` -### 12.2 Unit Tests for PriceCache - -**File: `backend/tests/market/test_cache.py`** +### Unit tests: `PriceCache` ```python -import pytest +# backend/tests/market/test_cache.py from app.market.cache import PriceCache @@ -1216,21 +1076,36 @@ class TestPriceCache: cache = PriceCache() update = cache.update("AAPL", 190.50) assert update.direction == "flat" - assert update.previous_price == 190.50 + assert update.prev_price == 190.50 + assert update.open_price == 190.50 def test_direction_up(self): cache = PriceCache() cache.update("AAPL", 190.00) update = cache.update("AAPL", 191.00) assert update.direction == "up" - assert update.change == 1.00 + assert update.prev_price == 190.00 def test_direction_down(self): cache = PriceCache() cache.update("AAPL", 190.00) update = cache.update("AAPL", 189.00) assert update.direction == "down" - assert update.change == -1.00 + + def test_open_price_never_changes(self): + cache = PriceCache() + cache.update("AAPL", 190.00) + cache.update("AAPL", 195.00) + cache.update("AAPL", 185.00) + assert cache.get("AAPL").open_price == 190.00 + + def test_open_price_from_first_call(self): + cache = PriceCache() + cache.update("AAPL", 190.00, open_price=185.00) + assert cache.get("AAPL").open_price == 185.00 + # Second update with different open_price is ignored + cache.update("AAPL", 192.00, open_price=999.00) + assert cache.get("AAPL").open_price == 185.00 def test_remove(self): cache = PriceCache() @@ -1242,8 +1117,7 @@ class TestPriceCache: cache = PriceCache() cache.update("AAPL", 190.00) cache.update("GOOGL", 175.00) - all_prices = cache.get_all() - assert set(all_prices.keys()) == {"AAPL", "GOOGL"} + assert set(cache.get_all().keys()) == {"AAPL", "GOOGL"} def test_version_increments(self): cache = PriceCache() @@ -1252,19 +1126,12 @@ class TestPriceCache: assert cache.version == v0 + 1 cache.update("AAPL", 191.00) assert 
cache.version == v0 + 2 - - def test_get_price_convenience(self): - cache = PriceCache() - cache.update("AAPL", 190.50) - assert cache.get_price("AAPL") == 190.50 - assert cache.get_price("NOPE") is None ``` -### 12.3 Integration Test: SimulatorDataSource - -**File: `backend/tests/market/test_simulator_source.py`** +### Integration tests: `SimulatorDataSource` ```python +# backend/tests/market/test_simulator_source.py import asyncio import pytest from app.market.cache import PriceCache @@ -1274,74 +1141,52 @@ from app.market.simulator import SimulatorDataSource @pytest.mark.asyncio class TestSimulatorDataSource: - async def test_start_populates_cache(self): + async def test_start_seeds_cache_immediately(self): cache = PriceCache() source = SimulatorDataSource(price_cache=cache, update_interval=0.1) await source.start(["AAPL", "GOOGL"]) - - # Cache should have seed prices immediately (before first loop tick) + # Cache populated before first loop tick assert cache.get("AAPL") is not None assert cache.get("GOOGL") is not None - - await source.stop() - - async def test_prices_update_over_time(self): - cache = PriceCache() - source = SimulatorDataSource(price_cache=cache, update_interval=0.05) - await source.start(["AAPL"]) - - initial = cache.get("AAPL").price - await asyncio.sleep(0.3) # Several update cycles - current = cache.get("AAPL").price - - # Extremely unlikely to be identical after many steps - # (but not impossible, so this is a probabilistic test) - assert current != initial or True # Soft assertion - await source.stop() - async def test_stop_is_clean(self): + async def test_stop_is_idempotent(self): cache = PriceCache() - source = SimulatorDataSource(price_cache=cache, update_interval=0.1) + source = SimulatorDataSource(price_cache=cache) await source.start(["AAPL"]) await source.stop() - # Double stop should not raise - await source.stop() + await source.stop() # Should not raise async def test_add_and_remove_ticker(self): cache = PriceCache() source = 
SimulatorDataSource(price_cache=cache, update_interval=0.1) await source.start(["AAPL"]) - await source.add_ticker("TSLA") assert "TSLA" in source.get_tickers() assert cache.get("TSLA") is not None - await source.remove_ticker("TSLA") assert "TSLA" not in source.get_tickers() assert cache.get("TSLA") is None - await source.stop() ``` -### 12.4 Unit Test: MassiveDataSource (Mocked) - -**File: `backend/tests/market/test_massive.py`** +### Unit tests: `MassiveDataSource` (mocked) ```python -import asyncio +# backend/tests/market/test_massive.py from unittest.mock import MagicMock, patch import pytest from app.market.cache import PriceCache from app.market.massive_client import MassiveDataSource -def _make_snapshot(ticker: str, price: float, timestamp_ms: int) -> MagicMock: - """Create a mock Massive snapshot object.""" +def _snap(ticker: str, price: float, ts_ms: int) -> MagicMock: snap = MagicMock() snap.ticker = ticker snap.last_trade.price = price - snap.last_trade.timestamp = timestamp_ms + snap.last_trade.timestamp = ts_ms + snap.day.open = price * 0.99 + snap.prev_day.close = price * 0.98 return snap @@ -1350,18 +1195,14 @@ class TestMassiveDataSource: async def test_poll_updates_cache(self): cache = PriceCache() - source = MassiveDataSource( - api_key="test-key", - price_cache=cache, - poll_interval=60.0, # Long interval so the loop doesn't auto-poll - ) - - mock_snapshots = [ - _make_snapshot("AAPL", 190.50, 1707580800000), - _make_snapshot("GOOGL", 175.25, 1707580800000), - ] - - with patch.object(source, "_fetch_snapshots", return_value=mock_snapshots): + source = MassiveDataSource("test-key", cache, poll_interval=999) + source._tickers = ["AAPL", "GOOGL"] + source._client = MagicMock() + + with patch.object(source, "_fetch_snapshots", return_value=[ + _snap("AAPL", 190.50, 1707580800000), + _snap("GOOGL", 175.25, 1707580800000), + ]): await source._poll_once() assert cache.get_price("AAPL") == 190.50 @@ -1369,110 +1210,84 @@ class TestMassiveDataSource: 
async def test_malformed_snapshot_skipped(self): cache = PriceCache() - source = MassiveDataSource( - api_key="test-key", - price_cache=cache, - poll_interval=60.0, - ) + source = MassiveDataSource("test-key", cache, poll_interval=999) source._tickers = ["AAPL", "BAD"] + source._client = MagicMock() - good_snap = _make_snapshot("AAPL", 190.50, 1707580800000) - bad_snap = MagicMock() - bad_snap.ticker = "BAD" - bad_snap.last_trade = None # Will cause AttributeError + bad = MagicMock() + bad.ticker = "BAD" + bad.last_trade = None # AttributeError when accessing .price - with patch.object(source, "_fetch_snapshots", return_value=[good_snap, bad_snap]): + with patch.object(source, "_fetch_snapshots", return_value=[ + _snap("AAPL", 190.50, 1707580800000), bad + ]): await source._poll_once() - # Good ticker processed, bad one skipped assert cache.get_price("AAPL") == 190.50 assert cache.get_price("BAD") is None - async def test_api_error_does_not_crash(self): + async def test_api_error_does_not_raise(self): cache = PriceCache() - source = MassiveDataSource( - api_key="test-key", - price_cache=cache, - poll_interval=60.0, - ) + source = MassiveDataSource("test-key", cache, poll_interval=999) source._tickers = ["AAPL"] + source._client = MagicMock() with patch.object(source, "_fetch_snapshots", side_effect=Exception("network error")): await source._poll_once() # Should not raise - assert cache.get_price("AAPL") is None # No update happened + assert cache.get_price("AAPL") is None ``` --- ## 13. Error Handling & Edge Cases -### 13.1 Startup: Empty Watchlist - -If the database has no watchlist entries (user deleted everything), `start()` receives an empty list. Both data sources handle this gracefully — the simulator produces no prices, the Massive poller skips its API call. The SSE endpoint sends empty events. When the user adds a ticker, the source starts tracking it immediately. 
+### Empty watchlist at startup -### 13.2 Price Cache Miss During Trade +Both sources handle `start([])` gracefully: simulator produces no prices, Massive skips its API call. When a ticker is added later, tracking begins immediately. -If a user tries to trade a ticker that has no cached price (e.g., just added to watchlist, Massive hasn't polled yet): +### Price cache miss during trade ```python price = price_cache.get_price(ticker) if price is None: - raise HTTPException( - status_code=400, - detail=f"Price not yet available for {ticker}. Please wait a moment and try again.", - ) + raise HTTPException(400, f"No price available for {ticker}. Try again shortly.") ``` -The simulator avoids this by seeding the cache in `add_ticker()`. The Massive client may have a brief gap — the HTTP 400 with a clear message is the correct response. - -### 13.3 Massive API Key Invalid +The simulator seeds the cache in `add_ticker()` so this should be rare. The Massive client may have a brief gap after a ticker is added (until the next poll). -If the API key is set but invalid, the first poll will fail with a 401. The poller logs the error and keeps retrying. The SSE endpoint streams empty data. The user sees no prices and a connection status indicator showing "connected" (SSE is working, just no data). The fix is to correct the API key and restart. +### Massive API key invalid -### 13.4 Thread Safety Under Load +401 errors are logged; the poller keeps running. The SSE stream continues (sending empty data). Users see no prices until the key is corrected and the container restarted. -The `PriceCache` uses `threading.Lock` which is a mutex — only one thread can hold it at a time. Under normal load (10 tickers, 2 updates/sec), lock contention is negligible. The critical section is tiny (dict lookup + assignment). 
+### Thread safety -If this ever became a bottleneck (hundreds of tickers, many concurrent SSE readers), the fix would be a `ReadWriteLock` — but that level of optimization is unnecessary for this project. +`PriceCache` uses `threading.Lock`. Under normal load (10–50 tickers, 2 updates/sec), lock contention is negligible. The critical section is a single dict assignment. -### 13.5 Simulator Precision +### GBM numerical stability -GBM with tiny `dt` produces very small per-tick moves. Floating-point precision is not a concern because: +- Prices use `exp()` — always positive, never zero or negative - Prices are `round()`ed to 2 decimal places in `GBMSimulator.step()` -- The exponential formulation (`exp(drift + diffusion)`) is numerically stable -- Prices are always positive (exponential function) +- The exponential formulation is numerically stable for the tiny `dt` values used --- ## 14. Configuration Summary -All tunable parameters and their defaults: - -| Parameter | Location | Default | Description | -|-----------|----------|---------|-------------| -| `MASSIVE_API_KEY` | Environment variable | `""` (empty) | If set, use Massive API; otherwise use simulator | -| `update_interval` | `SimulatorDataSource.__init__` | `0.5` (seconds) | Time between simulator ticks | -| `poll_interval` | `MassiveDataSource.__init__` | `15.0` (seconds) | Time between Massive API polls | -| `event_probability` | `GBMSimulator.__init__` | `0.001` | Chance of a random shock event per ticker per tick | -| `dt` | `GBMSimulator.__init__` | `~8.5e-8` | GBM time step (fraction of a trading year) | -| SSE push interval | `_generate_events()` | `0.5` (seconds) | Time between SSE pushes to the client | -| SSE retry directive | `_generate_events()` | `1000` (ms) | Browser EventSource reconnection delay | +| Parameter | Where | Default | Description | +|-----------|-------|---------|-------------| +| `MASSIVE_API_KEY` | env var | `""` | Set to enable real market data; empty = simulator | +| 
`update_interval` | `SimulatorDataSource.__init__` | `0.5s` | Simulator tick rate | +| `poll_interval` | `MassiveDataSource.__init__` | `15.0s` | Massive API poll rate | +| `event_probability` | `GBMSimulator.__init__` | `0.001` | Random shock chance per ticker per tick | +| `dt` | `GBMSimulator` | `~8.48e-8` | GBM time step (fraction of trading year) | +| SSE push interval | `_generate_events()` | `0.5s` | How often SSE sends to clients | +| SSE retry | `_generate_events()` | `1000ms` | Browser reconnect delay | ### Package `__init__.py` -**File: `backend/app/market/__init__.py`** - ```python -"""Market data subsystem for FinAlly. - -Public API: - PriceUpdate - Immutable price snapshot dataclass - PriceCache - Thread-safe in-memory price store - MarketDataSource - Abstract interface for data providers - create_market_data_source - Factory that selects simulator or Massive - create_stream_router - FastAPI router factory for SSE endpoint -""" +# backend/app/market/__init__.py from .cache import PriceCache from .factory import create_market_data_source diff --git a/planning/MARKET_DATA_REVIEW.md b/planning/MARKET_DATA_REVIEW.md index 61b4d6bf..78d5ec3e 100644 --- a/planning/MARKET_DATA_REVIEW.md +++ b/planning/MARKET_DATA_REVIEW.md @@ -1,173 +1,232 @@ -# Market Data Backend — Code Review +# Market Data — Code Review -**Date:** 2026-02-10 -**Scope:** `backend/app/market/` (8 source files) and `backend/tests/market/` (6 test files) +Reviewed against: `PLAN.md`, `MARKET_INTERFACE.md`, `MARKET_SIMULATOR.md`, `MASSIVE_API.md`, `MARKET_DATA_DESIGN.md` +Implementation: `backend/app/market/` +Tests: `backend/tests/market/` --- -## 1. 
Test Results Summary +## Test Results -**73 tests collected, 68 passed, 5 failed.** +``` +73 passed in 4.87s +Linting: All checks passed (ruff) +``` + +**Coverage by module:** + +| File | Coverage | Missing lines | +|------|----------|---------------| +| `models.py` | 100% | — | +| `cache.py` | 100% | — | +| `factory.py` | 100% | — | +| `interface.py` | 100% | — | +| `seed_prices.py` | 100% | — | +| `simulator.py` | 98% | 149, 268–269 (exception branch in run loop) | +| `massive_client.py` | 94% | 85–87, 125 (poll loop body, fetch method) | +| `stream.py` | **33%** | 26–48, 62–87 (the entire SSE route and generator) | +| **Total** | **91%** | | + +All tests pass and linting is clean. The implementation is well-structured, readable, and the core modules are very well covered. The issues below are all actionable and none require architectural rework. + +--- -All failures are in `test_massive.py` and stem from the same root cause: the `massive` package is not installed in the test environment, so `patch("app.market.massive_client.RESTClient")` fails with `AttributeError` because the module-level name `RESTClient` was never imported (it is lazy-imported inside methods). This is an environment issue, not a logic bug — the tests are correctly structured but require the `massive` package to be available (or `create=True` on the patch) so that the mock target exists. +## Issues -Failing tests: -- `test_poll_updates_cache` — `asyncio.to_thread` fails because `_fetch_snapshots` is not properly mocked when `massive` is absent -- `test_malformed_snapshot_skipped` — same cause -- `test_timestamp_conversion` — same cause -- `test_stop_cancels_task` — `patch("app.market.massive_client.RESTClient")` fails because the name doesn't exist at module level -- `test_start_immediate_poll` — same as above +### 1. `open_price` is missing entirely — CRITICAL -The underlying `_poll_once()` logic itself is correct. 
The 3 tests that mock `source._fetch_snapshots` directly fail because `asyncio.to_thread(self._fetch_snapshots)` calls the real method which tries to import `massive`. The 2 tests that use `patch("app.market.massive_client.RESTClient")` fail because the name doesn't exist in the module's namespace (lazy import). Both issues resolve when the `massive` package is installed. +**PLAN.md §6** is explicit: -**Lint (ruff):** Source code passes clean. Tests have 5 unused-import warnings (`pytest`, `math`, `asyncio` imported but not used in some test files). +> The price cache holds `{price, prev_price, open_price, timestamp, direction}` per ticker. `open_price` is the seed price at session start and is the baseline for "daily change %" calculations on the frontend. -**Coverage:** 84% overall. -| Module | Coverage | Notes | -|---|---|---| -| models.py | 100% | | -| cache.py | 100% | | -| interface.py | 100% | | -| seed_prices.py | 100% | | -| factory.py | 100% | | -| simulator.py | 98% | Uncovered: `_add_ticker_internal` duplicate guard (L145), exception log in `_run_loop` (L264-265) | -| massive_client.py | 56% | Expected — real API methods can't run without the massive package | -| stream.py | 31% | Expected — SSE generator requires a running ASGI server to test | +The actual `PriceUpdate` dataclass has no `open_price` field, and `PriceCache.update()` has no `open_price` parameter. The frontend formula `(price - open_price) / open_price * 100` cannot be computed from the SSE stream. `GET /api/watchlist` is also specified to return `open_price` — it cannot without this field. + +**Impact:** Daily change % column in the watchlist panel will not work. + +**Fix:** Add `open_price: float` to `PriceUpdate`. Update `PriceCache.update()` to accept and store it (set on first update, never overwritten). Seed it in `SimulatorDataSource.start()` and `add_ticker()`. Pass `day.open` / `prev_day.close` from the Massive client. --- -## 2. Architecture Assessment +### 2. 
Field name `previous_price` vs `prev_price` — CRITICAL -The market data subsystem is well-designed. It follows a clean strategy pattern: +**PLAN.md §6** pins the SSE event field names: -``` -MarketDataSource (ABC) -├── SimulatorDataSource (GBM simulator) -└── MassiveDataSource (Polygon.io REST poller) - │ - ▼ - PriceCache (shared, thread-safe) - │ - ▼ - SSE stream → Frontend +```json +{"ticker": "AAPL", "price": 191.50, "prev_price": 191.32, "timestamp": "...", "direction": "up"} ``` -**Strengths:** -- Clear separation of concerns across 8 focused modules -- Factory pattern with lazy imports — the `massive` package is only needed when `MASSIVE_API_KEY` is set -- PriceCache as the single point of truth decouples producers from consumers -- Immutable `PriceUpdate` dataclass with `frozen=True, slots=True` is correct and efficient -- The GBM math is proper: log-normal price paths via `exp((mu - 0.5*sigma^2)*dt + sigma*sqrt(dt)*Z)` -- Correlated moves via Cholesky decomposition are a nice touch for realism -- All background tasks are properly cancellable and idempotent on stop() +The implementation uses `previous_price` throughout (`PriceUpdate`, `PriceCache`, `to_dict()`, all tests). The frontend `EventSource` handler will read `event.prev_price` and get `undefined`. + +**Impact:** Price flash animations and change calculations on the frontend will silently break. + +**Fix:** Rename `previous_price` → `prev_price` in `models.py`, `cache.py`, all tests, and `CLAUDE.md`. --- -## 3. Issues Found +### 3. Timestamp format in SSE events is wrong — CRITICAL + +**PLAN.md §6** requires ISO 8601: -### 3.1 Build Configuration Bug (Severity: High) +```json +{"timestamp": "2026-04-10T12:00:00.500Z"} +``` + +`PriceUpdate.to_dict()` returns a raw Unix float (`1234567890.0`). The frontend will receive a number, not a parseable date string. -`pyproject.toml` is missing the hatchling package discovery configuration. 
Running `uv sync` fails: +**Fix:** In `to_dict()` (or a dedicated `to_sse_dict()` method), convert `self.timestamp` to ISO format: +```python +from datetime import datetime, timezone +ts = datetime.fromtimestamp(self.timestamp, tz=timezone.utc).isoformat().replace("+00:00", "Z") ``` -ValueError: Unable to determine which files to ship inside the wheel + +--- + +### 4. `massive_client.py` has top-level imports — HIGH + +```python +# massive_client.py lines 8–9 +from massive import RESTClient +from massive.rest.models import SnapshotMarketType ``` -**Fix:** Add to `pyproject.toml`: -```toml -[tool.hatch.build.targets.wheel] -packages = ["app"] +`factory.py` also imports `MassiveDataSource` eagerly at module load: + +```python +# factory.py lines 10–11 +from .massive_client import MassiveDataSource +from .simulator import SimulatorDataSource ``` -This will block Docker builds and any fresh `uv sync` until fixed. +The design intent (and PLAN.md) was that `massive` is an optional dependency only needed when `MASSIVE_API_KEY` is set. The top-level imports mean the package is imported on every app startup regardless. Because `massive` is also listed as a hard dependency in `pyproject.toml`, this doesn't cause a runtime crash today — but it contradicts the spec and will cause issues in any deployment that doesn't install `massive` (e.g., students following a minimal setup, Docker builds that want a leaner image). -### 3.2 Massive Test Fragility (Severity: Medium) +**Fix:** Move imports to inside the functions that use them (lazy imports): -Five tests in `test_massive.py` fail when the `massive` package is not installed. The root cause is twofold: +```python +# factory.py +def create_market_data_source(price_cache): + api_key = os.environ.get("MASSIVE_API_KEY", "").strip() + if api_key: + from .massive_client import MassiveDataSource # lazy + return MassiveDataSource(...) + else: + from .simulator import SimulatorDataSource # lazy + return SimulatorDataSource(...) +``` -1. 
**`_poll_once` uses `asyncio.to_thread(self._fetch_snapshots)`** — even when `_fetch_snapshots` is patched on the instance, `to_thread` runs it in a thread executor. Three tests mock `_fetch_snapshots` as a `MagicMock` (synchronous), but `asyncio.to_thread` wraps it in `loop.run_in_executor`, which works... except that when `_fetch_snapshots` is NOT patched, the real method tries `from massive.rest.models import SnapshotMarketType` and fails. +Move `massive` to `[project.optional-dependencies]` in `pyproject.toml`. -2. **`patch("app.market.massive_client.RESTClient")`** targets a name that doesn't exist at module level because `massive_client.py` uses a lazy import inside `start()`. The patch needs `create=True` or the import needs to be at module level behind a `TYPE_CHECKING` guard. +--- -These tests pass when `massive>=1.0.0` is installed (as `pyproject.toml` declares it as a core dependency), so this is technically a test-environment issue, not a code bug. However, since the whole point of lazy imports is to make `massive` optional for simulator-only use, the tests should also work without it. +### 5. SSE wire format deviates from spec — HIGH -### 3.3 `_generate_events` Return Type Annotation (Severity: Low) +`stream.py` sends all tickers in a single JSON object per tick: -`stream.py:54` declares the return type as `-> None` but the function is an async generator (it uses `yield`). The correct annotation would be `-> AsyncGenerator[str, None]` or simply removing the annotation. This doesn't cause runtime issues but is misleading for type checkers and developers. 
+``` +data: {"AAPL": {"ticker": "AAPL", ...}, "GOOGL": {"ticker": "GOOGL", ...}} +``` -### 3.4 `version` Property Not Under Lock (Severity: Low) +PLAN.md §6 and `MARKET_INTERFACE.md` specify individual per-ticker events: -`PriceCache.version` reads `self._version` without acquiring `self._lock`: +``` +data: {"ticker": "AAPL", "price": 191.50, "prev_price": 191.32, ...} -```python -@property -def version(self) -> int: - return self._version +data: {"ticker": "GOOGL", "price": 175.12, ...} ``` -On CPython with the GIL, reading a single `int` is atomic, so this won't cause corruption. However, it's inconsistent with the rest of the class, and if the project ever runs on a no-GIL Python build (PEP 703, Python 3.13t+), this could become a race. A minor concern given the current context. +The frontend `EventSource.onmessage` handler will need to be written to match whichever format the backend produces. These are different enough that one handler cannot handle both. If the frontend is coded to the spec (individual events), it will silently ignore the batched format or need unwrapping. + +This is worth resolving before the Frontend agent starts work so there is one unambiguous contract. + +--- -### 3.5 `SimulatorDataSource.get_tickers` Accesses Private State (Severity: Low) +### 6. `stream.py` has 33% test coverage — HIGH + +The SSE streaming endpoint is the core real-time feature and has essentially no test coverage. The untested code includes: + +- The FastAPI route registration (`/api/stream/prices`) +- The `_generate_events` async generator — version-change detection, disconnect handling, event formatting + +There are no tests that verify the wire format of SSE events, that the `retry` directive is sent, or that the generator stops on client disconnect. This is the highest-value gap in the test suite. 
+ +**Suggested tests:** Use `httpx` with `AsyncClient` and FastAPI's `TestClient` / async streaming to verify: (a) events are `text/event-stream`, (b) each event is valid JSON with the required fields, (c) version-based deduplication skips unchanged data. + +--- + +### 7. Unknown ticker seed price is random, not $100 — MEDIUM + +**PLAN.md §6:** + +> When a ticker not in the default seed list is added, it starts with a seed price of **$100.00**. + +**`simulator.py:151`:** -`simulator.py:254`: ```python -def get_tickers(self) -> list[str]: - return list(self._sim._tickers) if self._sim else [] +self._prices[ticker] = SEED_PRICES.get(ticker, random.uniform(50.0, 300.0)) ``` -This reaches into `GBMSimulator._tickers` (private attribute). `GBMSimulator` should expose a `get_tickers()` method or a `tickers` property to keep the boundary clean. - -### 3.6 Module-Level Router Instance (Severity: Low) +`seed_prices.py` correctly defines `DEFAULT_SEED_PRICE = 100.00`, but `simulator.py` does not import or use it — it uses an inline `random.uniform` instead. -`stream.py:16` creates a module-level `router` object, and `create_stream_router()` registers a route on it via closure. If `create_stream_router` were called twice (e.g., in tests), the `/prices` route would be registered twice on the same router. In practice this won't happen because the function is called once during app startup, but it's a latent footgun for testing. +**Impact:** Users adding custom tickers to the watchlist will see inconsistent, unpredictable starting prices rather than the specified $100. -### 3.7 Unused Imports in Tests (Severity: Trivial) +**Fix:** -Five lint warnings from `ruff`: -- `test_cache.py`: unused `pytest` -- `test_factory.py`: unused `pytest` -- `test_massive.py`: unused `asyncio` -- `test_simulator.py`: unused `math`, unused `pytest` +```python +from .seed_prices import DEFAULT_SEED_PRICE, SEED_PRICES, TICKER_PARAMS +# ... 
+self._prices[ticker] = SEED_PRICES.get(ticker, DEFAULT_SEED_PRICE) +``` --- -## 4. Design Observations +### 8. `create_stream_router` mutates a module-level router — MINOR -### 4.1 Things Done Well +```python +# stream.py +router = APIRouter(prefix="/api/stream", tags=["streaming"]) + +def create_stream_router(price_cache: PriceCache) -> APIRouter: + @router.get("/prices") # decorates the module-level router + async def stream_prices(...): + ... + return router +``` -- **GBM parameter tuning is thoughtful.** TSLA at sigma=0.50 vs V at 0.17 reflects real-world volatility differences. The shock event system (~0.1% per tick, producing visible moves every ~50s) adds visual drama without destabilizing prices. -- **Cholesky decomposition for correlated moves** is the mathematically correct approach. The sector-based correlation structure (tech 0.6, finance 0.5, cross 0.3) is reasonable. -- **Defensive error handling in both data sources.** Both `_run_loop` (simulator) and `_poll_once`/`_poll_loop` (massive) catch exceptions and continue, which is essential for a long-running background service. -- **SSE implementation is clean.** The version-based change detection avoids sending redundant payloads. The `retry: 1000\n\n` directive ensures browser auto-reconnect. Nginx buffering is proactively disabled. -- **Seed prices in the cache at start** means the frontend gets data on the first SSE poll, with no visible delay. -- **Thread-safe cache with Lock** is the right choice since the Massive client runs API calls via `asyncio.to_thread`. +Calling `create_stream_router()` more than once (e.g., in tests) will register duplicate routes on the same `router` object. The factory pattern should return a fresh router: -### 4.2 Missing Tests +```python +def create_stream_router(price_cache: PriceCache) -> APIRouter: + router = APIRouter(prefix="/api/stream", tags=["streaming"]) + @router.get("/prices") + async def stream_prices(...): + ... 
+ return router +``` -- **SSE streaming (`stream.py`)** at 31% coverage has no dedicated tests. Testing SSE requires an ASGI test client (e.g., `httpx.AsyncClient` with `app`). Given that this is the primary consumer of PriceCache, even a basic integration test would add confidence. -- **No concurrent/thread-safety test for PriceCache.** The lock usage looks correct from inspection, but a test with multiple threads writing simultaneously would verify it empirically. -- **No test for `GBMSimulator` with all 10 default tickers.** Tests use 1-2 tickers. A test confirming the Cholesky decomposition succeeds for the full 10-ticker default set would catch correlation matrix issues. +--- -### 4.3 Potential Future Considerations +### 9. TSLA correlation constant inconsistency — MINOR -- The `PriceCache` doesn't cap history; it only stores the latest price per ticker, so memory is bounded at O(tickers). Good. -- The `DEFAULT_CORR` constant (0.3, `seed_prices.py:48`) is defined but never referenced in `_pairwise_correlation`. The static method returns `CROSS_GROUP_CORR` (also 0.3) as the fallback. This is semantically confusing — `DEFAULT_CORR` seems intended for tickers not in any group, but the code returns `CROSS_GROUP_CORR` for all non-matched pairs. Both happen to be 0.3, so behavior is correct, but the naming is misleading. +`seed_prices.py` defines `TSLA_CORR = 0.3`. `MARKET_SIMULATOR.md` specifies `0.25` for TSLA ("loner" behaviour). Both values are reasonable, but the documentation and code are inconsistent. If the design intent was 0.25, update `seed_prices.py`. If 0.3 is intentional, update the docs. --- -## 5. Verdict +## Summary -The market data backend is solid and well-structured. The GBM simulator, price cache, abstract interface, factory pattern, and SSE streaming all work correctly and follow good practices. The architecture will integrate cleanly with the rest of the application. 
+| # | Severity | Issue | +|---|----------|-------| +| 1 | Critical | `open_price` missing from `PriceUpdate` and `PriceCache` — breaks daily change % | +| 2 | Critical | Field named `previous_price`, spec requires `prev_price` — frontend contract broken | +| 3 | Critical | SSE timestamp is Unix float, spec requires ISO 8601 string | +| 4 | High | Top-level `massive` imports — breaks optional dependency design intent | +| 5 | High | SSE sends batched object, spec requires individual per-ticker events | +| 6 | High | `stream.py` has 33% coverage — SSE format/behaviour untested | +| 7 | Medium | Unknown ticker seeds at random price instead of $100 as specified | +| 8 | Minor | `create_stream_router` mutates a module-level router (duplicate route risk) | +| 9 | Minor | TSLA_CORR is 0.3 in code vs 0.25 in design docs | -**Must fix before proceeding:** -1. Add `[tool.hatch.build.targets.wheel] packages = ["app"]` to `pyproject.toml` — without this, `uv sync` and Docker builds fail. +**Issues 1, 2, 3, and 5** are a connected set — they all define the SSE/frontend contract. They should be resolved together before the Frontend agent starts consuming the stream, as they will otherwise require a coordinated frontend + backend change later. -**Should fix:** -2. Make the Massive tests resilient to the `massive` package being absent (use `create=True` on patches, or restructure mocks). -3. Fix the `_generate_events` return type annotation. -4. Remove unused imports in test files. +**Issue 4 (lazy imports)** is a clean-up item that should be addressed to align with the documented design intent, even though it doesn't cause a runtime failure with the current `pyproject.toml`. -**Nice to have:** -5. Add a `get_tickers()` public method to `GBMSimulator`. -6. Add at least one SSE integration test. -7. Clarify `DEFAULT_CORR` vs `CROSS_GROUP_CORR` naming. +**Issue 6 (stream.py coverage)** is the most impactful gap to address in the test suite. 
diff --git a/planning/MARKET_DATA_SUMMARY.md b/planning/MARKET_DATA_SUMMARY.md new file mode 100644 index 00000000..cd9ebf30 --- /dev/null +++ b/planning/MARKET_DATA_SUMMARY.md @@ -0,0 +1,104 @@ +# Market Data Backend — Summary + +**Status:** Complete, tested, reviewed, all issues resolved. + +## What Was Built + +A complete market data subsystem in `backend/app/market/` (8 modules, ~500 lines) providing live price simulation and real market data via a unified interface. + +### Architecture + +``` +MarketDataSource (ABC) +├── SimulatorDataSource → GBM simulator (default, no API key needed) +└── MassiveDataSource → Polygon.io REST poller (when MASSIVE_API_KEY set) + │ + ▼ + PriceCache (thread-safe, in-memory) + │ + ├──→ SSE stream endpoint (/api/stream/prices) + ├──→ Portfolio valuation + └──→ Trade execution +``` + +### Modules + +| File | Purpose | +|------|---------| +| `models.py` | `PriceUpdate` — immutable frozen dataclass (ticker, price, previous_price, timestamp, change, direction) | +| `interface.py` | `MarketDataSource` — abstract base class defining `start/stop/add_ticker/remove_ticker/get_tickers` | +| `cache.py` | `PriceCache` — thread-safe price store with version counter for SSE change detection | +| `seed_prices.py` | Realistic seed prices, per-ticker GBM params (drift/volatility), correlation groups | +| `simulator.py` | `GBMSimulator` (Geometric Brownian Motion with Cholesky-correlated moves) + `SimulatorDataSource` | +| `massive_client.py` | `MassiveDataSource` — REST polling client for Polygon.io via the `massive` package | +| `factory.py` | `create_market_data_source()` — selects simulator or Massive based on `MASSIVE_API_KEY` env var | +| `stream.py` | `create_stream_router()` — FastAPI SSE endpoint factory using version-based change detection | + +### Key Design Decisions + +- **Strategy pattern** — both data sources implement the same ABC; downstream code is source-agnostic +- **PriceCache as single point of truth** — producers write, 
consumers read; no direct coupling +- **GBM with correlated moves** — Cholesky decomposition of sector-based correlation matrix; tech stocks correlate at 0.6, finance at 0.5, cross-sector at 0.3 +- **Random shock events** — ~0.1% chance per tick per ticker of a 2-5% move for visual drama +- **SSE over WebSockets** — simpler, one-way push, universal browser support + +## Test Suite + +**73 tests, all passing.** 6 test modules in `backend/tests/market/`. + +| Module | Tests | Coverage | +|--------|-------|----------| +| test_models.py | 11 | models.py: 100% | +| test_cache.py | 13 | cache.py: 100% | +| test_simulator.py | 17 | simulator.py: 98% | +| test_simulator_source.py | 10 | (integration tests) | +| test_factory.py | 7 | factory.py: 100% | +| test_massive.py | 13 | massive_client.py: 56% (expected — API methods mocked) | + +Overall coverage: 84%. + +## Code Review & Fixes Applied + +A comprehensive code review identified 7 issues. All were resolved: + +1. **pyproject.toml build config** — added `[tool.hatch.build.targets.wheel] packages = ["app"]` +2. **Lazy imports removed** — `massive` is a core dependency; imports moved to top level +3. **SSE return type fixed** — `_generate_events` annotated as `AsyncGenerator[str, None]` +4. **Public `get_tickers()`** — added to `GBMSimulator` to avoid private attribute access +5. **Correlation constants cleaned up** — removed unused `DEFAULT_CORR`, consolidated into `CROSS_GROUP_CORR` +6. **Unused test imports removed** — `pytest`, `math`, `asyncio` cleaned from 4 test files +7. **Massive test mocks fixed** — `source._client` set in tests, patches target correct names + +## Demo + +A Rich terminal demo is available at `backend/market_data_demo.py`: + +```bash +cd backend +uv run market_data_demo.py +``` + +Displays a live-updating dashboard with all 10 tickers, sparklines, color-coded direction arrows, and an event log for notable price moves. Runs 60 seconds or until Ctrl+C. 
+ +## Usage for Downstream Code + +```python +from app.market import PriceCache, create_market_data_source + +# Startup +cache = PriceCache() +source = create_market_data_source(cache) # Reads MASSIVE_API_KEY +await source.start(["AAPL", "GOOGL", "MSFT", ...]) + +# Read prices +update = cache.get("AAPL") # PriceUpdate or None +price = cache.get_price("AAPL") # float or None +all_prices = cache.get_all() # dict[str, PriceUpdate] + +# Dynamic watchlist +await source.add_ticker("TSLA") +await source.remove_ticker("GOOGL") + +# Shutdown +await source.stop() +``` \ No newline at end of file diff --git a/planning/New Text Document.txt b/planning/New Text Document.txt new file mode 100644 index 00000000..e69de29b diff --git a/planning/PLAN.md b/planning/PLAN.md index bc1811b3..25a48a14 100644 --- a/planning/PLAN.md +++ b/planning/PLAN.md @@ -130,6 +130,9 @@ MASSIVE_API_KEY= # Optional: Set to "true" for deterministic mock LLM responses (testing) LLM_MOCK=false + +# Optional: Portfolio snapshot recording interval in seconds (default: 60) +PORTFOLIO_SNAPSHOT_INTERVAL_SECONDS=60 ``` ### Behavior @@ -150,11 +153,12 @@ Both the simulator and the Massive client implement the same abstract interface. ### Simulator (Default) - Generates prices using geometric Brownian motion (GBM) with configurable drift and volatility per ticker -- Updates at ~500ms intervals +- Updates at ~500ms intervals — **a single update loop drives both simulation and SSE delivery; there are no two independent timers** - Correlated moves across tickers (e.g., tech stocks move together) - Occasional random "events" — sudden 2-5% moves on a ticker for drama - Starts from realistic seed prices (e.g., AAPL ~$190, GOOGL ~$175, etc.) - Runs as an in-process background task — no external dependencies +- **The active ticker set mirrors the watchlist**: tickers are added to simulation when added to the watchlist; removed immediately when removed. 
When a ticker not in the default seed list is added, it starts with a seed price of `$100.00` and standard volatility parameters.
 
 ### Massive API (Optional)
 
@@ -175,8 +179,23 @@ Both the simulator and the Massive client implement the same abstract interface.
 
 - Endpoint: `GET /api/stream/prices`
 - Long-lived SSE connection; client uses native `EventSource` API
-- Server pushes price updates for all tickers known to the system at a regular cadence (~500ms) — in the single-user model this is equivalent to the user's watchlist
-- Each SSE event contains ticker, price, previous price, timestamp, and change direction
+- Server pushes price updates for **all tickers in the user's current watchlist** at each simulator tick (~500ms)
+- Each SSE event is a JSON object with these exact fields:
+
+```json
+{
+  "ticker": "AAPL",
+  "price": 191.50,
+  "prev_price": 191.32,
+  "open_price": 190.00,
+  "timestamp": "2026-04-10T12:00:00.500Z",
+  "direction": "up"
+}
+```
+
+ `direction` is one of `"up"`, `"down"`, or `"flat"`.
+
+- The **price cache** holds `{price, prev_price, open_price, timestamp, direction}` per ticker. `open_price` is the seed price at session start and is the baseline for "daily change %" calculations on the frontend.
 - Client handles reconnection automatically (EventSource has built-in retry)
 
 ---
@@ -225,7 +243,7 @@ All tables include a `user_id` column defaulting to `"default"`. This is hardcod
 
 - `price` REAL
 - `executed_at` TEXT (ISO timestamp)
 
-**portfolio_snapshots** — Portfolio value over time (for P&L chart). Recorded every 30 seconds by a background task, and immediately after each trade execution.
+**portfolio_snapshots** — Portfolio value over time (for P&L chart). Owned by the **portfolio module**. Recorded at three points: (1) once immediately when the database is first initialized (`total_value = 10000.0`), (2) on a repeating interval configured by `PORTFOLIO_SNAPSHOT_INTERVAL_SECONDS` (default 60s), and (3) immediately after each trade execution. 
- `id` TEXT PRIMARY KEY (UUID) - `user_id` TEXT (default: `"default"`) - `total_value` REAL @@ -236,7 +254,7 @@ All tables include a `user_id` column defaulting to `"default"`. This is hardcod - `user_id` TEXT (default: `"default"`) - `role` TEXT (`"user"` or `"assistant"`) - `content` TEXT -- `actions` TEXT (JSON — trades executed, watchlist changes made; null for user messages) +- `actions` TEXT (JSON — the **backend-enriched** post-execution result: array of trade and watchlist_change objects with `status`, `price`, `executed_at`, `error` fields; null for user messages and assistant messages with no actions) - `created_at` TEXT (ISO timestamp) ### Default Seed Data @@ -244,6 +262,12 @@ All tables include a `user_id` column defaulting to `"default"`. This is hardcod - One user profile: `id="default"`, `cash_balance=10000.0` - Ten watchlist entries: AAPL, GOOGL, MSFT, AMZN, TSLA, NVDA, META, JPM, V, NFLX +### Implementation Notes + +- **`user_id` access**: All DB helper/repository functions accept `user_id` as a parameter defaulting to `"default"`. Raw SQL queries never hard-code `"default"` inline — the repository layer owns that default. +- **Position update semantics**: Sells update the `positions` row in-place (decrement `quantity`, leave `avg_cost` unchanged). The row is deleted only when `quantity` reaches exactly zero. A sell on a ticker with no position row returns the same validation error as selling more than owned. +- **`avg_cost` on buy**: Weighted average — `(old_qty * old_avg_cost + new_qty * new_price) / (old_qty + new_qty)`. No change to `avg_cost` on sells. + --- ## 8. API Endpoints @@ -267,9 +291,25 @@ All tables include a `user_id` column defaulting to `"default"`. 
This is hardcod | POST | `/api/watchlist` | Add a ticker: `{ticker}` | | DELETE | `/api/watchlist/{ticker}` | Remove a ticker | +`GET /api/watchlist` returns an array of objects: +```json +[ + { + "ticker": "AAPL", + "price": 191.50, + "prev_price": 191.32, + "open_price": 190.00, + "direction": "up", + "timestamp": "2026-04-10T12:00:00.500Z" + } +] +``` +`price`, `prev_price`, `open_price`, `direction`, and `timestamp` are `null` for tickers added so recently that no cache entry exists yet. The frontend must handle nulls gracefully. + ### Chat | Method | Path | Description | |--------|------|-------------| +| GET | `/api/chat/history` | Last 20 chat messages for the user (for restoring conversation on page load) | | POST | `/api/chat` | Send a message, receive complete JSON response (message + executed actions) | ### System @@ -300,7 +340,7 @@ When the user sends a chat message, the backend: ### Structured Output Schema -The LLM is instructed to respond with JSON matching this schema: +**LLM output** (what the model returns — minimal, clean): ```json { @@ -316,7 +356,39 @@ The LLM is instructed to respond with JSON matching this schema: - `message` (required): The conversational text shown to the user - `trades` (optional): Array of trades to auto-execute. Each trade goes through the same validation as manual trades (sufficient cash for buys, sufficient shares for sells) -- `watchlist_changes` (optional): Array of watchlist modifications +- `watchlist_changes` (optional): Array of watchlist modifications. `action` is `"add"` or `"remove"`. 
+ +**API response** (what `POST /api/chat` returns to the frontend — backend enriches after execution): + +```json +{ + "message": "Bought 10 AAPL at $191.50.", + "trades": [ + { + "ticker": "AAPL", + "side": "buy", + "quantity": 10, + "status": "executed", + "price": 191.50, + "notional": 1915.00, + "executed_at": "2026-04-10T12:00:00.500Z", + "error": null + } + ], + "watchlist_changes": [ + { + "ticker": "PYPL", + "action": "add", + "status": "executed", + "error": null + } + ] +} +``` + +Failed items use the same shape: `status: "rejected"`, `price`/`notional`/`executed_at` set to `null`, `error` populated with a human-readable string. **Failed items are never stripped from the arrays** — the frontend renders them inline as attempted-but-failed actions. This enriched shape is also what is stored in `chat_messages.actions`. + +The LLM context includes the **last 20 `chat_messages` rows** from the database. ### Auto-Execution @@ -325,7 +397,7 @@ Trades specified by the LLM execute automatically — no confirmation dialog. Th - It creates an impressive, fluid demo experience - It demonstrates agentic AI capabilities — the core theme of the course -If a trade fails validation (e.g., insufficient cash), the error is included in the chat response so the LLM can inform the user. +If a trade fails validation (e.g., insufficient cash), it remains in the `trades` array with `status: "rejected"` and a populated `error` field so the frontend can render it inline. 
### System Prompt Guidance @@ -344,6 +416,29 @@ When `LLM_MOCK=true`, the backend returns deterministic mock responses instead o - Development without an API key - CI/CD pipelines +The pinned mock response (stored in `test/fixtures/llm_mock_response.json` and used by both backend and E2E tests): + +```json +{ + "message": "Bought 10 AAPL at $185.25.", + "trades": [ + { + "ticker": "AAPL", + "side": "buy", + "quantity": 10, + "status": "executed", + "price": 185.25, + "notional": 1852.50, + "executed_at": "2026-01-01T00:00:00.000Z", + "error": null + } + ], + "watchlist_changes": [] +} +``` + +For rejected trades in mock mode, use `status: "rejected"`, `price: null`, `notional: null`, `executed_at: null`, and a non-null `error` string. + --- ## 10. Frontend Design @@ -352,8 +447,8 @@ When `LLM_MOCK=true`, the backend returns deterministic mock responses instead o The frontend is a single-page application with a dense, terminal-inspired layout. The specific component architecture and layout system is up to the Frontend Engineer, but the UI should include these elements: -- **Watchlist panel** — grid/table of watched tickers with: ticker symbol, current price (flashing green/red on change), daily change %, and a sparkline mini-chart (accumulated from SSE since page load) -- **Main chart area** — larger chart for the currently selected ticker, with at minimum price over time. Clicking a ticker in the watchlist selects it here. +- **Watchlist panel** — grid/table of watched tickers with: ticker symbol, current price (flashing green/red on change), "daily change %" (computed as `(price - open_price) / open_price * 100` where `open_price` is the session-start seed price from the SSE event), and a sparkline mini-chart (accumulated from SSE since page load) +- **Main chart area** — larger chart for the currently selected ticker showing price over time, **data accumulated from the SSE stream since page load** (same data as sparklines, no separate historical API). 
Clicking a ticker in the watchlist selects it here. - **Portfolio heatmap** — treemap visualization where each rectangle is a position, sized by portfolio weight, colored by P&L (green = profit, red = loss) - **P&L chart** — line chart showing total portfolio value over time, using data from `portfolio_snapshots` - **Positions table** — tabular view of all positions: ticker, quantity, avg cost, current price, unrealized P&L, % change @@ -368,6 +463,7 @@ The frontend is a single-page application with a dense, terminal-inspired layout - Price flash effect: on receiving a new price, briefly apply a CSS class with background color transition, then remove it - All API calls go to the same origin (`/api/*`) — no CORS configuration needed - Tailwind CSS for styling with a custom dark theme +- **Route constraint**: Next.js `output: 'export'` prohibits SSR-style dynamic routes. For v1, all navigation uses a single static page with client-side state or query params (e.g., `?ticker=AAPL`). No `getServerSideProps`, no per-ticker static paths. --- @@ -391,6 +487,17 @@ Stage 2: Python 3.12 slim FastAPI serves the static frontend files and all API routes on port 8000. 
+### Deployment Contract + +| Item | Value | +|------|-------| +| Next.js build output | `frontend/out/` | +| Copied into container at | `/app/static/` | +| FastAPI static mount | `StaticFiles(directory="/app/static", html=True)` mounted at path `/` | +| `/_next/` assets | Served automatically by `StaticFiles` (no special config needed) | +| SPA fallback | A catch-all route (`@app.get("/{full_path:path}")`) placed **after** all `/api/` routes returns `index.html` for any path not matched by the static file mount | +| API routes | All mounted under `/api/` — matched before the static mount and catch-all | + ### Docker Volume The SQLite database persists via a named Docker volume: @@ -454,3 +561,35 @@ The container is designed to deploy to AWS App Runner, Render, or any container - Portfolio visualization: heatmap renders with correct colors, P&L chart has data points - AI chat (mocked): send a message, receive a response, trade execution appears inline - SSE resilience: disconnect and verify reconnection + +--- + +## 13. Resolved Decisions + +All items below have been incorporated into the relevant sections above. This section is a traceability index only. 
+ +| Decision | Incorporated Into | +|----------|-------------------| +| Watchlist is the canonical ticker source; simulation stops immediately on removal | §6 Simulator, §6 SSE Streaming | +| New tickers not in seed list start at $100.00 with standard volatility | §6 Simulator | +| Portfolio module owns the snapshot background task | §7 portfolio_snapshots | +| Snapshot interval configurable via `PORTFOLIO_SNAPSHOT_INTERVAL_SECONDS` (default 60s) | §5, §7 | +| Initial snapshot inserted at DB initialization ($10,000, no positions) | §7 portfolio_snapshots | +| Single update loop drives both simulation and SSE (no two independent timers) | §6 Simulator | +| SSE event field names pinned: `ticker, price, prev_price, open_price, timestamp, direction` | §6 SSE Streaming | +| `daily change %` = `(price - open_price) / open_price * 100` using session-start seed | §6 SSE, §10 Watchlist | +| Main chart uses SSE-accumulated data (no historical API endpoint for v1) | §10 Main chart | +| LLM returns minimal schema; backend enriches with `status/price/notional/executed_at/error` | §9 Structured Output | +| Failed trades stay in `trades` array with `status: "rejected"` — never stripped | §9 Auto-Execution | +| `watchlist_changes` enriched with `status` and `error` in API response | §9 Structured Output | +| LLM mock fixture pinned in `test/fixtures/llm_mock_response.json` (snake_case fields) | §9 LLM Mock Mode | +| LLM context window: last 20 `chat_messages` rows | §9 Structured Output | +| Position sells: update in-place; delete row only at exactly zero quantity | §7 Implementation Notes | +| Sell on missing position row: same error as insufficient shares | §7 Implementation Notes | +| `avg_cost`: weighted average on buy; unchanged on sell | §7 Implementation Notes | +| `user_id` hidden behind repository functions; never hard-coded in raw queries | §7 Implementation Notes | +| `chat_messages.actions` stores backend-enriched post-execution result | §7 chat_messages | +| `GET 
/api/watchlist` returns `null` price fields for newly added tickers | §8 Watchlist | +| `GET /api/chat/history` endpoint added (last 20 messages for page load restore) | §8 Chat | +| No SSR dynamic routes; use query params (e.g., `?ticker=AAPL`) | §10 Technical Notes | +| Deployment contract defined: `frontend/out/` → `/app/static/`, SPA fallback catch-all | §11 Deployment Contract | \ No newline at end of file diff --git a/planning/archive/MARKET_DATA_DESIGN.md b/planning/archive/MARKET_DATA_DESIGN.md new file mode 100644 index 00000000..0d2cfd5f --- /dev/null +++ b/planning/archive/MARKET_DATA_DESIGN.md @@ -0,0 +1,1490 @@ +# Market Data Backend — Detailed Design + +Implementation-ready design for the FinAlly market data subsystem. Covers the unified interface, in-memory price cache, GBM simulator, Massive API client, SSE streaming endpoint, and FastAPI lifecycle integration. + +Everything in this document lives under `backend/app/market/`. + +--- + +## Table of Contents + +1. [File Structure](#1-file-structure) +2. [Data Model — `models.py`](#2-data-model) +3. [Price Cache — `cache.py`](#3-price-cache) +4. [Abstract Interface — `interface.py`](#4-abstract-interface) +5. [Seed Prices & Ticker Parameters — `seed_prices.py`](#5-seed-prices--ticker-parameters) +6. [GBM Simulator — `simulator.py`](#6-gbm-simulator) +7. [Massive API Client — `massive_client.py`](#7-massive-api-client) +8. [Factory — `factory.py`](#8-factory) +9. [SSE Streaming Endpoint — `stream.py`](#9-sse-streaming-endpoint) +10. [FastAPI Lifecycle Integration](#10-fastapi-lifecycle-integration) +11. [Watchlist Coordination](#11-watchlist-coordination) +12. [Testing Strategy](#12-testing-strategy) +13. [Error Handling & Edge Cases](#13-error-handling--edge-cases) +14. [Configuration Summary](#14-configuration-summary) + +--- + +## 1. 
File Structure + +``` +backend/ + app/ + market/ + __init__.py # Re-exports: PriceUpdate, PriceCache, MarketDataSource, create_market_data_source + models.py # PriceUpdate dataclass + cache.py # PriceCache (thread-safe in-memory store) + interface.py # MarketDataSource ABC + seed_prices.py # SEED_PRICES, TICKER_PARAMS, DEFAULT_PARAMS, CORRELATION_GROUPS + simulator.py # GBMSimulator + SimulatorDataSource + massive_client.py # MassiveDataSource + factory.py # create_market_data_source() + stream.py # SSE endpoint (FastAPI router) +``` + +Each file has a single responsibility. The `__init__.py` re-exports the public API so that the rest of the backend imports from `app.market` without reaching into submodules. + +--- + +## 2. Data Model + +**File: `backend/app/market/models.py`** + +`PriceUpdate` is the only data structure that leaves the market data layer. Every downstream consumer — SSE streaming, portfolio valuation, trade execution — works exclusively with this type. + +```python +from __future__ import annotations + +import time +from dataclasses import dataclass, field + + +@dataclass(frozen=True, slots=True) +class PriceUpdate: + """Immutable snapshot of a single ticker's price at a point in time.""" + + ticker: str + price: float + previous_price: float + timestamp: float = field(default_factory=time.time) # Unix seconds + + @property + def change(self) -> float: + """Absolute price change from previous update.""" + return round(self.price - self.previous_price, 4) + + @property + def change_percent(self) -> float: + """Percentage change from previous update.""" + if self.previous_price == 0: + return 0.0 + return round((self.price - self.previous_price) / self.previous_price * 100, 4) + + @property + def direction(self) -> str: + """'up', 'down', or 'flat'.""" + if self.price > self.previous_price: + return "up" + elif self.price < self.previous_price: + return "down" + return "flat" + + def to_dict(self) -> dict: + """Serialize for JSON / SSE 
transmission.""" + return { + "ticker": self.ticker, + "price": self.price, + "previous_price": self.previous_price, + "timestamp": self.timestamp, + "change": self.change, + "change_percent": self.change_percent, + "direction": self.direction, + } +``` + +### Design decisions + +- **`frozen=True`**: Price updates are immutable value objects. Once created they never change, which makes them safe to share across async tasks without copying. +- **`slots=True`**: Minor memory optimization — we create many of these per second. +- **Computed properties** (`change`, `direction`, `change_percent`): Derived from `price` and `previous_price` so they can never be inconsistent. No risk of a stale `direction` field. +- **`to_dict()`**: Single serialization point used by both the SSE endpoint and REST API responses. + +--- + +## 3. Price Cache + +**File: `backend/app/market/cache.py`** + +The price cache is the central data hub. Data sources write to it; SSE streaming and portfolio valuation read from it. It must be thread-safe because the simulator/poller may run in a thread pool executor while SSE reads happen on the async event loop. + +```python +from __future__ import annotations + +import asyncio +import time +from threading import Lock +from typing import Callable + +from .models import PriceUpdate + + +class PriceCache: + """Thread-safe in-memory cache of the latest price for each ticker. + + Writers: SimulatorDataSource or MassiveDataSource (one at a time). + Readers: SSE streaming endpoint, portfolio valuation, trade execution. + """ + + def __init__(self) -> None: + self._prices: dict[str, PriceUpdate] = {} + self._lock = Lock() + self._version: int = 0 # Monotonically increasing; bumped on every update + + def update(self, ticker: str, price: float, timestamp: float | None = None) -> PriceUpdate: + """Record a new price for a ticker. Returns the created PriceUpdate. + + Automatically computes direction and change from the previous price. 
+        If this is the first update for the ticker, previous_price == price (direction='flat').
+        """
+        with self._lock:
+            # Explicit None check: a caller-supplied timestamp of 0.0 (the Unix
+            # epoch) is falsy and must not be replaced with the current time.
+            ts = time.time() if timestamp is None else timestamp
+            prev = self._prices.get(ticker)
+            previous_price = prev.price if prev else price
+
+            update = PriceUpdate(
+                ticker=ticker,
+                price=round(price, 2),
+                previous_price=round(previous_price, 2),
+                timestamp=ts,
+            )
+            self._prices[ticker] = update
+            self._version += 1
+            return update
+
+    def get(self, ticker: str) -> PriceUpdate | None:
+        """Get the latest price for a single ticker, or None if unknown."""
+        with self._lock:
+            return self._prices.get(ticker)
+
+    def get_all(self) -> dict[str, PriceUpdate]:
+        """Snapshot of all current prices. Returns a shallow copy."""
+        with self._lock:
+            return dict(self._prices)
+
+    def get_price(self, ticker: str) -> float | None:
+        """Convenience: get just the price float, or None."""
+        update = self.get(ticker)
+        return update.price if update else None
+
+    def remove(self, ticker: str) -> None:
+        """Remove a ticker from the cache (e.g., when removed from watchlist)."""
+        with self._lock:
+            self._prices.pop(ticker, None)
+
+    @property
+    def version(self) -> int:
+        """Current version counter. Useful for SSE change detection."""
+        return self._version
+
+    def __len__(self) -> int:
+        with self._lock:
+            return len(self._prices)
+
+    def __contains__(self, ticker: str) -> bool:
+        with self._lock:
+            return ticker in self._prices
+```
+
+### Why a version counter?
+
+The SSE streaming loop polls the cache every ~500ms. Without a version counter, it would serialize and send all prices every tick even if nothing changed (e.g., Massive API only updates every 15s). 
The version counter lets the SSE loop skip sends when nothing is new: + +```python +last_version = -1 +while True: + if price_cache.version != last_version: + last_version = price_cache.version + yield format_sse(price_cache.get_all()) + await asyncio.sleep(0.5) +``` + +### Thread safety rationale + +The `threading.Lock` is used instead of `asyncio.Lock` because: +- The Massive client's synchronous `get_snapshot_all()` runs in `asyncio.to_thread()`, which operates in a real OS thread — `asyncio.Lock` would not protect against that. +- The GBM simulator's `step()` is CPU-bound and could also be offloaded to a thread for fairness. +- `threading.Lock` works correctly from both sync threads and the async event loop. + +--- + +## 4. Abstract Interface + +**File: `backend/app/market/interface.py`** + +```python +from __future__ import annotations + +from abc import ABC, abstractmethod + + +class MarketDataSource(ABC): + """Contract for market data providers. + + Implementations push price updates into a shared PriceCache on their own + schedule. Downstream code never calls the data source directly for prices — + it reads from the cache. + + Lifecycle: + source = create_market_data_source(cache) + await source.start(["AAPL", "GOOGL", ...]) + # ... app runs ... + await source.add_ticker("TSLA") + await source.remove_ticker("GOOGL") + # ... app shutting down ... + await source.stop() + """ + + @abstractmethod + async def start(self, tickers: list[str]) -> None: + """Begin producing price updates for the given tickers. + + Starts a background task that periodically writes to the PriceCache. + Must be called exactly once. Calling start() twice is undefined behavior. + """ + + @abstractmethod + async def stop(self) -> None: + """Stop the background task and release resources. + + Safe to call multiple times. After stop(), the source will not write + to the cache again. 
+ """ + + @abstractmethod + async def add_ticker(self, ticker: str) -> None: + """Add a ticker to the active set. No-op if already present. + + The next update cycle will include this ticker. + """ + + @abstractmethod + async def remove_ticker(self, ticker: str) -> None: + """Remove a ticker from the active set. No-op if not present. + + Also removes the ticker from the PriceCache. + """ + + @abstractmethod + def get_tickers(self) -> list[str]: + """Return the current list of actively tracked tickers.""" +``` + +### Why the source writes to the cache instead of returning prices + +This push model decouples timing. The simulator ticks at 500ms, Massive polls at 15s, but SSE always reads from the cache at its own 500ms cadence. There is no need for the SSE layer to know which data source is active or what its update interval is. + +--- + +## 5. Seed Prices & Ticker Parameters + +**File: `backend/app/market/seed_prices.py`** + +Constants only — no logic, no imports beyond stdlib. This file is shared by both the simulator (for initial prices and GBM parameters) and potentially by the Massive client (as fallback prices if the API hasn't responded yet). 
+ +```python +"""Seed prices and per-ticker parameters for the market simulator.""" + +# Realistic starting prices for the default watchlist (as of project creation) +SEED_PRICES: dict[str, float] = { + "AAPL": 190.00, + "GOOGL": 175.00, + "MSFT": 420.00, + "AMZN": 185.00, + "TSLA": 250.00, + "NVDA": 800.00, + "META": 500.00, + "JPM": 195.00, + "V": 280.00, + "NFLX": 600.00, +} + +# Per-ticker GBM parameters +# sigma: annualized volatility (higher = more price movement) +# mu: annualized drift / expected return +TICKER_PARAMS: dict[str, dict[str, float]] = { + "AAPL": {"sigma": 0.22, "mu": 0.05}, + "GOOGL": {"sigma": 0.25, "mu": 0.05}, + "MSFT": {"sigma": 0.20, "mu": 0.05}, + "AMZN": {"sigma": 0.28, "mu": 0.05}, + "TSLA": {"sigma": 0.50, "mu": 0.03}, # High volatility + "NVDA": {"sigma": 0.40, "mu": 0.08}, # High volatility, strong drift + "META": {"sigma": 0.30, "mu": 0.05}, + "JPM": {"sigma": 0.18, "mu": 0.04}, # Low volatility (bank) + "V": {"sigma": 0.17, "mu": 0.04}, # Low volatility (payments) + "NFLX": {"sigma": 0.35, "mu": 0.05}, +} + +# Default parameters for tickers not in the list above (dynamically added) +DEFAULT_PARAMS: dict[str, float] = {"sigma": 0.25, "mu": 0.05} + +# Correlation groups for the simulator's Cholesky decomposition +# Tickers in the same group have higher intra-group correlation +CORRELATION_GROUPS: dict[str, set[str]] = { + "tech": {"AAPL", "GOOGL", "MSFT", "AMZN", "META", "NVDA", "NFLX"}, + "finance": {"JPM", "V"}, +} + +# Correlation coefficients +INTRA_TECH_CORR = 0.6 # Tech stocks move together +INTRA_FINANCE_CORR = 0.5 # Finance stocks move together +CROSS_GROUP_CORR = 0.3 # Between sectors +TSLA_CORR = 0.3 # TSLA does its own thing +DEFAULT_CORR = 0.3 # Unknown tickers +``` + +--- + +## 6. GBM Simulator + +**File: `backend/app/market/simulator.py`** + +This file contains two classes: +- `GBMSimulator`: Pure math engine. Stateful — holds current prices and advances them one step at a time. 
+- `SimulatorDataSource`: The `MarketDataSource` implementation that wraps `GBMSimulator` in an async loop and writes to the `PriceCache`. + +### 6.1 GBMSimulator — The Math Engine + +```python +from __future__ import annotations + +import asyncio +import logging +import math +import random + +import numpy as np + +from .cache import PriceCache +from .interface import MarketDataSource +from .seed_prices import ( + CORRELATION_GROUPS, + CROSS_GROUP_CORR, + DEFAULT_CORR, + DEFAULT_PARAMS, + INTRA_FINANCE_CORR, + INTRA_TECH_CORR, + SEED_PRICES, + TICKER_PARAMS, + TSLA_CORR, +) + +logger = logging.getLogger(__name__) + + +class GBMSimulator: + """Geometric Brownian Motion simulator for correlated stock prices. + + Math: + S(t+dt) = S(t) * exp((mu - sigma^2/2) * dt + sigma * sqrt(dt) * Z) + + Where: + S(t) = current price + mu = annualized drift (expected return) + sigma = annualized volatility + dt = time step as fraction of a trading year + Z = correlated standard normal random variable + + The tiny dt (~8.5e-8 for 500ms ticks over 252 trading days * 6.5h/day) + produces sub-cent moves per tick that accumulate naturally over time. 
+ """ + + # 500ms expressed as a fraction of a trading year + # 252 trading days * 6.5 hours/day * 3600 seconds/hour = 5,896,800 seconds + TRADING_SECONDS_PER_YEAR = 252 * 6.5 * 3600 # 5,896,800 + DEFAULT_DT = 0.5 / TRADING_SECONDS_PER_YEAR # ~8.48e-8 + + def __init__( + self, + tickers: list[str], + dt: float = DEFAULT_DT, + event_probability: float = 0.001, + ) -> None: + self._dt = dt + self._event_prob = event_probability + + # Per-ticker state + self._tickers: list[str] = [] + self._prices: dict[str, float] = {} + self._params: dict[str, dict[str, float]] = {} + + # Cholesky decomposition of the correlation matrix (for correlated moves) + self._cholesky: np.ndarray | None = None + + # Initialize all starting tickers + for ticker in tickers: + self._add_ticker_internal(ticker) + self._rebuild_cholesky() + + # --- Public API --- + + def step(self) -> dict[str, float]: + """Advance all tickers by one time step. Returns {ticker: new_price}. + + This is the hot path — called every 500ms. Keep it fast. 
+ """ + n = len(self._tickers) + if n == 0: + return {} + + # Generate n independent standard normal draws + z_independent = np.random.standard_normal(n) + + # Apply Cholesky to get correlated draws + if self._cholesky is not None: + z_correlated = self._cholesky @ z_independent + else: + z_correlated = z_independent + + result: dict[str, float] = {} + for i, ticker in enumerate(self._tickers): + params = self._params[ticker] + mu = params["mu"] + sigma = params["sigma"] + + # GBM: S(t+dt) = S(t) * exp((mu - 0.5*sigma^2)*dt + sigma*sqrt(dt)*Z) + drift = (mu - 0.5 * sigma ** 2) * self._dt + diffusion = sigma * math.sqrt(self._dt) * z_correlated[i] + self._prices[ticker] *= math.exp(drift + diffusion) + + # Random event: ~0.1% chance per tick per ticker + # With 10 tickers at 2 ticks/sec, expect an event ~every 50 seconds + if random.random() < self._event_prob: + shock_magnitude = random.uniform(0.02, 0.05) + shock_sign = random.choice([-1, 1]) + self._prices[ticker] *= 1 + shock_magnitude * shock_sign + logger.debug( + "Random event on %s: %.1f%% %s", + ticker, + shock_magnitude * 100, + "up" if shock_sign > 0 else "down", + ) + + result[ticker] = round(self._prices[ticker], 2) + + return result + + def add_ticker(self, ticker: str) -> None: + """Add a ticker to the simulation. Rebuilds the correlation matrix.""" + if ticker in self._prices: + return + self._add_ticker_internal(ticker) + self._rebuild_cholesky() + + def remove_ticker(self, ticker: str) -> None: + """Remove a ticker from the simulation. 
Rebuilds the correlation matrix.""" + if ticker not in self._prices: + return + self._tickers.remove(ticker) + del self._prices[ticker] + del self._params[ticker] + self._rebuild_cholesky() + + def get_price(self, ticker: str) -> float | None: + """Current price for a ticker, or None if not tracked.""" + return self._prices.get(ticker) + + # --- Internals --- + + def _add_ticker_internal(self, ticker: str) -> None: + """Add a ticker without rebuilding Cholesky (for batch initialization).""" + if ticker in self._prices: + return + self._tickers.append(ticker) + self._prices[ticker] = SEED_PRICES.get(ticker, random.uniform(50.0, 300.0)) + self._params[ticker] = TICKER_PARAMS.get(ticker, dict(DEFAULT_PARAMS)) + + def _rebuild_cholesky(self) -> None: + """Rebuild the Cholesky decomposition of the ticker correlation matrix. + + Called whenever tickers are added or removed. O(n^2) but n < 50. + """ + n = len(self._tickers) + if n <= 1: + self._cholesky = None + return + + # Build the correlation matrix + corr = np.eye(n) + for i in range(n): + for j in range(i + 1, n): + rho = self._pairwise_correlation(self._tickers[i], self._tickers[j]) + corr[i, j] = rho + corr[j, i] = rho + + self._cholesky = np.linalg.cholesky(corr) + + @staticmethod + def _pairwise_correlation(t1: str, t2: str) -> float: + """Determine correlation between two tickers based on sector grouping. 
+ + Correlation structure: + - Same tech sector: 0.6 + - Same finance sector: 0.5 + - TSLA with anything: 0.3 (it does its own thing) + - Cross-sector: 0.3 + - Unknown tickers: 0.3 + """ + tech = CORRELATION_GROUPS["tech"] + finance = CORRELATION_GROUPS["finance"] + + # TSLA is in tech set but behaves independently + if t1 == "TSLA" or t2 == "TSLA": + return TSLA_CORR + + if t1 in tech and t2 in tech: + return INTRA_TECH_CORR + if t1 in finance and t2 in finance: + return INTRA_FINANCE_CORR + + return CROSS_GROUP_CORR +``` + +### 6.2 SimulatorDataSource — Async Wrapper + +```python +class SimulatorDataSource(MarketDataSource): + """MarketDataSource backed by the GBM simulator. + + Runs a background asyncio task that calls GBMSimulator.step() every + `update_interval` seconds and writes results to the PriceCache. + """ + + def __init__( + self, + price_cache: PriceCache, + update_interval: float = 0.5, + event_probability: float = 0.001, + ) -> None: + self._cache = price_cache + self._interval = update_interval + self._event_prob = event_probability + self._sim: GBMSimulator | None = None + self._task: asyncio.Task | None = None + + async def start(self, tickers: list[str]) -> None: + self._sim = GBMSimulator( + tickers=tickers, + event_probability=self._event_prob, + ) + # Seed the cache with initial prices so SSE has data immediately + for ticker in tickers: + price = self._sim.get_price(ticker) + if price is not None: + self._cache.update(ticker=ticker, price=price) + self._task = asyncio.create_task(self._run_loop(), name="simulator-loop") + logger.info("Simulator started with %d tickers", len(tickers)) + + async def stop(self) -> None: + if self._task and not self._task.done(): + self._task.cancel() + try: + await self._task + except asyncio.CancelledError: + pass + self._task = None + logger.info("Simulator stopped") + + async def add_ticker(self, ticker: str) -> None: + if self._sim: + self._sim.add_ticker(ticker) + # Seed cache immediately so the ticker has 
a price right away + price = self._sim.get_price(ticker) + if price is not None: + self._cache.update(ticker=ticker, price=price) + logger.info("Simulator: added ticker %s", ticker) + + async def remove_ticker(self, ticker: str) -> None: + if self._sim: + self._sim.remove_ticker(ticker) + self._cache.remove(ticker) + logger.info("Simulator: removed ticker %s", ticker) + + def get_tickers(self) -> list[str]: + return list(self._sim._tickers) if self._sim else [] + + async def _run_loop(self) -> None: + """Core loop: step the simulation, write to cache, sleep.""" + while True: + try: + if self._sim: + prices = self._sim.step() + for ticker, price in prices.items(): + self._cache.update(ticker=ticker, price=price) + except Exception: + logger.exception("Simulator step failed") + await asyncio.sleep(self._interval) +``` + +### Key behaviors + +- **Immediate seeding**: When `start()` is called, the cache is populated with seed prices *before* the loop begins. This means the SSE endpoint has data to send on its very first tick, with no blank-screen delay. +- **Graceful cancellation**: `stop()` cancels the task and awaits it, catching `CancelledError`. This ensures clean shutdown during FastAPI lifespan teardown. +- **Exception resilience**: The loop catches exceptions per-step so a single bad tick doesn't kill the entire data feed. + +--- + +## 7. Massive API Client + +**File: `backend/app/market/massive_client.py`** + +Polls the Massive (formerly Polygon.io) REST API snapshot endpoint on a configurable interval. The synchronous Massive client runs in `asyncio.to_thread()` to avoid blocking the event loop. + +```python +from __future__ import annotations + +import asyncio +import logging +from typing import Any + +from .cache import PriceCache +from .interface import MarketDataSource + +logger = logging.getLogger(__name__) + + +class MassiveDataSource(MarketDataSource): + """MarketDataSource backed by the Massive (Polygon.io) REST API. 
+ + Polls GET /v2/snapshot/locale/us/markets/stocks/tickers for all watched + tickers in a single API call, then writes results to the PriceCache. + + Rate limits: + - Free tier: 5 req/min → poll every 15s (default) + - Paid tiers: higher limits → poll every 2-5s + """ + + def __init__( + self, + api_key: str, + price_cache: PriceCache, + poll_interval: float = 15.0, + ) -> None: + self._api_key = api_key + self._cache = price_cache + self._interval = poll_interval + self._tickers: list[str] = [] + self._task: asyncio.Task | None = None + self._client: Any = None # Lazy import to avoid hard dependency + + async def start(self, tickers: list[str]) -> None: + # Lazy import: only import massive when actually using real market data. + # This means the massive package is not required when using the simulator. + from massive import RESTClient + + self._client = RESTClient(api_key=self._api_key) + self._tickers = list(tickers) + + # Do an immediate first poll so the cache has data right away + await self._poll_once() + + self._task = asyncio.create_task(self._poll_loop(), name="massive-poller") + logger.info( + "Massive poller started: %d tickers, %.1fs interval", + len(tickers), + self._interval, + ) + + async def stop(self) -> None: + if self._task and not self._task.done(): + self._task.cancel() + try: + await self._task + except asyncio.CancelledError: + pass + self._task = None + self._client = None + logger.info("Massive poller stopped") + + async def add_ticker(self, ticker: str) -> None: + ticker = ticker.upper().strip() + if ticker not in self._tickers: + self._tickers.append(ticker) + logger.info("Massive: added ticker %s (will appear on next poll)", ticker) + + async def remove_ticker(self, ticker: str) -> None: + ticker = ticker.upper().strip() + self._tickers = [t for t in self._tickers if t != ticker] + self._cache.remove(ticker) + logger.info("Massive: removed ticker %s", ticker) + + def get_tickers(self) -> list[str]: + return list(self._tickers) + + # --- 
Internal --- + + async def _poll_loop(self) -> None: + """Poll on interval. First poll already happened in start().""" + while True: + await asyncio.sleep(self._interval) + await self._poll_once() + + async def _poll_once(self) -> None: + """Execute one poll cycle: fetch snapshots, update cache.""" + if not self._tickers or not self._client: + return + + try: + # The Massive RESTClient is synchronous — run in a thread to + # avoid blocking the event loop. + snapshots = await asyncio.to_thread(self._fetch_snapshots) + processed = 0 + for snap in snapshots: + try: + price = snap.last_trade.price + # Massive timestamps are Unix milliseconds → convert to seconds + timestamp = snap.last_trade.timestamp / 1000.0 + self._cache.update( + ticker=snap.ticker, + price=price, + timestamp=timestamp, + ) + processed += 1 + except (AttributeError, TypeError) as e: + logger.warning( + "Skipping snapshot for %s: %s", + getattr(snap, "ticker", "???"), + e, + ) + logger.debug("Massive poll: updated %d/%d tickers", processed, len(self._tickers)) + + except Exception as e: + logger.error("Massive poll failed: %s", e) + # Don't re-raise — the loop will retry on the next interval. + # Common failures: 401 (bad key), 429 (rate limit), network errors. + + def _fetch_snapshots(self) -> list: + """Synchronous call to the Massive REST API. Runs in a thread.""" + from massive.rest.models import SnapshotMarketType + + return self._client.get_snapshot_all( + market_type=SnapshotMarketType.STOCKS, + tickers=self._tickers, + ) +``` + +### Error handling philosophy + +The Massive poller is intentionally resilient: + +| Error | Behavior | +|-------|----------| +| **401 Unauthorized** | Logged as error. Poller keeps running (user might fix `.env` and restart). | +| **429 Rate Limited** | Logged as error. Next poll retries after `poll_interval` seconds. | +| **Network timeout** | Logged as error. Retries automatically on next cycle. | +| **Malformed snapshot** | Individual ticker skipped with warning. 
Other tickers still processed. | +| **All tickers fail** | Cache retains last-known prices. SSE keeps streaming stale data (better than no data). | + +### Lazy import strategy + +`from massive import RESTClient` happens inside `start()`, not at module import time. This means: +- The `massive` package is only required when `MASSIVE_API_KEY` is set. +- Students who don't have a Massive API key don't need the package installed at all. +- The simulator path has zero external dependencies beyond `numpy`. + +--- + +## 8. Factory + +**File: `backend/app/market/factory.py`** + +```python +from __future__ import annotations + +import logging +import os + +from .cache import PriceCache +from .interface import MarketDataSource + +logger = logging.getLogger(__name__) + + +def create_market_data_source(price_cache: PriceCache) -> MarketDataSource: + """Create the appropriate market data source based on environment variables. + + - MASSIVE_API_KEY set and non-empty → MassiveDataSource (real market data) + - Otherwise → SimulatorDataSource (GBM simulation) + + Returns an unstarted source. Caller must await source.start(tickers). + """ + api_key = os.environ.get("MASSIVE_API_KEY", "").strip() + + if api_key: + from .massive_client import MassiveDataSource + + logger.info("Market data source: Massive API (real data)") + return MassiveDataSource(api_key=api_key, price_cache=price_cache) + else: + from .simulator import SimulatorDataSource + + logger.info("Market data source: GBM Simulator") + return SimulatorDataSource(price_cache=price_cache) +``` + +### Usage at app startup + +```python +price_cache = PriceCache() +source = create_market_data_source(price_cache) +await source.start(initial_tickers) # e.g., ["AAPL", "GOOGL", ...] +``` + +--- + +## 9. SSE Streaming Endpoint + +**File: `backend/app/market/stream.py`** + +The SSE endpoint is a FastAPI route that holds open a long-lived HTTP connection and pushes price updates to the client as `text/event-stream`. 
+```python
+from __future__ import annotations
+
+import asyncio
+import json
+import logging
+import time
+from collections.abc import AsyncGenerator
+
+from fastapi import APIRouter, Request
+from fastapi.responses import StreamingResponse
+
+from .cache import PriceCache
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter(prefix="/api/stream", tags=["streaming"])
+
+
+def create_stream_router(price_cache: PriceCache) -> APIRouter:
+    """Create the SSE streaming router bound to the given price cache.
+
+    The PriceCache is captured by the route handler via closure. Note that the
+    route is registered on the module-level `router`, so this factory must be
+    called exactly once at startup (a second call would register a duplicate
+    route on the same router).
+    """
+
+    @router.get("/prices")
+    async def stream_prices(request: Request) -> StreamingResponse:
+        """SSE endpoint for live price updates.
+
+        Streams all tracked ticker prices every ~500ms. The client connects
+        with EventSource and receives events in the format:
+
+            data: {"AAPL": {"ticker": "AAPL", "price": 190.50, ...}, ...}
+
+        Includes a retry directive so the browser auto-reconnects on
+        disconnection (EventSource built-in behavior).
+        """
+        return StreamingResponse(
+            _generate_events(price_cache, request),
+            media_type="text/event-stream",
+            headers={
+                "Cache-Control": "no-cache",
+                "Connection": "keep-alive",
+                "X-Accel-Buffering": "no",  # Disable nginx buffering if proxied
+            },
+        )
+
+    return router
+
+
+async def _generate_events(
+    price_cache: PriceCache,
+    request: Request,
+    interval: float = 0.5,
+) -> AsyncGenerator[str, None]:
+    """Async generator that yields SSE-formatted price events.
+
+    Sends all prices every `interval` seconds. Stops when the client
+    disconnects (detected via request.is_disconnected()). 
+ """ + # Tell the client to retry after 1 second if the connection drops + yield "retry: 1000\n\n" + + last_version = -1 + client_ip = request.client.host if request.client else "unknown" + logger.info("SSE client connected: %s", client_ip) + + try: + while True: + # Check for client disconnect + if await request.is_disconnected(): + logger.info("SSE client disconnected: %s", client_ip) + break + + current_version = price_cache.version + if current_version != last_version: + last_version = current_version + prices = price_cache.get_all() + + if prices: + data = { + ticker: update.to_dict() + for ticker, update in prices.items() + } + payload = json.dumps(data) + yield f"data: {payload}\n\n" + + await asyncio.sleep(interval) + except asyncio.CancelledError: + logger.info("SSE stream cancelled for: %s", client_ip) +``` + +### SSE wire format + +Each event the client receives looks like this: + +``` +data: {"AAPL":{"ticker":"AAPL","price":190.50,"previous_price":190.42,"timestamp":1707580800.5,"change":0.08,"change_percent":0.042,"direction":"up"},"GOOGL":{"ticker":"GOOGL","price":175.12,...}} + +``` + +The client parses this with: + +```javascript +const eventSource = new EventSource('/api/stream/prices'); +eventSource.onmessage = (event) => { + const prices = JSON.parse(event.data); + // prices is { "AAPL": { ticker, price, previous_price, ... }, ... } +}; +``` + +### Why poll-and-push instead of event-driven? + +The SSE endpoint polls the cache on a fixed interval rather than being notified by the data source. This is simpler and produces predictable, evenly-spaced updates for the frontend. The frontend accumulates these into sparkline charts — regular spacing is important for clean visualization. + +--- + +## 10. FastAPI Lifecycle Integration + +The market data system starts and stops with the FastAPI application using the `lifespan` context manager pattern. 
+ +**In `backend/app/main.py`:** + +```python +from contextlib import asynccontextmanager + +from fastapi import FastAPI + +from app.market.cache import PriceCache +from app.market.factory import create_market_data_source +from app.market.interface import MarketDataSource +from app.market.stream import create_stream_router + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Manage startup and shutdown of background services.""" + + # --- STARTUP --- + + # 1. Create the shared price cache + price_cache = PriceCache() + app.state.price_cache = price_cache + + # 2. Create and start the market data source + source = create_market_data_source(price_cache) + app.state.market_source = source + + # 3. Load initial tickers from the database watchlist + initial_tickers = await load_watchlist_tickers() # reads from SQLite + await source.start(initial_tickers) + + # 4. Register the SSE streaming router + stream_router = create_stream_router(price_cache) + app.include_router(stream_router) + + yield # App is running + + # --- SHUTDOWN --- + await source.stop() + + +app = FastAPI(title="FinAlly", lifespan=lifespan) + + +# Dependency for injecting the price cache into route handlers +def get_price_cache() -> PriceCache: + return app.state.price_cache + + +def get_market_source() -> MarketDataSource: + return app.state.market_source +``` + +### Accessing market data from other routes + +Other parts of the backend (trade execution, portfolio valuation, watchlist management) access the price cache and data source via FastAPI's dependency injection: + +```python +from fastapi import APIRouter, Depends + +router = APIRouter(prefix="/api") + +@router.post("/portfolio/trade") +async def execute_trade( + trade: TradeRequest, + price_cache: PriceCache = Depends(get_price_cache), +): + current_price = price_cache.get_price(trade.ticker) + if current_price is None: + raise HTTPException(404, f"No price available for {trade.ticker}") + # ... execute trade at current_price ... 
+ + +@router.post("/watchlist") +async def add_to_watchlist( + payload: WatchlistAdd, + source: MarketDataSource = Depends(get_market_source), + price_cache: PriceCache = Depends(get_price_cache), +): + # Add to database ... + # Then tell the data source to start tracking it + await source.add_ticker(payload.ticker) + # ... + + +@router.delete("/watchlist/{ticker}") +async def remove_from_watchlist( + ticker: str, + source: MarketDataSource = Depends(get_market_source), +): + # Remove from database ... + # Then stop tracking + await source.remove_ticker(ticker) + # ... +``` + +--- + +## 11. Watchlist Coordination + +When the watchlist changes (via REST API or LLM chat), the market data source must be notified so it tracks the right set of tickers. + +### Flow: Adding a Ticker + +``` +User (or LLM) → POST /api/watchlist {ticker: "PYPL"} + → Insert into watchlist table (SQLite) + → await source.add_ticker("PYPL") + Simulator: adds to GBMSimulator, rebuilds Cholesky, seeds cache + Massive: appends to ticker list, appears on next poll + → Return success (ticker + current price if available) +``` + +### Flow: Removing a Ticker + +``` +User (or LLM) → DELETE /api/watchlist/PYPL + → Delete from watchlist table (SQLite) + → await source.remove_ticker("PYPL") + Simulator: removes from GBMSimulator, rebuilds Cholesky, removes from cache + Massive: removes from ticker list, removes from cache + → Return success +``` + +### Edge case: Ticker has an open position + +If the user removes a ticker from the watchlist but still holds shares, the ticker should remain in the data source so portfolio valuation stays accurate. 
The watchlist route should check for this: + +```python +@router.delete("/watchlist/{ticker}") +async def remove_from_watchlist( + ticker: str, + source: MarketDataSource = Depends(get_market_source), +): + # Remove from watchlist table + await db.delete_watchlist_entry(ticker) + + # Only stop tracking if no open position + position = await db.get_position(ticker) + if position is None or position.quantity == 0: + await source.remove_ticker(ticker) + + return {"status": "ok"} +``` + +--- + +## 12. Testing Strategy + +### 12.1 Unit Tests for GBMSimulator + +**File: `backend/tests/market/test_simulator.py`** + +```python +import math +import pytest +from app.market.simulator import GBMSimulator +from app.market.seed_prices import SEED_PRICES + + +class TestGBMSimulator: + """Unit tests for the GBM price simulator.""" + + def test_step_returns_all_tickers(self): + sim = GBMSimulator(tickers=["AAPL", "GOOGL"]) + result = sim.step() + assert set(result.keys()) == {"AAPL", "GOOGL"} + + def test_prices_are_positive(self): + """GBM prices can never go negative (exp() is always positive).""" + sim = GBMSimulator(tickers=["AAPL"]) + for _ in range(10_000): + prices = sim.step() + assert prices["AAPL"] > 0 + + def test_initial_prices_match_seeds(self): + sim = GBMSimulator(tickers=["AAPL"]) + # Before any step, price should be the seed price + assert sim.get_price("AAPL") == SEED_PRICES["AAPL"] + + def test_add_ticker(self): + sim = GBMSimulator(tickers=["AAPL"]) + sim.add_ticker("TSLA") + result = sim.step() + assert "TSLA" in result + + def test_remove_ticker(self): + sim = GBMSimulator(tickers=["AAPL", "GOOGL"]) + sim.remove_ticker("GOOGL") + result = sim.step() + assert "GOOGL" not in result + assert "AAPL" in result + + def test_add_duplicate_is_noop(self): + sim = GBMSimulator(tickers=["AAPL"]) + sim.add_ticker("AAPL") + assert len(sim._tickers) == 1 + + def test_remove_nonexistent_is_noop(self): + sim = GBMSimulator(tickers=["AAPL"]) + sim.remove_ticker("NOPE") # 
Should not raise
+
+    def test_unknown_ticker_gets_random_seed_price(self):
+        sim = GBMSimulator(tickers=["ZZZZ"])
+        price = sim.get_price("ZZZZ")
+        assert 50.0 <= price <= 300.0
+
+    def test_empty_step(self):
+        sim = GBMSimulator(tickers=[])
+        result = sim.step()
+        assert result == {}
+
+    def test_prices_change_over_time(self):
+        """After many steps, prices should have drifted from their seeds."""
+        sim = GBMSimulator(tickers=["AAPL"])
+        for _ in range(1000):
+            sim.step()
+        # Price should have changed (extremely unlikely to be exactly the seed)
+        assert sim.get_price("AAPL") != SEED_PRICES["AAPL"]
+
+    def test_cholesky_rebuilds_on_add(self):
+        sim = GBMSimulator(tickers=["AAPL"])
+        assert sim._cholesky is None  # Only 1 ticker, no correlation matrix
+        sim.add_ticker("GOOGL")
+        assert sim._cholesky is not None  # Now 2 tickers, matrix exists
+```
+
+### 12.2 Unit Tests for PriceCache
+
+**File: `backend/tests/market/test_cache.py`**
+
+```python
+import pytest
+from app.market.cache import PriceCache
+
+
+class TestPriceCache:
+
+    def test_update_and_get(self):
+        cache = PriceCache()
+        update = cache.update("AAPL", 190.50)
+        assert update.ticker == "AAPL"
+        assert update.price == 190.50
+        assert cache.get("AAPL") == update
+
+    def test_first_update_is_flat(self):
+        cache = PriceCache()
+        update = cache.update("AAPL", 190.50)
+        assert update.direction == "flat"
+        assert update.prev_price == 190.50
+
+    def test_direction_up(self):
+        cache = PriceCache()
+        cache.update("AAPL", 190.00)
+        update = cache.update("AAPL", 191.00)
+        assert update.direction == "up"
+        assert update.change == 1.00
+
+    def test_direction_down(self):
+        cache = PriceCache()
+        cache.update("AAPL", 190.00)
+        update = cache.update("AAPL", 189.00)
+        assert update.direction == "down"
+        assert update.change == -1.00
+
+    def test_remove(self):
+        cache = PriceCache()
+        cache.update("AAPL", 190.00)
+        cache.remove("AAPL")
+        assert cache.get("AAPL") is None
+
+    def test_get_all(self):
+        cache = PriceCache()
+        cache.update("AAPL", 190.00)
+        cache.update("GOOGL", 175.00)
+        all_prices = cache.get_all()
+        assert set(all_prices.keys()) == {"AAPL", "GOOGL"}
+
+    def test_version_increments(self):
+        cache = PriceCache()
+        v0 = cache.version
+        cache.update("AAPL", 190.00)
+        assert cache.version == v0 + 1
+        cache.update("AAPL", 191.00)
+        assert cache.version == v0 + 2
+
+    def test_get_price_convenience(self):
+        cache = PriceCache()
+        cache.update("AAPL", 190.50)
+        assert cache.get_price("AAPL") == 190.50
+        assert cache.get_price("NOPE") is None
+```
+
+### 12.3 Integration Test: SimulatorDataSource
+
+**File: `backend/tests/market/test_simulator_source.py`**
+
+```python
+import asyncio
+import pytest
+from app.market.cache import PriceCache
+from app.market.simulator import SimulatorDataSource
+
+
+@pytest.mark.asyncio
+class TestSimulatorDataSource:
+
+    async def test_start_populates_cache(self):
+        cache = PriceCache()
+        source = SimulatorDataSource(price_cache=cache, update_interval=0.1)
+        await source.start(["AAPL", "GOOGL"])
+
+        # Cache should have seed prices immediately (before first loop tick)
+        assert cache.get("AAPL") is not None
+        assert cache.get("GOOGL") is not None
+
+        await source.stop()
+
+    async def test_prices_update_over_time(self):
+        cache = PriceCache()
+        source = SimulatorDataSource(price_cache=cache, update_interval=0.05)
+        await source.start(["AAPL"])
+
+        version_before = cache.version
+        await asyncio.sleep(0.3)  # Several update cycles
+
+        # The cache version counter is a deterministic signal that update
+        # cycles actually ran. (Comparing prices directly would be
+        # probabilistic: a price could, with tiny probability, wander
+        # back to its starting value.)
+        assert cache.version > version_before
+
+        await source.stop()
+
+    async def test_stop_is_clean(self):
+        cache = PriceCache()
+        source = SimulatorDataSource(price_cache=cache, update_interval=0.1)
+        await source.start(["AAPL"])
+        await source.stop()
+        # Double stop should not raise
+        await source.stop()
+
+    async def test_add_and_remove_ticker(self):
+        
cache = PriceCache() + source = SimulatorDataSource(price_cache=cache, update_interval=0.1) + await source.start(["AAPL"]) + + await source.add_ticker("TSLA") + assert "TSLA" in source.get_tickers() + assert cache.get("TSLA") is not None + + await source.remove_ticker("TSLA") + assert "TSLA" not in source.get_tickers() + assert cache.get("TSLA") is None + + await source.stop() +``` + +### 12.4 Unit Test: MassiveDataSource (Mocked) + +**File: `backend/tests/market/test_massive.py`** + +```python +import asyncio +from unittest.mock import MagicMock, patch +import pytest +from app.market.cache import PriceCache +from app.market.massive_client import MassiveDataSource + + +def _make_snapshot(ticker: str, price: float, timestamp_ms: int) -> MagicMock: + """Create a mock Massive snapshot object.""" + snap = MagicMock() + snap.ticker = ticker + snap.last_trade.price = price + snap.last_trade.timestamp = timestamp_ms + return snap + + +@pytest.mark.asyncio +class TestMassiveDataSource: + + async def test_poll_updates_cache(self): + cache = PriceCache() + source = MassiveDataSource( + api_key="test-key", + price_cache=cache, + poll_interval=60.0, # Long interval so the loop doesn't auto-poll + ) + + mock_snapshots = [ + _make_snapshot("AAPL", 190.50, 1707580800000), + _make_snapshot("GOOGL", 175.25, 1707580800000), + ] + + with patch.object(source, "_fetch_snapshots", return_value=mock_snapshots): + await source._poll_once() + + assert cache.get_price("AAPL") == 190.50 + assert cache.get_price("GOOGL") == 175.25 + + async def test_malformed_snapshot_skipped(self): + cache = PriceCache() + source = MassiveDataSource( + api_key="test-key", + price_cache=cache, + poll_interval=60.0, + ) + source._tickers = ["AAPL", "BAD"] + + good_snap = _make_snapshot("AAPL", 190.50, 1707580800000) + bad_snap = MagicMock() + bad_snap.ticker = "BAD" + bad_snap.last_trade = None # Will cause AttributeError + + with patch.object(source, "_fetch_snapshots", return_value=[good_snap, bad_snap]): + 
await source._poll_once() + + # Good ticker processed, bad one skipped + assert cache.get_price("AAPL") == 190.50 + assert cache.get_price("BAD") is None + + async def test_api_error_does_not_crash(self): + cache = PriceCache() + source = MassiveDataSource( + api_key="test-key", + price_cache=cache, + poll_interval=60.0, + ) + source._tickers = ["AAPL"] + + with patch.object(source, "_fetch_snapshots", side_effect=Exception("network error")): + await source._poll_once() # Should not raise + + assert cache.get_price("AAPL") is None # No update happened +``` + +--- + +## 13. Error Handling & Edge Cases + +### 13.1 Startup: Empty Watchlist + +If the database has no watchlist entries (user deleted everything), `start()` receives an empty list. Both data sources handle this gracefully — the simulator produces no prices, the Massive poller skips its API call. The SSE endpoint sends empty events. When the user adds a ticker, the source starts tracking it immediately. + +### 13.2 Price Cache Miss During Trade + +If a user tries to trade a ticker that has no cached price (e.g., just added to watchlist, Massive hasn't polled yet): + +```python +price = price_cache.get_price(ticker) +if price is None: + raise HTTPException( + status_code=400, + detail=f"Price not yet available for {ticker}. Please wait a moment and try again.", + ) +``` + +The simulator avoids this by seeding the cache in `add_ticker()`. The Massive client may have a brief gap — the HTTP 400 with a clear message is the correct response. + +### 13.3 Massive API Key Invalid + +If the API key is set but invalid, the first poll will fail with a 401. The poller logs the error and keeps retrying. The SSE endpoint streams empty data. The user sees no prices and a connection status indicator showing "connected" (SSE is working, just no data). The fix is to correct the API key and restart. 
+ +### 13.4 Thread Safety Under Load + +The `PriceCache` uses `threading.Lock` which is a mutex — only one thread can hold it at a time. Under normal load (10 tickers, 2 updates/sec), lock contention is negligible. The critical section is tiny (dict lookup + assignment). + +If this ever became a bottleneck (hundreds of tickers, many concurrent SSE readers), the fix would be a `ReadWriteLock` — but that level of optimization is unnecessary for this project. + +### 13.5 Simulator Precision + +GBM with tiny `dt` produces very small per-tick moves. Floating-point precision is not a concern because: +- Prices are `round()`ed to 2 decimal places in `GBMSimulator.step()` +- The exponential formulation (`exp(drift + diffusion)`) is numerically stable +- Prices are always positive (exponential function) + +--- + +## 14. Configuration Summary + +All tunable parameters and their defaults: + +| Parameter | Location | Default | Description | +|-----------|----------|---------|-------------| +| `MASSIVE_API_KEY` | Environment variable | `""` (empty) | If set, use Massive API; otherwise use simulator | +| `update_interval` | `SimulatorDataSource.__init__` | `0.5` (seconds) | Time between simulator ticks | +| `poll_interval` | `MassiveDataSource.__init__` | `15.0` (seconds) | Time between Massive API polls | +| `event_probability` | `GBMSimulator.__init__` | `0.001` | Chance of a random shock event per ticker per tick | +| `dt` | `GBMSimulator.__init__` | `~8.5e-8` | GBM time step (fraction of a trading year) | +| SSE push interval | `_generate_events()` | `0.5` (seconds) | Time between SSE pushes to the client | +| SSE retry directive | `_generate_events()` | `1000` (ms) | Browser EventSource reconnection delay | + +### Package `__init__.py` + +**File: `backend/app/market/__init__.py`** + +```python +"""Market data subsystem for FinAlly. 
+ +Public API: + PriceUpdate - Immutable price snapshot dataclass + PriceCache - Thread-safe in-memory price store + MarketDataSource - Abstract interface for data providers + create_market_data_source - Factory that selects simulator or Massive + create_stream_router - FastAPI router factory for SSE endpoint +""" + +from .cache import PriceCache +from .factory import create_market_data_source +from .interface import MarketDataSource +from .models import PriceUpdate +from .stream import create_stream_router + +__all__ = [ + "PriceUpdate", + "PriceCache", + "MarketDataSource", + "create_market_data_source", + "create_stream_router", +] +``` diff --git a/planning/archive/MARKET_DATA_REVIEW.md b/planning/archive/MARKET_DATA_REVIEW.md new file mode 100644 index 00000000..61b4d6bf --- /dev/null +++ b/planning/archive/MARKET_DATA_REVIEW.md @@ -0,0 +1,173 @@ +# Market Data Backend — Code Review + +**Date:** 2026-02-10 +**Scope:** `backend/app/market/` (8 source files) and `backend/tests/market/` (6 test files) + +--- + +## 1. Test Results Summary + +**73 tests collected, 68 passed, 5 failed.** + +All failures are in `test_massive.py` and stem from the same root cause: the `massive` package is not installed in the test environment, so `patch("app.market.massive_client.RESTClient")` fails with `AttributeError` because the module-level name `RESTClient` was never imported (it is lazy-imported inside methods). This is an environment issue, not a logic bug — the tests are correctly structured but require the `massive` package to be available (or `create=True` on the patch) so that the mock target exists. 
+ +Failing tests: +- `test_poll_updates_cache` — `asyncio.to_thread` fails because `_fetch_snapshots` is not properly mocked when `massive` is absent +- `test_malformed_snapshot_skipped` — same cause +- `test_timestamp_conversion` — same cause +- `test_stop_cancels_task` — `patch("app.market.massive_client.RESTClient")` fails because the name doesn't exist at module level +- `test_start_immediate_poll` — same as above + +The underlying `_poll_once()` logic itself is correct. The 3 tests that mock `source._fetch_snapshots` directly fail because `asyncio.to_thread(self._fetch_snapshots)` calls the real method which tries to import `massive`. The 2 tests that use `patch("app.market.massive_client.RESTClient")` fail because the name doesn't exist in the module's namespace (lazy import). Both issues resolve when the `massive` package is installed. + +**Lint (ruff):** Source code passes clean. Tests have 5 unused-import warnings (`pytest`, `math`, `asyncio` imported but not used in some test files). + +**Coverage:** 84% overall. +| Module | Coverage | Notes | +|---|---|---| +| models.py | 100% | | +| cache.py | 100% | | +| interface.py | 100% | | +| seed_prices.py | 100% | | +| factory.py | 100% | | +| simulator.py | 98% | Uncovered: `_add_ticker_internal` duplicate guard (L145), exception log in `_run_loop` (L264-265) | +| massive_client.py | 56% | Expected — real API methods can't run without the massive package | +| stream.py | 31% | Expected — SSE generator requires a running ASGI server to test | + +--- + +## 2. Architecture Assessment + +The market data subsystem is well-designed. 
It follows a clean strategy pattern: + +``` +MarketDataSource (ABC) +├── SimulatorDataSource (GBM simulator) +└── MassiveDataSource (Polygon.io REST poller) + │ + ▼ + PriceCache (shared, thread-safe) + │ + ▼ + SSE stream → Frontend +``` + +**Strengths:** +- Clear separation of concerns across 8 focused modules +- Factory pattern with lazy imports — the `massive` package is only needed when `MASSIVE_API_KEY` is set +- PriceCache as the single point of truth decouples producers from consumers +- Immutable `PriceUpdate` dataclass with `frozen=True, slots=True` is correct and efficient +- The GBM math is proper: log-normal price paths via `exp((mu - 0.5*sigma^2)*dt + sigma*sqrt(dt)*Z)` +- Correlated moves via Cholesky decomposition are a nice touch for realism +- All background tasks are properly cancellable and idempotent on stop() + +--- + +## 3. Issues Found + +### 3.1 Build Configuration Bug (Severity: High) + +`pyproject.toml` is missing the hatchling package discovery configuration. Running `uv sync` fails: + +``` +ValueError: Unable to determine which files to ship inside the wheel +``` + +**Fix:** Add to `pyproject.toml`: +```toml +[tool.hatch.build.targets.wheel] +packages = ["app"] +``` + +This will block Docker builds and any fresh `uv sync` until fixed. + +### 3.2 Massive Test Fragility (Severity: Medium) + +Five tests in `test_massive.py` fail when the `massive` package is not installed. The root cause is twofold: + +1. **`_poll_once` uses `asyncio.to_thread(self._fetch_snapshots)`** — even when `_fetch_snapshots` is patched on the instance, `to_thread` runs it in a thread executor. Three tests mock `_fetch_snapshots` as a `MagicMock` (synchronous), but `asyncio.to_thread` wraps it in `loop.run_in_executor`, which works... except that when `_fetch_snapshots` is NOT patched, the real method tries `from massive.rest.models import SnapshotMarketType` and fails. + +2. 
**`patch("app.market.massive_client.RESTClient")`** targets a name that doesn't exist at module level because `massive_client.py` uses a lazy import inside `start()`. The patch needs `create=True` or the import needs to be at module level behind a `TYPE_CHECKING` guard. + +These tests pass when `massive>=1.0.0` is installed (as `pyproject.toml` declares it as a core dependency), so this is technically a test-environment issue, not a code bug. However, since the whole point of lazy imports is to make `massive` optional for simulator-only use, the tests should also work without it. + +### 3.3 `_generate_events` Return Type Annotation (Severity: Low) + +`stream.py:54` declares the return type as `-> None` but the function is an async generator (it uses `yield`). The correct annotation would be `-> AsyncGenerator[str, None]` or simply removing the annotation. This doesn't cause runtime issues but is misleading for type checkers and developers. + +### 3.4 `version` Property Not Under Lock (Severity: Low) + +`PriceCache.version` reads `self._version` without acquiring `self._lock`: + +```python +@property +def version(self) -> int: + return self._version +``` + +On CPython with the GIL, reading a single `int` is atomic, so this won't cause corruption. However, it's inconsistent with the rest of the class, and if the project ever runs on a no-GIL Python build (PEP 703, Python 3.13t+), this could become a race. A minor concern given the current context. + +### 3.5 `SimulatorDataSource.get_tickers` Accesses Private State (Severity: Low) + +`simulator.py:254`: +```python +def get_tickers(self) -> list[str]: + return list(self._sim._tickers) if self._sim else [] +``` + +This reaches into `GBMSimulator._tickers` (private attribute). `GBMSimulator` should expose a `get_tickers()` method or a `tickers` property to keep the boundary clean. 
+ +### 3.6 Module-Level Router Instance (Severity: Low) + +`stream.py:16` creates a module-level `router` object, and `create_stream_router()` registers a route on it via closure. If `create_stream_router` were called twice (e.g., in tests), the `/prices` route would be registered twice on the same router. In practice this won't happen because the function is called once during app startup, but it's a latent footgun for testing. + +### 3.7 Unused Imports in Tests (Severity: Trivial) + +Five lint warnings from `ruff`: +- `test_cache.py`: unused `pytest` +- `test_factory.py`: unused `pytest` +- `test_massive.py`: unused `asyncio` +- `test_simulator.py`: unused `math`, unused `pytest` + +--- + +## 4. Design Observations + +### 4.1 Things Done Well + +- **GBM parameter tuning is thoughtful.** TSLA at sigma=0.50 vs V at 0.17 reflects real-world volatility differences. The shock event system (~0.1% per tick, producing visible moves every ~50s) adds visual drama without destabilizing prices. +- **Cholesky decomposition for correlated moves** is the mathematically correct approach. The sector-based correlation structure (tech 0.6, finance 0.5, cross 0.3) is reasonable. +- **Defensive error handling in both data sources.** Both `_run_loop` (simulator) and `_poll_once`/`_poll_loop` (massive) catch exceptions and continue, which is essential for a long-running background service. +- **SSE implementation is clean.** The version-based change detection avoids sending redundant payloads. The `retry: 1000\n\n` directive ensures browser auto-reconnect. Nginx buffering is proactively disabled. +- **Seed prices in the cache at start** means the frontend gets data on the first SSE poll, with no visible delay. +- **Thread-safe cache with Lock** is the right choice since the Massive client runs API calls via `asyncio.to_thread`. + +### 4.2 Missing Tests + +- **SSE streaming (`stream.py`)** at 31% coverage has no dedicated tests. 
Testing SSE requires an ASGI test client (e.g., `httpx.AsyncClient` with `app`). Given that this is the primary consumer of PriceCache, even a basic integration test would add confidence. +- **No concurrent/thread-safety test for PriceCache.** The lock usage looks correct from inspection, but a test with multiple threads writing simultaneously would verify it empirically. +- **No test for `GBMSimulator` with all 10 default tickers.** Tests use 1-2 tickers. A test confirming the Cholesky decomposition succeeds for the full 10-ticker default set would catch correlation matrix issues. + +### 4.3 Potential Future Considerations + +- The `PriceCache` doesn't cap history; it only stores the latest price per ticker, so memory is bounded at O(tickers). Good. +- The `DEFAULT_CORR` constant (0.3, `seed_prices.py:48`) is defined but never referenced in `_pairwise_correlation`. The static method returns `CROSS_GROUP_CORR` (also 0.3) as the fallback. This is semantically confusing — `DEFAULT_CORR` seems intended for tickers not in any group, but the code returns `CROSS_GROUP_CORR` for all non-matched pairs. Both happen to be 0.3, so behavior is correct, but the naming is misleading. + +--- + +## 5. Verdict + +The market data backend is solid and well-structured. The GBM simulator, price cache, abstract interface, factory pattern, and SSE streaming all work correctly and follow good practices. The architecture will integrate cleanly with the rest of the application. + +**Must fix before proceeding:** +1. Add `[tool.hatch.build.targets.wheel] packages = ["app"]` to `pyproject.toml` — without this, `uv sync` and Docker builds fail. + +**Should fix:** +2. Make the Massive tests resilient to the `massive` package being absent (use `create=True` on patches, or restructure mocks). +3. Fix the `_generate_events` return type annotation. +4. Remove unused imports in test files. + +**Nice to have:** +5. Add a `get_tickers()` public method to `GBMSimulator`. +6. 
Add at least one SSE integration test. +7. Clarify `DEFAULT_CORR` vs `CROSS_GROUP_CORR` naming. diff --git a/planning/MARKET_INTERFACE.md b/planning/archive/MARKET_INTERFACE.md similarity index 100% rename from planning/MARKET_INTERFACE.md rename to planning/archive/MARKET_INTERFACE.md diff --git a/planning/MARKET_SIMULATOR.md b/planning/archive/MARKET_SIMULATOR.md similarity index 100% rename from planning/MARKET_SIMULATOR.md rename to planning/archive/MARKET_SIMULATOR.md diff --git a/planning/MASSIVE_API.md b/planning/archive/MASSIVE_API.md similarity index 100% rename from planning/MASSIVE_API.md rename to planning/archive/MASSIVE_API.md