diff --git a/dimos/core/blueprints.py b/dimos/core/blueprints.py index 60a172a457..6f6f0cd793 100644 --- a/dimos/core/blueprints.py +++ b/dimos/core/blueprints.py @@ -12,21 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. +from abc import ABC from collections import defaultdict from collections.abc import Callable, Mapping from dataclasses import dataclass, field, replace from functools import cached_property, reduce import operator import sys -import types as types_mod from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Literal, Union, cast, get_args, get_origin, get_type_hints +from typing import TYPE_CHECKING, Any, Literal, get_args, get_origin, get_type_hints if TYPE_CHECKING: from dimos.protocol.service.system_configurator.base import SystemConfigurator from dimos.core.global_config import GlobalConfig, global_config -from dimos.core.module import ModuleBase, ModuleSpec, is_module_type +from dimos.core.module import Module, ModuleBase, ModuleSpec, is_module_type from dimos.core.module_coordinator import ModuleCoordinator from dimos.core.stream import In, Out from dimos.core.transport import LCMTransport, PubSubTransport, pLCMTransport @@ -80,6 +80,47 @@ class ModuleRef: optional: bool = False +@dataclass(frozen=True) +class StreamWiring: + """Compiled instruction: set a transport on a module's stream.""" + + module_class: type[ModuleBase] + stream_name: str + transport: PubSubTransport[Any] + + +@dataclass(frozen=True) +class ModuleRefWiring: + """Compiled instruction: link base_module.ref_name → target_module.""" + + base_module: type[ModuleBase] + ref_name: str + target_module: type[ModuleBase] + + +@dataclass(frozen=True) +class RpcWiringPlan: + """Compiled RPC wiring: registry of methods + per-module binding requests.""" + + # rpc_key -> (module_class, method_name) — the full callable registry + registry: dict[str, tuple[type[ModuleBase], str]] + # (module_class, 
set_method_name, linked_rpc_key) — for set_X pattern + set_methods: tuple[tuple[type[ModuleBase], str, str], ...] + # (module_class, requested_name, rpc_key) — for rpc_calls pattern + rpc_call_bindings: tuple[tuple[type[ModuleBase], str, str], ...] + + +@dataclass(frozen=True) +class DeploySpec: + """Complete deployment specification compiled by Blueprint.build().""" + + module_specs: list[ModuleSpec] + stream_wiring: list[StreamWiring] + rpc_wiring: RpcWiringPlan + module_ref_wiring: list[ModuleRefWiring] + disabled_ref_proxies: dict[tuple[type[ModuleBase], str], _DisabledModuleProxy] = field(default_factory=dict) + + @dataclass(frozen=True) class _BlueprintAtom: kwargs: dict[str, Any] @@ -124,15 +165,6 @@ def create(cls, module: type[ModuleBase[Any]], kwargs: dict[str, Any]) -> Self: # linking to specific/known module directly elif is_module_type(annotation): module_refs.append(ModuleRef(name=name, spec=annotation)) - # Optional Spec or Module: SomeSpec | None - elif origin in (Union, types_mod.UnionType): - args = [a for a in get_args(annotation) if a is not type(None)] - if len(args) == 1: - inner = args[0] - if is_spec(inner): - module_refs.append(ModuleRef(name=name, spec=inner, optional=True)) - elif is_module_type(inner): - module_refs.append(ModuleRef(name=name, spec=inner, optional=True)) return cls( module=module, @@ -197,6 +229,25 @@ def _active_blueprints(self) -> tuple[_BlueprintAtom, ...]: disabled = set(self.disabled_modules_tuple) return tuple(bp for bp in self.blueprints if bp.module not in disabled) + def _check_ambiguity( + self, + requested_method_name: str, + interface_methods: Mapping[str, list[tuple[type[ModuleBase], str]]], + requesting_module: type[ModuleBase], + ) -> None: + if ( + requested_method_name in interface_methods + and len(interface_methods[requested_method_name]) > 1 + ): + modules_str = ", ".join( + impl[0].__name__ for impl in interface_methods[requested_method_name] + ) + raise ValueError( + f"Ambiguous RPC method 
'{requested_method_name}' requested by " + f"{requesting_module.__name__}. Multiple implementations found: " + f"{modules_str}. Please use a concrete class name instead." + ) + def _get_transport_for(self, name: str, stream_type: type) -> PubSubTransport[Any]: transport = self.transport_map.get((name, stream_type), None) if transport: @@ -204,17 +255,13 @@ def _get_transport_for(self, name: str, stream_type: type) -> PubSubTransport[An use_pickled = getattr(stream_type, "lcm_encode", None) is None topic = f"/{name}" if self._is_name_unique(name) else f"/{short_id()}" - transport = pLCMTransport(topic) if use_pickled else LCMTransport(topic, stream_type) - - return transport + return pLCMTransport(topic) if use_pickled else LCMTransport(topic, stream_type) @cached_property def _all_name_types(self) -> set[tuple[str, type]]: - # Apply remappings to get the actual names that will be used result = set() for blueprint in self._active_blueprints: for conn in blueprint.streams: - # Check if this stream should be remapped remapped_name = self.remapping_map.get((blueprint.module, conn.name), conn.name) if isinstance(remapped_name, str): result.add((remapped_name, conn.type)) @@ -289,68 +336,71 @@ def _verify_no_name_conflicts(self) -> None: raise ValueError("\n".join(error_lines)) - def _deploy_all_modules( - self, module_coordinator: ModuleCoordinator, global_config: GlobalConfig - ) -> None: - module_specs: list[ModuleSpec] = [] + def _compile_module_specs(self, g: GlobalConfig) -> list[ModuleSpec]: + """Compile the list of module deployment specs (pure — no side effects).""" + specs: list[ModuleSpec] = [] for blueprint in self._active_blueprints: - module_specs.append((blueprint.module, global_config, blueprint.kwargs)) - - module_coordinator.deploy_parallel(module_specs) + specs.append((blueprint.module, g, blueprint.kwargs)) + return specs - def _connect_streams(self, module_coordinator: ModuleCoordinator) -> None: - # dict when given (final/remapped) stream name+type, 
provides a list of modules + original (non-remapped) stream names - streams = defaultdict(list) + def _compile_stream_wiring(self) -> list[StreamWiring]: + """Compile stream transport assignments (pure — no side effects).""" + # Group streams by (remapped_name, type) -> [(module_class, original_name)] + streams: dict[ + tuple[str | type[ModuleBase] | type[Spec], type], list[tuple[type[ModuleBase], str]] + ] = defaultdict(list) for blueprint in self._active_blueprints: for conn in blueprint.streams: - # Check if this stream should be remapped remapped_name = self.remapping_map.get((blueprint.module, conn.name), conn.name) if isinstance(remapped_name, str): - # Group by remapped name and type streams[remapped_name, conn.type].append((blueprint.module, conn.name)) - # Connect all In/Out streams by remapped name and type. - for remapped_name, stream_type in streams.keys(): + wiring: list[StreamWiring] = [] + for (remapped_name, stream_type), module_streams in streams.items(): + assert isinstance(remapped_name, str) transport = self._get_transport_for(remapped_name, stream_type) - for module, original_name in streams[(remapped_name, stream_type)]: - instance = module_coordinator.get_instance(module) # type: ignore[assignment] - instance.set_transport(original_name, transport) # type: ignore[union-attr] + for module_class, original_name in module_streams: + wiring.append( + StreamWiring( + module_class=module_class, + stream_name=original_name, + transport=transport, + ) + ) logger.info( "Transport", name=remapped_name, original_name=original_name, topic=str(getattr(transport, "topic", None)), type=f"{stream_type.__module__}.{stream_type.__qualname__}", - module=module.__name__, + module=module_class.__name__, transport=transport.__class__.__name__, ) + return wiring - def _connect_module_refs(self, module_coordinator: ModuleCoordinator) -> None: - # partly fill out the mod_and_mod_ref_to_proxy - mod_and_mod_ref_to_proxy = { - (module, name): replacement - for (module, 
name), replacement in self.remapping_map.items() - if is_spec(replacement) or is_module_type(replacement) - } - + def _compile_module_ref_wiring(self) -> list[ModuleRefWiring]: + """Resolve module references and return wiring plan (pure — no side effects).""" + mod_and_mod_ref_to_target: dict[tuple[type[ModuleBase], str], type[ModuleBase]] = {} disabled_ref_proxies: dict[tuple[type[ModuleBase], str], _DisabledModuleProxy] = {} disabled_set = set(self.disabled_modules_tuple) - # after this loop we should have an exact module for every module_ref on every blueprint + # Seed with explicit remappings that point to modules/specs + for (module, name), replacement in self.remapping_map.items(): + if is_module_type(replacement): + mod_and_mod_ref_to_target[module, name] = replacement # type: ignore[assignment] + for blueprint in self._active_blueprints: for each_module_ref in blueprint.module_refs: - # we've got to find a another module that implements this spec - spec = mod_and_mod_ref_to_proxy.get( - (blueprint.module, each_module_ref.name), each_module_ref.spec - ) + key = (blueprint.module, each_module_ref.name) + if key in mod_and_mod_ref_to_target: + continue - # if the spec is actually module, use that (basically a user override) + spec = self.remapping_map.get(key, each_module_ref.spec) if is_module_type(spec): - mod_and_mod_ref_to_proxy[blueprint.module, each_module_ref.name] = spec + mod_and_mod_ref_to_target[key] = spec # type: ignore[assignment] continue - # find all available candidates possible_module_candidates = [ each_other_blueprint.module for each_other_blueprint in self._active_blueprints @@ -359,13 +409,12 @@ def _connect_module_refs(self, module_coordinator: ModuleCoordinator) -> None: and spec_structural_compliance(each_other_blueprint.module, spec) ) ] - # we keep valid separate from invalid to provide a better error message for "almost" valid cases valid_module_candidates = [ each_candidate for each_candidate in possible_module_candidates if 
spec_annotation_compliance(each_candidate, spec) ] - # none + if len(possible_module_candidates) == 0: if each_module_ref.optional: continue @@ -395,22 +444,16 @@ def _connect_module_refs(self, module_coordinator: ModuleCoordinator) -> None: raise Exception( f"""The {blueprint.module.__name__} has a module reference ({each_module_ref}) which requested a module that fills out the {each_module_ref.spec.__name__} spec. But I couldn't find a module that met that spec.\n""" ) - # exactly one structurally valid candidate elif len(possible_module_candidates) == 1: if len(valid_module_candidates) == 0: logger.warning( f"""The {blueprint.module.__name__} has a module reference ({each_module_ref}) which requested a module that fills out the {each_module_ref.spec.__name__} spec. I found a module ({possible_module_candidates[0].__name__}) that met that spec structurally, but it had a mismatch in type annotations.\nPlease either change the {each_module_ref.spec.__name__} spec or the {possible_module_candidates[0].__name__} module.\n""" ) - mod_and_mod_ref_to_proxy[blueprint.module, each_module_ref.name] = ( - possible_module_candidates[0] - ) - continue - # more than one + mod_and_mod_ref_to_target[key] = possible_module_candidates[0] elif len(valid_module_candidates) > 1: raise Exception( f"""The {blueprint.module.__name__} has a module reference ({each_module_ref}) which requested a module that fills out the {each_module_ref.spec.__name__} spec. 
But I found multiple modules that met that spec: {valid_module_candidates}.\nTo fix this use .remappings, for example:\n autoconnect(...).remappings([ ({blueprint.module.__name__}, {each_module_ref.name!r}, ) ])\n""" ) - # structural candidates, but no valid candidates elif len(valid_module_candidates) == 0: possible_module_candidates_str = ", ".join( [each_candidate.__name__ for each_candidate in possible_module_candidates] @@ -418,83 +461,116 @@ def _connect_module_refs(self, module_coordinator: ModuleCoordinator) -> None: raise Exception( f"""The {blueprint.module.__name__} has a module reference ({each_module_ref}) which requested a module that fills out the {each_module_ref.spec.__name__} spec. Some modules ({possible_module_candidates_str}) met the spec structurally but had a mismatch in type annotations\n""" ) - # one valid candidate (and more than one structurally valid candidate) else: - mod_and_mod_ref_to_proxy[blueprint.module, each_module_ref.name] = ( - valid_module_candidates[0] - ) + mod_and_mod_ref_to_target[key] = valid_module_candidates[0] + + wiring = [ + ModuleRefWiring(base_module=base_module, ref_name=ref_name, target_module=target) + for (base_module, ref_name), target in mod_and_mod_ref_to_target.items() + ] + return wiring, disabled_ref_proxies + + def _compile_rpc_wiring(self) -> RpcWiringPlan: + """Compile the RPC method registry and binding requests (pure — no side effects).""" + # registry: rpc_key -> (module_class, method_name) + registry: dict[str, tuple[type[ModuleBase], str]] = {} + + # Track interface methods to detect ambiguity + interface_methods: defaultdict[str, list[tuple[type[ModuleBase], str]]] = defaultdict(list) + interface_methods_dot: defaultdict[str, list[tuple[type[ModuleBase], str]]] = defaultdict( + list + ) - # now that we know the streams, we mutate the RPCClient objects - for (base_module, module_ref_name), target_module in mod_and_mod_ref_to_proxy.items(): - base_module_proxy = 
module_coordinator.get_instance(base_module) - target_module_proxy = module_coordinator.get_instance(target_module) # type: ignore[type-var,arg-type] - setattr( - base_module_proxy, - module_ref_name, - target_module_proxy, - ) - # Ensure the remote module instance can use the module ref inside its own RPC handlers. - base_module_proxy.set_module_ref(module_ref_name, target_module_proxy) + for blueprint in self._active_blueprints: + for method_name in blueprint.module.rpcs.keys(): # type: ignore[attr-defined] + registry[f"{blueprint.module.__name__}_{method_name}"] = ( + blueprint.module, + method_name, + ) + registry[f"{blueprint.module.__name__}.{method_name}"] = ( + blueprint.module, + method_name, + ) + + for base in blueprint.module.mro(): + if ( + base is not Module + and issubclass(base, ABC) + and hasattr(base, method_name) + and getattr(base, method_name, None) is not None + ): + interface_methods_dot[f"{base.__name__}.{method_name}"].append( + (blueprint.module, method_name) + ) + interface_methods[f"{base.__name__}_{method_name}"].append( + (blueprint.module, method_name) + ) - # Wire up no-op proxies for refs whose providers were disabled. 
- for (base_module, module_ref_name), proxy in disabled_ref_proxies.items(): - base_module_proxy = module_coordinator.get_instance(base_module) - setattr(base_module_proxy, module_ref_name, proxy) - base_module_proxy.set_module_ref(module_ref_name, cast("Any", proxy)) + # Add non-ambiguous interface methods to registry + for key, implementations in interface_methods_dot.items(): + if len(implementations) == 1: + registry[key] = implementations[0] + for key, implementations in interface_methods.items(): + if len(implementations) == 1: + registry[key] = implementations[0] + + # Compile set_ method bindings + set_methods: list[tuple[type[ModuleBase], str, str]] = [] + for blueprint in self._active_blueprints: + for method_name in blueprint.module.rpcs.keys(): # type: ignore[attr-defined] + if not method_name.startswith("set_"): + continue + linked_name = method_name.removeprefix("set_") + self._check_ambiguity(linked_name, interface_methods, blueprint.module) + if linked_name in registry: + set_methods.append((blueprint.module, method_name, linked_name)) + + # Compile rpc_call bindings (uses rpc_calls list from module) + rpc_call_bindings: list[tuple[type[ModuleBase], str, str]] = [] + for blueprint in self._active_blueprints: + rpc_call_names: list[str] = getattr(blueprint.module, "rpc_calls", []) + for requested_name in rpc_call_names: + self._check_ambiguity(requested_name, interface_methods_dot, blueprint.module) + if requested_name in registry: + rpc_call_bindings.append((blueprint.module, requested_name, requested_name)) + + return RpcWiringPlan( + registry=registry, + set_methods=tuple(set_methods), + rpc_call_bindings=tuple(rpc_call_bindings), + ) def build( self, cli_config_overrides: Mapping[str, Any] | None = None, ) -> ModuleCoordinator: logger.info("Building the blueprint") + + # Phase 1: Configuration global_config.update(**dict(self.global_config_overrides)) if cli_config_overrides: global_config.update(**dict(cli_config_overrides)) + # Phase 2: 
Validation self._run_configurators() self._check_requirements() self._verify_no_name_conflicts() - logger.info("Starting the modules") - module_coordinator = ModuleCoordinator(g=global_config) - module_coordinator.start() - - # all module constructors are called here (each of them setup their own) - self._deploy_all_modules(module_coordinator, global_config) - self._connect_streams(module_coordinator) - self._connect_module_refs(module_coordinator) - - module_coordinator.build_all_modules() - module_coordinator.start_all_modules() - - self._log_blueprint_graph(module_coordinator) - - return module_coordinator - - def _log_blueprint_graph(self, module_coordinator: ModuleCoordinator) -> None: - """Log the module graph to Rerun if a RerunBridgeModule is active.""" - from dimos.visualization.rerun.bridge import RerunBridgeModule - - if not any(bp.module is RerunBridgeModule for bp in self._active_blueprints): - return - - import shutil - - if not shutil.which("dot"): - logger.info( - "graphviz not found, skipping blueprint graph. 
Install: sudo apt install graphviz" - ) - return - - try: - from dimos.core.introspection.blueprint.dot import render + # Phase 3: Compile deploy spec (pure — no side effects) + module_ref_wiring, disabled_ref_proxies = self._compile_module_ref_wiring() + deploy_spec = DeploySpec( + module_specs=self._compile_module_specs(global_config), + stream_wiring=self._compile_stream_wiring(), + module_ref_wiring=module_ref_wiring, + rpc_wiring=self._compile_rpc_wiring(), + disabled_ref_proxies=disabled_ref_proxies, + ) - dot_code = render(self) - module_names = [bp.module.__name__ for bp in self._active_blueprints] - bridge = module_coordinator.get_instance(RerunBridgeModule) # type: ignore[arg-type] - bridge.log_blueprint_graph(dot_code, module_names) - except Exception: - logger.error("Failed to log blueprint graph to Rerun", exc_info=True) + # Phase 4: Execute (all mutations go through coordinator) + logger.info("Starting the modules") + coordinator = ModuleCoordinator(g=global_config, deploy_spec=deploy_spec) + coordinator.start() + return coordinator def autoconnect(*blueprints: Blueprint) -> Blueprint: @@ -525,7 +601,6 @@ def autoconnect(*blueprints: Blueprint) -> Blueprint: def _eliminate_duplicates(blueprints: list[_BlueprintAtom]) -> list[_BlueprintAtom]: - # The duplicates are eliminated in reverse so that newer blueprints override older ones. 
seen = set() unique_blueprints = [] for bp in reversed(blueprints): diff --git a/dimos/core/global_config.py b/dimos/core/global_config.py index 90461932a2..5a5f7ba7bc 100644 --- a/dimos/core/global_config.py +++ b/dimos/core/global_config.py @@ -38,6 +38,7 @@ class GlobalConfig(BaseSettings): new_memory: bool = False viewer: ViewerBackend = "rerun" n_workers: int = 2 + worker_to_module_ratio: float = 1.0 memory_limit: str = "auto" mujoco_camera_position: str | None = None mujoco_room: str | None = None diff --git a/dimos/core/module_coordinator.py b/dimos/core/module_coordinator.py index a80c1b6f44..63154541db 100644 --- a/dimos/core/module_coordinator.py +++ b/dimos/core/module_coordinator.py @@ -26,6 +26,7 @@ from dimos.utils.safe_thread_map import safe_thread_map if TYPE_CHECKING: + from dimos.core.blueprints import DeploySpec, ModuleRefWiring, RpcWiringPlan, StreamWiring from dimos.core.rpc_client import ModuleProxy, ModuleProxyProtocol logger = setup_logger() @@ -34,15 +35,27 @@ class ModuleCoordinator(Resource): # type: ignore[misc] + """ + - Module (classes) should be able to be deployed, stopped, and re-deployed on one instance of ModuleCoordinator + - Arguably ModuleCoordinator could be called the "DimosRuntime" + - ModuleCoordinator is responsible for all global "addresses". 
+ Ex: it should make sure all modules are using the same LCM url, the same rerun port, etc + (it may not do all of that at time of writing but that is the intention/job of this class) + - Modules shouldn't be deployed on their own (except for testing) + """ + _managers: dict[str, DeploymentManager] _global_config: GlobalConfig + _deploy_spec: DeploySpec | None _deployed_modules: dict[type[ModuleBase], ModuleProxyProtocol] def __init__( self, g: GlobalConfig = global_config, + deploy_spec: DeploySpec | None = None, ) -> None: self._global_config = g + self._deploy_spec = deploy_spec manager_types: list[type[DeploymentManager]] = [WorkerManagerDocker, WorkerManager] self._managers: dict[str, DeploymentManager] = { cls.deployment_identifier: cls(g=g) for cls in manager_types @@ -56,6 +69,16 @@ def start(self) -> None: for m in self._managers.values(): m.start() + if self._deploy_spec is not None: + spec = self._deploy_spec + self.deploy_parallel(spec.module_specs) + self._wire_streams(spec.stream_wiring) + self._wire_rpc_methods(spec.rpc_wiring) + self._wire_module_refs(spec.module_ref_wiring) + self._wire_disabled_ref_proxies(spec.disabled_ref_proxies) + self._build_all_modules() + self.start_all_modules() + def health_check(self) -> bool: return all(m.health_check() for m in self._managers.values()) @@ -107,7 +130,6 @@ def deploy_parallel(self, module_specs: list[ModuleSpec]) -> list[ModuleProxy]: indices_by_deployment: dict[str, list[int]] = {} specs_by_deployment: dict[str, list[ModuleSpec]] = {} for index, spec in enumerate(module_specs): - # spec = (module_class, global_config, kwargs) dep = spec[0].deployment indices_by_deployment.setdefault(dep, []).append(index) specs_by_deployment.setdefault(dep, []).append(spec) @@ -134,7 +156,48 @@ def _deploy_group(dep: str) -> None: ) return results - def build_all_modules(self) -> None: + def _wire_streams(self, wiring: list[StreamWiring]) -> None: + """Apply stream transports to deployed modules.""" + for w in wiring: 
+ instance = self.get_instance(w.module_class) + instance.set_transport(w.stream_name, w.transport) # type: ignore[union-attr] + + def _wire_rpc_methods(self, plan: RpcWiringPlan) -> None: + """Wire RPC methods between modules using the compiled plan.""" + # Build callable registry from deployed instances + callables: dict[str, Any] = {} + for rpc_key, (module_class, method_name) in plan.registry.items(): + proxy = self.get_instance(module_class) + callables[rpc_key] = getattr(proxy, method_name) + + # Apply set_ methods + for module_class, set_method, linked_key in plan.set_methods: + if linked_key in callables: + instance = self.get_instance(module_class) + getattr(instance, set_method)(callables[linked_key]) + + # Apply rpc_call bindings + for module_class, requested_name, rpc_key in plan.rpc_call_bindings: + if rpc_key in callables: + instance = self.get_instance(module_class) + instance.set_rpc_method(requested_name, callables[rpc_key]) # type: ignore[union-attr] + + def _wire_module_refs(self, wiring: list[ModuleRefWiring]) -> None: + """Set module references between deployed modules.""" + for w in wiring: + base_proxy = self.get_instance(w.base_module) + target_proxy = self.get_instance(w.target_module) + setattr(base_proxy, w.ref_name, target_proxy) + base_proxy.set_module_ref(w.ref_name, target_proxy) # type: ignore[union-attr] + + def _wire_disabled_ref_proxies(self, proxies: dict[tuple[type[ModuleBase], str], Any]) -> None: + """Wire up no-op proxies for refs whose providers were disabled.""" + for (base_module, module_ref_name), proxy in proxies.items(): + base_module_proxy = self.get_instance(base_module) + setattr(base_module_proxy, module_ref_name, proxy) + base_module_proxy.set_module_ref(module_ref_name, proxy) # type: ignore[union-attr] + + def _build_all_modules(self) -> None: """Call build() on all deployed modules in parallel. build() handles heavy one-time work (docker builds, LFS downloads, etc.) 
diff --git a/pyproject.toml b/pyproject.toml index 7e2f38546e..d6c1ce988e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -327,8 +327,12 @@ docker = [ "sortedcontainers", "PyTurboJPEG", "rerun-sdk", + "typing_extensions", "open3d-unofficial-arm; platform_system == 'Linux' and platform_machine == 'aarch64'", "open3d>=0.18.0; platform_system != 'Linux' or platform_machine != 'aarch64'", + # these below should be removed later, right now they are needed even for running `dimos --help` (separate non-docker issue) + "langchain-core", + "matplotlib", ] base = [ diff --git a/uv.lock b/uv.lock index 529842294b..5d1272f673 100644 --- a/uv.lock +++ b/uv.lock @@ -1859,7 +1859,9 @@ dev = [ ] docker = [ { name = "dimos-lcm" }, + { name = "langchain-core" }, { name = "lcm" }, + { name = "matplotlib" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "open3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, @@ -1877,6 +1879,7 @@ docker = [ { name = "sortedcontainers" }, { name = "structlog" }, { name = "typer" }, + { name = "typing-extensions" }, ] drone = [ { name = "pymavlink" }, @@ -2020,6 +2023,7 @@ requires-dist = [ { name = "langchain", marker = "extra == 'agents'", specifier = "==1.2.3" }, { name = "langchain-chroma", marker = "extra == 'agents'", specifier = ">=1,<2" }, { name = "langchain-core", marker = "extra == 'agents'", specifier = "==1.2.3" }, + { name = "langchain-core", marker = "extra == 'docker'" }, { name = "langchain-huggingface", marker = "extra == 'agents'", specifier = ">=1,<2" }, { name = "langchain-ollama", marker = "extra == 'agents'", specifier = ">=1,<2" }, { name = "langchain-openai", marker = "extra == 'agents'", specifier = ">=1,<2" }, @@ -2031,6 +2035,7 @@ requires-dist = [ { name = "llvmlite", 
specifier = ">=0.42.0" }, { name = "lxml-stubs", marker = "extra == 'dev'", specifier = ">=0.5.1,<1" }, { name = "lz4", specifier = ">=4.4.5" }, + { name = "matplotlib", marker = "extra == 'docker'" }, { name = "matplotlib", marker = "extra == 'manipulation'", specifier = ">=3.7.1" }, { name = "md-babel-py", marker = "extra == 'dev'", specifier = "==1.1.1" }, { name = "moondream", marker = "extra == 'perception'" }, @@ -2142,6 +2147,7 @@ requires-dist = [ { name = "types-tensorflow", marker = "extra == 'dev'", specifier = ">=2.18.0.20251008,<3" }, { name = "types-tqdm", marker = "extra == 'dev'", specifier = ">=4.67.0.20250809,<5" }, { name = "typing-extensions", marker = "python_full_version < '3.11'", specifier = ">=4.0" }, + { name = "typing-extensions", marker = "extra == 'docker'" }, { name = "ultralytics", marker = "extra == 'perception'", specifier = ">=8.3.70" }, { name = "unitree-webrtc-connect-leshy", marker = "extra == 'unitree'", specifier = ">=2.0.7" }, { name = "uvicorn", marker = "extra == 'web'", specifier = ">=0.34.0" },