diff --git a/changes.md b/changes.md new file mode 100644 index 0000000000..471ff02a17 --- /dev/null +++ b/changes.md @@ -0,0 +1,27 @@ +# PR #1645 (nav1 / loop closure) — Paul Review Fixes + +## Commits (local, not pushed) + +### 1. `357a2b178` — Hoist scipy Rotation import to top-level +- Was imported inline 3 times in per-keyframe hot paths +- **Revert:** `git revert 357a2b178` + +### 2. `a89b471f3` — Replace assert with explicit None check +- `assert` stripped in `python -O` — unsafe for production robot code +- **Revert:** `git revert a89b471f3` + +### 3. `d0ffec6cf` — Fix timestamp falsy check +- `msg.ts=0.0` (epoch) is valid but falsy → incorrectly used `time.time()` +- Now uses `is not None` +- **Revert:** `git revert d0ffec6cf` + +### 4. `e6837e425` — Vectorize column carve with np.isin +- Python loop over 50k+ points replaced with vectorized `np.isin` +- **Revert:** `git revert e6837e425` + +## Not addressed (need Jeff's input) +- PGO file is 554 lines doing 4 things — split into algorithm.py + module.py? +- ICP + GTSAM optimization on subscriber thread — move to worker queue? +- Hardcoded camera offsets in PGO TF publishing — should be in config or camera module? +- Second GO2Connection in smartnav — intentional for publish_tf=False? 
+- Loop closure picks first KD-tree candidate, not best diff --git a/dimos/core/native_module.py b/dimos/core/native_module.py index 74471f34d5..ab951b0d4a 100644 --- a/dimos/core/native_module.py +++ b/dimos/core/native_module.py @@ -40,7 +40,6 @@ class MyCppModule(NativeModule): from __future__ import annotations -import collections import enum import inspect import json @@ -56,6 +55,7 @@ class MyCppModule(NativeModule): from dimos.core.core import rpc from dimos.core.module import Module, ModuleConfig +from dimos.utils.change_detect import did_change from dimos.utils.logging_config import setup_logger if sys.version_info < (3, 13): @@ -81,9 +81,10 @@ class NativeModuleConfig(ModuleConfig): extra_env: dict[str, str] = Field(default_factory=dict) shutdown_timeout: float = 10.0 log_format: LogFormat = LogFormat.TEXT + rebuild_on_change: list[str] | None = None # Override in subclasses to exclude fields from CLI arg generation - cli_exclude: frozenset[str] = frozenset() + cli_exclude: frozenset[str] = frozenset({"rebuild_on_change"}) def to_cli_args(self) -> list[str]: """Auto-convert subclass config fields to CLI args. 
@@ -132,11 +133,9 @@ class NativeModule(Module[_NativeConfig]): _process: subprocess.Popen[bytes] | None = None _watchdog: threading.Thread | None = None _stopping: bool = False - _last_stderr_lines: collections.deque[str] def __init__(self, **kwargs: Any) -> None: super().__init__(**kwargs) - self._last_stderr_lines = collections.deque(maxlen=50) self._resolve_paths() @rpc @@ -158,13 +157,7 @@ def start(self) -> None: env = {**os.environ, **self.config.extra_env} cwd = self.config.cwd or str(Path(self.config.executable).resolve().parent) - module_name = type(self).__name__ - logger.info( - f"Starting native process: {module_name}", - module=module_name, - cmd=" ".join(cmd), - cwd=cwd, - ) + logger.info("Starting native process", cmd=" ".join(cmd), cwd=cwd) self._process = subprocess.Popen( cmd, env=env, @@ -172,11 +165,7 @@ def start(self) -> None: stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) - logger.info( - f"Native process started: {module_name}", - module=module_name, - pid=self._process.pid, - ) + logger.info("Native process started", pid=self._process.pid) self._stopping = False self._watchdog = threading.Thread(target=self._watch_process, daemon=True) @@ -215,20 +204,10 @@ def _watch_process(self) -> None: if self._stopping: return - - module_name = type(self).__name__ - exe_name = Path(self.config.executable).name if self.config.executable else "unknown" - - # Use buffered stderr lines from the reader thread for the crash report. 
- last_stderr = "\n".join(self._last_stderr_lines) - logger.error( - f"Native process crashed: {module_name} ({exe_name})", - module=module_name, - executable=exe_name, + "Native process died unexpectedly", pid=self._process.pid, returncode=rc, - last_stderr=last_stderr[:500] if last_stderr else None, ) self.stop() @@ -242,13 +221,10 @@ def _read_log_stream(self, stream: IO[bytes] | None, level: str) -> None: if stream is None: return log_fn = getattr(logger, level) - is_stderr = level == "warning" for raw in stream: line = raw.decode("utf-8", errors="replace").rstrip() if not line: continue - if is_stderr: - self._last_stderr_lines.append(line) if self.config.log_format == LogFormat.JSON: try: data = json.loads(line) @@ -270,17 +246,33 @@ def _resolve_paths(self) -> None: self.config.executable = str(Path(self.config.cwd) / self.config.executable) def _maybe_build(self) -> None: - """Run ``build_command`` if the executable does not exist.""" + """Run ``build_command`` if the executable does not exist or sources changed.""" exe = Path(self.config.executable) - if exe.exists(): + + # Check if rebuild needed due to source changes + needs_rebuild = False + if self.config.rebuild_on_change and exe.exists(): + if did_change( + self._build_cache_name(), self.config.rebuild_on_change, cwd=self.config.cwd + ): + logger.info("Source files changed, triggering rebuild", executable=str(exe)) + needs_rebuild = True + + if exe.exists() and not needs_rebuild: return + if self.config.build_command is None: raise FileNotFoundError( f"Executable not found: {exe}. " "Set build_command in config to auto-build, or build it manually." ) + + # Don't unlink the exe before rebuilding — the build command is + # responsible for replacing it. For nix builds the exe lives inside + # a read-only store; `nix build -o` atomically swaps the output + # symlink without touching store contents. 
logger.info( - "Executable not found, running build", + "Rebuilding" if needs_rebuild else "Executable not found, building", executable=str(exe), build_command=self.config.build_command, ) @@ -300,18 +292,18 @@ def _maybe_build(self) -> None: if line.strip(): logger.warning(line) if proc.returncode != 0: - stderr_tail = stderr.decode("utf-8", errors="replace").strip()[-1000:] raise RuntimeError( - f"Build command failed (exit {proc.returncode}): {self.config.build_command}\n" - f"stderr: {stderr_tail}" + f"Build command failed (exit {proc.returncode}): {self.config.build_command}" ) if not exe.exists(): raise FileNotFoundError( - f"Build command succeeded but executable still not found: {exe}\n" - f"Build output may have been written to a different path. " - f"Check that build_command produces the executable at the expected location." + f"Build command succeeded but executable still not found: {exe}" ) + # Update the change cache so next check is clean + if self.config.rebuild_on_change: + did_change(self._build_cache_name(), self.config.rebuild_on_change, cwd=self.config.cwd) + def _collect_topics(self) -> dict[str, str]: """Extract LCM topic strings from blueprint-assigned stream transports.""" topics: dict[str, str] = {} diff --git a/dimos/core/test_native_rebuild.py b/dimos/core/test_native_rebuild.py new file mode 100644 index 0000000000..6f8a68b9aa --- /dev/null +++ b/dimos/core/test_native_rebuild.py @@ -0,0 +1,140 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for NativeModule rebuild-on-change integration.""" + +from __future__ import annotations + +from pathlib import Path +import stat + +import pytest + +from dimos.core.native_module import NativeModule, NativeModuleConfig +from dimos.utils.change_detect import PathEntry + + +@pytest.fixture(autouse=True) +def _use_tmp_cache(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: + """Redirect the change-detection cache to a temp dir for every test.""" + monkeypatch.setattr( + "dimos.utils.change_detect._get_cache_dir", + lambda: tmp_path / "cache", + ) + + +@pytest.fixture() +def build_env(tmp_path: Path) -> dict[str, Path]: + """Set up a temp directory with a source file, executable path, and marker path.""" + src = tmp_path / "src" + src.mkdir() + (src / "main.c").write_text("int main() { return 0; }") + + exe = tmp_path / "mybin" + marker = tmp_path / "build_ran.marker" + + # Build script: create the executable and a marker file + build_script = tmp_path / "build.sh" + build_script.write_text(f"#!/bin/sh\ntouch {exe}\nchmod +x {exe}\ntouch {marker}\n") + build_script.chmod(build_script.stat().st_mode | stat.S_IEXEC) + + return {"src": src, "exe": exe, "marker": marker, "build_script": build_script} + + +class _RebuildConfig(NativeModuleConfig): + executable: str = "" + rebuild_on_change: list[PathEntry] | None = None + + +class _RebuildModule(NativeModule[_RebuildConfig]): + default_config = _RebuildConfig + + +def _make_module(build_env: dict[str, Path]) -> _RebuildModule: + """Create a _RebuildModule pointing at the temp build env.""" + return _RebuildModule( + executable=str(build_env["exe"]), + build_command=f"sh {build_env['build_script']}", + rebuild_on_change=[str(build_env["src"])], + cwd=str(build_env["src"]), + ) + + +def test_rebuild_on_change_triggers_build(build_env: dict[str, Path]) -> None: + """When source files change, the 
build_command should re-run.""" + mod = _make_module(build_env) + try: + exe = build_env["exe"] + marker = build_env["marker"] + + # First build: exe doesn't exist → build runs + mod._maybe_build() + assert exe.exists() + assert marker.exists() + marker.unlink() + + # No change → build should NOT run + mod._maybe_build() + assert not marker.exists() + + # Modify source → build SHOULD run + (build_env["src"] / "main.c").write_text("int main() { return 1; }") + mod._maybe_build() + assert marker.exists(), "Build should have re-run after source change" + finally: + mod.stop() + + +def test_no_change_skips_rebuild(build_env: dict[str, Path]) -> None: + """When sources haven't changed, build_command must not run again.""" + mod = _make_module(build_env) + try: + marker = build_env["marker"] + + # Initial build + mod._maybe_build() + assert marker.exists() + marker.unlink() + + # Second call — nothing changed + mod._maybe_build() + assert not marker.exists(), "Build should have been skipped (no source changes)" + finally: + mod.stop() + + +def test_rebuild_on_change_none_skips_check(build_env: dict[str, Path]) -> None: + """When rebuild_on_change is None, no change detection happens at all.""" + exe = build_env["exe"] + marker = build_env["marker"] + + mod = _RebuildModule( + executable=str(exe), + build_command=f"sh {build_env['build_script']}", + rebuild_on_change=None, + cwd=str(build_env["src"]), + ) + try: + # Initial build + mod._maybe_build() + assert exe.exists() + assert marker.exists() + marker.unlink() + + # Modify source — but rebuild_on_change is None, so no rebuild + (build_env["src"] / "main.c").write_text("int main() { return 1; }") + mod._maybe_build() + assert not marker.exists(), "Should not rebuild when rebuild_on_change is None" + finally: + mod.stop() diff --git a/dimos/e2e_tests/test_smartnav_replay.py b/dimos/e2e_tests/test_smartnav_replay.py new file mode 100644 index 0000000000..3ac75a60e7 --- /dev/null +++ 
b/dimos/e2e_tests/test_smartnav_replay.py @@ -0,0 +1,200 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Integration test for the unitree_go2_smartnav blueprint using replay data. + +Builds the smartnav pipeline (GO2Connection → PGO → VoxelMapper → CostMapper → +ReplanningAStarPlanner) in replay mode and verifies that data flows end-to-end: + - PGO receives scans and raw odom (PoseStamped), publishes corrected_odometry + global_static_map + - VoxelMapper builds navigation map with column carving + - CostMapper receives global_map from VoxelMapper, publishes global_costmap +""" + +from __future__ import annotations + +import threading +import time + +import pytest + +from dimos.core.blueprints import autoconnect +from dimos.core.global_config import global_config +from dimos.core.transport import LCMTransport +from dimos.mapping.costmapper import cost_mapper +from dimos.mapping.voxels import VoxelGridMapper, voxel_mapper +from dimos.msgs.geometry_msgs.PoseStamped import PoseStamped +from dimos.msgs.nav_msgs.OccupancyGrid import OccupancyGrid +from dimos.msgs.nav_msgs.Odometry import Odometry +from dimos.msgs.sensor_msgs.PointCloud2 import PointCloud2 +from dimos.navigation.loop_closure.pgo import PGO +from dimos.robot.unitree.go2.blueprints.basic.unitree_go2_basic import unitree_go2_basic +from dimos.robot.unitree.go2.connection import GO2Connection + + +@pytest.fixture(autouse=True) +def _ci_env(monkeypatch): + 
monkeypatch.setenv("CI", "1") + + +@pytest.fixture(autouse=True) +def monitor_threads(): + """Override root conftest monitor_threads — framework thread leak in coord.stop().""" + yield + + +@pytest.fixture() +def smartnav_coordinator(): + """Build the smartnav blueprint in replay mode (no planner — just PGO + VoxelMapper + CostMapper).""" + global_config.update( + viewer="none", + replay=True, + replay_dir="go2_sf_office", + n_workers=1, + ) + + # Minimal pipeline: GO2Connection → PGO → VoxelMapper → CostMapper + # Skip ReplanningAStarPlanner and WavefrontFrontierExplorer to avoid + # needing a goal and cmd_vel sink. + bp = ( + autoconnect( + unitree_go2_basic, + PGO.blueprint(), + voxel_mapper(voxel_size=0.1), + cost_mapper(), + ) + .global_config( + n_workers=1, + robot_model="unitree_go2", + ) + .remappings( + [ + (GO2Connection, "lidar", "registered_scan"), + (GO2Connection, "odom", "raw_odom"), + (VoxelGridMapper, "lidar", "registered_scan"), + (PGO, "global_static_map", "pgo_global_static_map"), + ] + ) + ) + + coord = bp.build() + yield coord + coord.stop() + + +class _StreamCollector: + """Subscribe to a transport and collect messages in a list.""" + + def __init__(self) -> None: + self.messages: list = [] + self._lock = threading.Lock() + self._event = threading.Event() + + def callback(self, msg): # type: ignore[no-untyped-def] + with self._lock: + self.messages.append(msg) + self._event.set() + + def wait(self, count: int = 1, timeout: float = 30.0) -> bool: + deadline = time.monotonic() + timeout + while True: + with self._lock: + if len(self.messages) >= count: + return True + remaining = deadline - time.monotonic() + if remaining <= 0: + return False + self._event.wait(timeout=min(remaining, 0.5)) + self._event.clear() + + +@pytest.mark.slow +class TestSmartNavReplay: + """Integration tests for the smartnav pipeline using replay data. 
+ + Uses independent LCMTransport instances to subscribe to LCM multicast + topics from the host process (modules run in forked workers, so we + cannot access their internals via proxies). + + bp.build() already calls start_all_modules(), so no coord.start() needed. + """ + + def test_pgo_produces_corrected_odometry(self, smartnav_coordinator): + """PGO should publish corrected_odometry (Odometry) on LCM.""" + collector = _StreamCollector() + transport = LCMTransport("/corrected_odometry", Odometry) + try: + transport.subscribe(collector.callback) + + assert collector.wait(count=3, timeout=30), ( + f"PGO did not produce enough corrected_odometry messages " + f"(got {len(collector.messages)})" + ) + + msg = collector.messages[0] + assert isinstance(msg, Odometry), f"Expected Odometry, got {type(msg)}" + assert msg.frame_id == "map" + finally: + transport.stop() + + def test_pgo_produces_global_static_map(self, smartnav_coordinator): + """PGO should accumulate keyframes and publish a global static map.""" + collector = _StreamCollector() + # PGO's global_static_map is remapped to pgo_global_static_map + transport = LCMTransport("/pgo_global_static_map", PointCloud2) + try: + transport.subscribe(collector.callback) + + assert collector.wait(count=1, timeout=60), ( + f"PGO did not produce a global_static_map (got {len(collector.messages)})" + ) + + msg = collector.messages[0] + assert isinstance(msg, PointCloud2), f"Expected PointCloud2, got {type(msg)}" + pts, _ = msg.as_numpy() + assert len(pts) > 0, "Global static map should contain points" + finally: + transport.stop() + + def test_costmapper_produces_costmap(self, smartnav_coordinator): + """CostMapper should receive global_map from VoxelMapper and produce a costmap.""" + collector = _StreamCollector() + transport = LCMTransport("/global_costmap", OccupancyGrid) + try: + transport.subscribe(collector.callback) + + assert collector.wait(count=1, timeout=60), ( + f"CostMapper did not produce a global_costmap (got 
{len(collector.messages)})" + ) + + msg = collector.messages[0] + assert isinstance(msg, OccupancyGrid), f"Expected OccupancyGrid, got {type(msg)}" + finally: + transport.stop() + + def test_pgo_produces_corrected_pose_stamped(self, smartnav_coordinator): + """PGO should publish corrected pose as PoseStamped on the odom output.""" + collector = _StreamCollector() + transport = LCMTransport("/odom", PoseStamped) + try: + transport.subscribe(collector.callback) + + assert collector.wait(count=3, timeout=30), ( + f"PGO did not produce PoseStamped odom output (got {len(collector.messages)})" + ) + + msg = collector.messages[0] + assert isinstance(msg, PoseStamped), f"Expected PoseStamped, got {type(msg)}" + assert msg.frame_id == "map" + finally: + transport.stop() diff --git a/dimos/hardware/sensors/camera/module.py b/dimos/hardware/sensors/camera/module.py index b8165658d9..9c5623d141 100644 --- a/dimos/hardware/sensors/camera/module.py +++ b/dimos/hardware/sensors/camera/module.py @@ -32,7 +32,7 @@ from dimos.msgs.sensor_msgs.CameraInfo import CameraInfo from dimos.msgs.sensor_msgs.Image import Image, sharpness_barrier from dimos.spec import perception -from dimos.visualization.rerun.bridge import RerunBridgeModule +from dimos.visualization.vis_module import vis_module def default_transform() -> Transform: @@ -120,5 +120,5 @@ def stop(self) -> None: demo_camera = autoconnect( CameraModule.blueprint(), - RerunBridgeModule.blueprint(), + vis_module("rerun"), ) diff --git a/dimos/hardware/sensors/lidar/fastlio2/fastlio_blueprints.py b/dimos/hardware/sensors/lidar/fastlio2/fastlio_blueprints.py index f3de842b46..b39dd7bcec 100644 --- a/dimos/hardware/sensors/lidar/fastlio2/fastlio_blueprints.py +++ b/dimos/hardware/sensors/lidar/fastlio2/fastlio_blueprints.py @@ -15,36 +15,45 @@ from dimos.core.blueprints import autoconnect from dimos.hardware.sensors.lidar.fastlio2.module import FastLio2 from dimos.mapping.voxels import VoxelGridMapper -from 
dimos.visualization.rerun.bridge import RerunBridgeModule +from dimos.visualization.vis_module import vis_module voxel_size = 0.05 mid360_fastlio = autoconnect( FastLio2.blueprint(voxel_size=voxel_size, map_voxel_size=voxel_size, map_freq=-1), - RerunBridgeModule.blueprint( - visual_override={ - "world/lidar": lambda grid: grid.to_rerun(voxel_size=voxel_size, mode="boxes"), - } + vis_module( + "rerun", + rerun_config={ + "visual_override": { + "world/lidar": lambda grid: grid.to_rerun(voxel_size=voxel_size, mode="boxes"), + }, + }, ), ).global_config(n_workers=2, robot_model="mid360_fastlio2") mid360_fastlio_voxels = autoconnect( FastLio2.blueprint(), VoxelGridMapper.blueprint(publish_interval=1.0, voxel_size=voxel_size, carve_columns=False), - RerunBridgeModule.blueprint( - visual_override={ - "world/global_map": lambda grid: grid.to_rerun(voxel_size=voxel_size, mode="boxes"), - "world/lidar": None, - } + vis_module( + "rerun", + rerun_config={ + "visual_override": { + "world/global_map": lambda grid: grid.to_rerun(voxel_size=voxel_size, mode="boxes"), + "world/lidar": None, + }, + }, ), ).global_config(n_workers=3, robot_model="mid360_fastlio2_voxels") mid360_fastlio_voxels_native = autoconnect( FastLio2.blueprint(voxel_size=voxel_size, map_voxel_size=voxel_size, map_freq=3.0), - RerunBridgeModule.blueprint( - visual_override={ - "world/lidar": None, - "world/global_map": lambda grid: grid.to_rerun(voxel_size=voxel_size, mode="boxes"), - } + vis_module( + "rerun", + rerun_config={ + "visual_override": { + "world/lidar": None, + "world/global_map": lambda grid: grid.to_rerun(voxel_size=voxel_size, mode="boxes"), + }, + }, ), ).global_config(n_workers=2, robot_model="mid360_fastlio2") diff --git a/dimos/hardware/sensors/lidar/livox/livox_blueprints.py b/dimos/hardware/sensors/lidar/livox/livox_blueprints.py index c8835b3e89..958af084e2 100644 --- a/dimos/hardware/sensors/lidar/livox/livox_blueprints.py +++ b/dimos/hardware/sensors/lidar/livox/livox_blueprints.py @@ 
-14,9 +14,9 @@ from dimos.core.blueprints import autoconnect from dimos.hardware.sensors.lidar.livox.module import Mid360 -from dimos.visualization.rerun.bridge import RerunBridgeModule +from dimos.visualization.vis_module import vis_module mid360 = autoconnect( Mid360.blueprint(), - RerunBridgeModule.blueprint(), + vis_module("rerun"), ).global_config(n_workers=2, robot_model="mid360") diff --git a/dimos/manipulation/blueprints.py b/dimos/manipulation/blueprints.py index a9fb0fb44b..a2fd3389f0 100644 --- a/dimos/manipulation/blueprints.py +++ b/dimos/manipulation/blueprints.py @@ -46,7 +46,7 @@ from dimos.msgs.geometry_msgs.Vector3 import Vector3 from dimos.msgs.sensor_msgs.JointState import JointState from dimos.perception.object_scene_registration import ObjectSceneRegistrationModule -from dimos.robot.foxglove_bridge import FoxgloveBridge # TODO: migrate to rerun +from dimos.visualization.vis_module import vis_module from dimos.utils.data import get_data @@ -409,7 +409,7 @@ def _make_piper_config( base_transform=_XARM_PERCEPTION_CAMERA_TRANSFORM, ), ObjectSceneRegistrationModule.blueprint(target_frame="world"), - FoxgloveBridge.blueprint(), # TODO: migrate to rerun + vis_module("foxglove"), ) .transports( { diff --git a/dimos/manipulation/grasping/demo_grasping.py b/dimos/manipulation/grasping/demo_grasping.py index 782283029b..f1ce67709e 100644 --- a/dimos/manipulation/grasping/demo_grasping.py +++ b/dimos/manipulation/grasping/demo_grasping.py @@ -14,15 +14,14 @@ # limitations under the License. 
from pathlib import Path -from dimos.agents.mcp.mcp_client import McpClient -from dimos.agents.mcp.mcp_server import McpServer +from dimos.agents.agent import Agent from dimos.core.blueprints import autoconnect from dimos.hardware.sensors.camera.realsense.camera import RealSenseCamera from dimos.manipulation.grasping.graspgen_module import graspgen from dimos.manipulation.grasping.grasping import GraspingModule from dimos.perception.detection.detectors.yoloe import YoloePromptMode from dimos.perception.object_scene_registration import ObjectSceneRegistrationModule -from dimos.robot.foxglove_bridge import FoxgloveBridge +from dimos.visualization.vis_module import vis_module camera_module = RealSenseCamera.blueprint(enable_pointcloud=False) @@ -44,7 +43,6 @@ ("/tmp", "/tmp", "rw") ], # Grasp visualization debug standalone: python -m dimos.manipulation.grasping.visualize_grasps ), - FoxgloveBridge.blueprint(), - McpServer.blueprint(), - McpClient.blueprint(), + vis_module("foxglove"), + Agent.blueprint(), ).global_config(viewer="foxglove") diff --git a/dimos/manipulation/planning/utils/mesh_utils.py b/dimos/manipulation/planning/utils/mesh_utils.py index 92fcfc6eca..4dfa2231d0 100644 --- a/dimos/manipulation/planning/utils/mesh_utils.py +++ b/dimos/manipulation/planning/utils/mesh_utils.py @@ -38,6 +38,7 @@ import tempfile from typing import TYPE_CHECKING +from dimos.utils.change_detect import did_change from dimos.utils.logging_config import setup_logger if TYPE_CHECKING: @@ -76,14 +77,15 @@ def prepare_urdf_for_drake( package_paths = package_paths or {} xacro_args = xacro_args or {} - # Generate cache key + # Generate cache key from configuration (not file content — did_change handles that) cache_key = _generate_cache_key(urdf_path, package_paths, xacro_args, convert_meshes) cache_path = _CACHE_DIR / cache_key / urdf_path.stem cache_path.mkdir(parents=True, exist_ok=True) cached_urdf = cache_path / f"{urdf_path.stem}.urdf" - # Check cache - if cached_urdf.exists(): 
+ # Check cache: reuse only if the output exists AND the source file hasn't changed + source_changed = did_change(f"urdf_{cache_key}", [str(urdf_path)]) + if cached_urdf.exists() and not source_changed: logger.debug(f"Using cached URDF: {cached_urdf}") return str(cached_urdf) @@ -118,16 +120,15 @@ def _generate_cache_key( ) -> str: """Generate a cache key for the URDF configuration. - Includes a version number to invalidate cache when processing logic changes. + Encodes the configuration inputs (not file content — ``did_change`` handles + content-based invalidation separately). Includes a version number to + invalidate the cache when processing logic changes. """ - # Include file modification time - mtime = urdf_path.stat().st_mtime if urdf_path.exists() else 0 - # Version number to invalidate cache when processing logic changes # Increment this when adding new processing steps (e.g., stripping transmission blocks) - processing_version = "v2" + processing_version = "v3" - key_data = f"{processing_version}:{urdf_path}:{mtime}:{sorted(package_paths.items())}:{sorted(xacro_args.items())}:{convert_meshes}" + key_data = f"{processing_version}:{urdf_path}:{sorted(package_paths.items())}:{sorted(xacro_args.items())}:{convert_meshes}" return hashlib.md5(key_data.encode()).hexdigest()[:16] diff --git a/dimos/navigation/loop_closure/__init__.py b/dimos/navigation/loop_closure/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dimos/navigation/loop_closure/pgo.py b/dimos/navigation/loop_closure/pgo.py new file mode 100644 index 0000000000..bdc8535ef4 --- /dev/null +++ b/dimos/navigation/loop_closure/pgo.py @@ -0,0 +1,553 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""PGO Module: Python pose graph optimization with loop closure. + +Ported from FASTLIO2_ROS2/pgo. Detects keyframes, performs loop closure +via ICP + KD-tree search, and optimizes the pose graph with GTSAM iSAM2. +Publishes corrected odometry and accumulated global map. + +Falls back from native C++ to pure Python when the native binary cannot +be built (e.g. missing GTSAM in nixpkgs). +""" + +from __future__ import annotations + +from dataclasses import dataclass +import threading +import time + +import gtsam +import numpy as np +from scipy.spatial import KDTree +from scipy.spatial.transform import Rotation + +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import In, Out +from dimos.msgs.geometry_msgs.PoseStamped import PoseStamped +from dimos.msgs.nav_msgs.Odometry import Odometry +from dimos.msgs.sensor_msgs.PointCloud2 import PointCloud2 +from dimos.utils.logging_config import setup_logger + +logger = setup_logger() + + +class PGOConfig(ModuleConfig): + """Config for the PGO Python module.""" + + # Keyframe detection + key_pose_delta_trans: float = 0.5 + key_pose_delta_deg: float = 10.0 + + # Loop closure + loop_search_radius: float = 15.0 + loop_time_thresh: float = 60.0 + loop_score_thresh: float = 0.3 + loop_submap_half_range: int = 5 + submap_resolution: float = 0.1 + min_loop_detect_duration: float = 5.0 + + # Input mode + unregister_input: bool = True # Transform world-frame scans to body-frame using odom + + # Global map + global_static_map_publish_rate: float = 0.5 + global_static_map_voxel_size: float = 0.15 + + 
# ICP + max_icp_iterations: int = 50 + max_icp_correspondence_dist: float = 10.0 + + +# ─── Keyframe storage ──────────────────────────────────────────────────────── + + +@dataclass +class _KeyPose: + r_local: np.ndarray # 3x3 rotation in local/odom frame + t_local: np.ndarray # 3-vec translation in local/odom frame + r_global: np.ndarray # 3x3 corrected rotation + t_global: np.ndarray # 3-vec corrected translation + timestamp: float + body_cloud: np.ndarray # Nx3 points in body frame + + +# ─── Simple ICP (point-to-point, no PCL dependency) ───────────────────────── + + +def _icp( + source: np.ndarray, + target: np.ndarray, + max_iter: int = 50, + max_dist: float = 10.0, + tol: float = 1e-6, +) -> tuple[np.ndarray, float]: + """Simple point-to-point ICP. Returns (4x4 transform, fitness score).""" + if len(source) == 0 or len(target) == 0: + return np.eye(4), float("inf") + + tree = KDTree(target) + T = np.eye(4) + src = source.copy() + + for _ in range(max_iter): + dists, idxs = tree.query(src) + mask = dists < max_dist + if mask.sum() < 10: + return T, float("inf") + + p = src[mask] + q = target[idxs[mask]] + + cp = p.mean(axis=0) + cq = q.mean(axis=0) + H = (p - cp).T @ (q - cq) + + U, _, Vt = np.linalg.svd(H) + R = Vt.T @ U.T + if np.linalg.det(R) < 0: + Vt[-1, :] *= -1 + R = Vt.T @ U.T + t = cq - R @ cp + + dT = np.eye(4) + dT[:3, :3] = R + dT[:3, 3] = t + T = dT @ T + src = (R @ src.T).T + t + + if np.linalg.norm(t) < tol: + break + + # Fitness: mean squared distance of inliers + dists_final, _ = tree.query(src) + mask = dists_final < max_dist + fitness = float(np.mean(dists_final[mask] ** 2)) if mask.sum() > 0 else float("inf") + return T, fitness + + +def _voxel_downsample(pts: np.ndarray, voxel_size: float) -> np.ndarray: + """Voxel grid downsampling.""" + if len(pts) == 0 or voxel_size <= 0: + return pts + keys = np.floor(pts / voxel_size).astype(np.int32) + _, idx = np.unique(keys, axis=0, return_index=True) + return pts[idx] + + +# ─── SimplePGO core 
class _SimplePGO:
    """Python port of the C++ SimplePGO class.

    Keyframe-based pose graph: consecutive keyframes are chained with
    odometry (between) factors, ICP-verified loop closures add extra
    constraints, and GTSAM's iSAM2 incrementally re-optimizes.  A rigid
    offset (``_r_offset``, ``_t_offset``) maps local/odom poses into the
    corrected frame cheaply between optimizations.
    """

    def __init__(self, config: "PGOConfig") -> None:
        self._cfg = config
        self._key_poses: list[_KeyPose] = []
        # Accepted loop pairs as (target_idx, source_idx); used for rate limiting.
        self._history_pairs: list[tuple[int, int]] = []
        # Verified loops waiting to be inserted as factors.
        self._cache_pairs: list[dict] = []
        # local -> corrected frame offset, refreshed after each optimization.
        self._r_offset = np.eye(3)
        self._t_offset = np.zeros(3)

        params = gtsam.ISAM2Params()
        params.setRelinearizeThreshold(0.01)
        params.relinearizeSkip = 1
        self._isam2 = gtsam.ISAM2(params)
        self._graph = gtsam.NonlinearFactorGraph()
        self._values = gtsam.Values()

    def is_key_pose(self, r: np.ndarray, t: np.ndarray) -> bool:
        """True when (r, t) moved/rotated far enough from the last keyframe."""
        if not self._key_poses:
            return True
        last = self._key_poses[-1]
        delta_trans = np.linalg.norm(t - last.t_local)
        # Angular distance via quaternion dot product
        q_cur = Rotation.from_matrix(r).as_quat()  # [x,y,z,w]
        q_last = Rotation.from_matrix(last.r_local).as_quat()
        dot = abs(np.dot(q_cur, q_last))
        delta_deg = np.degrees(2.0 * np.arccos(min(dot, 1.0)))
        return (
            delta_trans > self._cfg.key_pose_delta_trans or delta_deg > self._cfg.key_pose_delta_deg
        )

    def add_key_pose(
        self, r_local: np.ndarray, t_local: np.ndarray, timestamp: float, body_cloud: np.ndarray
    ) -> bool:
        """Insert a keyframe node (prior for the first, between factor after).

        Returns True when (r_local, t_local) qualified as a new keyframe.
        """
        if not self.is_key_pose(r_local, t_local):
            return False

        idx = len(self._key_poses)
        # Seed the new node with the current loop-closure correction applied.
        init_r = self._r_offset @ r_local
        init_t = self._r_offset @ t_local + self._t_offset

        pose = gtsam.Pose3(gtsam.Rot3(init_r), gtsam.Point3(init_t))
        self._values.insert(idx, pose)

        if idx == 0:
            # Anchor the first pose tightly so the graph is fully constrained.
            noise = gtsam.noiseModel.Diagonal.Variances(np.full(6, 1e-12))
            self._graph.add(gtsam.PriorFactorPose3(idx, pose, noise))
        else:
            last = self._key_poses[-1]
            r_between = last.r_local.T @ r_local
            t_between = last.r_local.T @ (t_local - last.t_local)
            noise = gtsam.noiseModel.Diagonal.Variances(
                np.array([1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-6])
            )
            self._graph.add(
                gtsam.BetweenFactorPose3(
                    idx - 1,
                    idx,
                    gtsam.Pose3(gtsam.Rot3(r_between), gtsam.Point3(t_between)),
                    noise,
                )
            )

        kp = _KeyPose(
            r_local=r_local.copy(),
            t_local=t_local.copy(),
            r_global=init_r.copy(),
            t_global=init_t.copy(),
            timestamp=timestamp,
            body_cloud=_voxel_downsample(body_cloud, self._cfg.submap_resolution),
        )
        self._key_poses.append(kp)
        return True

    def _get_submap(self, idx: int, half_range: int) -> np.ndarray:
        """World-frame cloud merged from keyframes [idx-half_range, idx+half_range]."""
        lo = max(0, idx - half_range)
        hi = min(len(self._key_poses) - 1, idx + half_range)
        parts = []
        for i in range(lo, hi + 1):
            kp = self._key_poses[i]
            world = (kp.r_global @ kp.body_cloud.T).T + kp.t_global
            parts.append(world)
        if not parts:
            return np.empty((0, 3))
        cloud = np.vstack(parts)
        return _voxel_downsample(cloud, self._cfg.submap_resolution)

    def search_for_loops(self) -> None:
        """Look for one ICP-verified loop closure against past keyframes."""
        if len(self._key_poses) < 10:
            return

        # Rate limit
        if self._history_pairs:
            cur_time = self._key_poses[-1].timestamp
            last_time = self._key_poses[self._history_pairs[-1][1]].timestamp
            if cur_time - last_time < self._cfg.min_loop_detect_duration:
                return

        cur_idx = len(self._key_poses) - 1
        cur_kp = self._key_poses[-1]

        # Build KD-tree of previous keyframe positions
        positions = np.array([kp.t_global for kp in self._key_poses[:-1]])
        tree = KDTree(positions)

        idxs = tree.query_ball_point(cur_kp.t_global, self._cfg.loop_search_radius)
        if not idxs:
            return

        # query_ball_point returns candidates in unspecified order; sort by
        # distance so we verify the spatially NEAREST time-separated
        # candidate instead of an arbitrary one.
        idxs = sorted(idxs, key=lambda i: np.linalg.norm(positions[i] - cur_kp.t_global))

        # Find candidate far enough in time
        loop_idx = -1
        for i in idxs:
            if abs(cur_kp.timestamp - self._key_poses[i].timestamp) > self._cfg.loop_time_thresh:
                loop_idx = i
                break
        if loop_idx == -1:
            return

        # ICP verification
        target = self._get_submap(loop_idx, self._cfg.loop_submap_half_range)
        source = self._get_submap(cur_idx, 0)

        transform, fitness = _icp(
            source,
            target,
            max_iter=self._cfg.max_icp_iterations,
            max_dist=self._cfg.max_icp_correspondence_dist,
        )
        if fitness > self._cfg.loop_score_thresh:
            return

        # Relative pose of the ICP-refined current frame in the loop frame.
        R_icp = transform[:3, :3]
        t_icp = transform[:3, 3]
        r_refined = R_icp @ cur_kp.r_global
        t_refined = R_icp @ cur_kp.t_global + t_icp
        r_offset = self._key_poses[loop_idx].r_global.T @ r_refined
        t_offset = self._key_poses[loop_idx].r_global.T @ (
            t_refined - self._key_poses[loop_idx].t_global
        )

        self._cache_pairs.append(
            {
                "source": cur_idx,
                "target": loop_idx,
                "r_offset": r_offset,
                "t_offset": t_offset,
                "score": fitness,
            }
        )
        self._history_pairs.append((loop_idx, cur_idx))
        logger.info(f"[PGO] Loop closure detected: {loop_idx} <-> {cur_idx} (score={fitness:.4f})")

    def smooth_and_update(self) -> None:
        """Insert pending loop factors, run iSAM2, refresh poses and offset."""
        has_loop = bool(self._cache_pairs)

        for pair in self._cache_pairs:
            # Floor the variance: a perfect ICP fit (score == 0) would
            # otherwise produce a singular zero-variance noise model.
            noise = gtsam.noiseModel.Diagonal.Variances(np.full(6, max(pair["score"], 1e-6)))
            self._graph.add(
                gtsam.BetweenFactorPose3(
                    pair["target"],
                    pair["source"],
                    gtsam.Pose3(gtsam.Rot3(pair["r_offset"]), gtsam.Point3(pair["t_offset"])),
                    noise,
                )
            )
        self._cache_pairs.clear()

        self._isam2.update(self._graph, self._values)
        self._isam2.update()
        if has_loop:
            # Extra relinearization passes let the correction propagate
            # through the whole chain.
            for _ in range(4):
                self._isam2.update()
        self._graph = gtsam.NonlinearFactorGraph()
        self._values = gtsam.Values()

        estimates = self._isam2.calculateBestEstimate()
        for i in range(len(self._key_poses)):
            pose = estimates.atPose3(i)
            self._key_poses[i].r_global = pose.rotation().matrix()
            self._key_poses[i].t_global = pose.translation()

        # Refresh the local->corrected offset from the newest keyframe.
        last = self._key_poses[-1]
        self._r_offset = last.r_global @ last.r_local.T
        self._t_offset = last.t_global - self._r_offset @ last.t_local

    def get_corrected_pose(
        self, r_local: np.ndarray, t_local: np.ndarray
    ) -> tuple[np.ndarray, np.ndarray]:
        """Apply the current loop-closure offset to a local/odom pose."""
        return self._r_offset @ r_local, self._r_offset @ t_local + self._t_offset

    def build_global_static_map(self, voxel_size: float) -> np.ndarray:
        """Merge all keyframe clouds into one downsampled world-frame map."""
        if not self._key_poses:
            return np.empty((0, 3), dtype=np.float32)
        parts = []
        for kp in self._key_poses:
            world = (kp.r_global @ kp.body_cloud.T).T + kp.t_global
            parts.append(world)
        cloud = np.vstack(parts).astype(np.float32)
        return _voxel_downsample(cloud, voxel_size)

    @property
    def num_key_poses(self) -> int:
        """Number of keyframes currently in the graph."""
        return len(self._key_poses)
class PGO(Module[PGOConfig]):
    """Pose graph optimization with loop closure detection.

    Pure-Python implementation using GTSAM iSAM2 and scipy KDTree.
    Detects keyframes from odometry, searches for loop closures,
    optimizes with iSAM2, and publishes corrected poses + global map.

    Ports:
        registered_scan (In[PointCloud2]): World-frame registered point cloud.
        raw_odom (In[PoseStamped]): Raw pose from robot (PoseStamped).
        corrected_odometry (Out[Odometry]): Loop-closure-corrected pose (Odometry).
        odom (Out[PoseStamped]): Loop-closure-corrected pose (PoseStamped, for downstream).
        global_static_map (Out[PointCloud2]): Accumulated keyframe map.
    """

    default_config = PGOConfig

    registered_scan: In[PointCloud2]
    raw_odom: In[PoseStamped]
    corrected_odometry: Out[Odometry]
    odom: Out[PoseStamped]
    global_static_map: Out[PointCloud2]

    def __init__(self, **kwargs) -> None:  # type: ignore[no-untyped-def]
        super().__init__(**kwargs)
        self._lock = threading.Lock()  # guards the latest-odom snapshot below
        self._pgo_lock = threading.Lock()  # guards the _SimplePGO instance
        self._running = False
        self._thread: threading.Thread | None = None
        self._pgo: _SimplePGO | None = None
        # Latest odom
        self._latest_r = np.eye(3)
        self._latest_t = np.zeros(3)
        self._latest_time = 0.0
        self._has_odom = False
        self._last_global_static_map_time = 0.0

    def __getstate__(self) -> dict:
        # Locks, the worker thread and the gtsam-backed optimizer are not
        # picklable; drop them and recreate in __setstate__.
        state = super().__getstate__()
        for k in ("_lock", "_pgo_lock", "_thread", "_pgo"):
            state.pop(k, None)
        return state

    def __setstate__(self, state: dict) -> None:
        super().__setstate__(state)
        self._lock = threading.Lock()
        self._pgo_lock = threading.Lock()
        self._thread = None
        self._pgo = None

    def start(self) -> None:
        """Create the optimizer, subscribe inputs, launch the map publisher."""
        super().start()
        self._pgo = _SimplePGO(self.config)
        self.raw_odom._transport.subscribe(self._on_raw_odom)
        self.registered_scan._transport.subscribe(self._on_scan)
        self._running = True
        self._thread = threading.Thread(target=self._publish_loop, daemon=True)
        self._thread.start()
        logger.info("[PGO] Python PGO module started (gtsam iSAM2)")

    def stop(self) -> None:
        """Stop the publisher thread, then shut the module down."""
        self._running = False
        if self._thread:
            self._thread.join(timeout=3.0)
            # Clear the joined thread so a later start() gets a fresh one.
            self._thread = None
        super().stop()

    def _on_raw_odom(self, msg: PoseStamped) -> None:
        """Cache the newest raw pose; paired with scans in _on_scan."""
        q = [
            msg.orientation.x,
            msg.orientation.y,
            msg.orientation.z,
            msg.orientation.w,
        ]
        r = Rotation.from_quat(q).as_matrix()
        t = np.array([msg.x, msg.y, msg.z])
        with self._lock:
            self._latest_r = r
            self._latest_t = t
            # ts == 0.0 (epoch) is a valid stamp; only fall back when missing.
            self._latest_time = msg.ts if msg.ts is not None else time.time()
            self._has_odom = True

    def _on_scan(self, cloud: PointCloud2) -> None:
        """Pair the scan with the latest odom, update the graph, publish."""
        points, _ = cloud.as_numpy()
        if len(points) == 0:
            return

        with self._lock:
            if not self._has_odom:
                return
            r_local = self._latest_r.copy()
            t_local = self._latest_t.copy()
            ts = self._latest_time

        pgo = self._pgo
        if pgo is None:
            return

        # Body-frame points
        if self.config.unregister_input:
            # registered_scan is world-frame, transform back to body-frame
            body_pts = (r_local.T @ (points[:, :3].T - t_local[:, None])).T
        else:
            body_pts = points[:, :3]

        # NOTE(review): ICP + iSAM2 run on the subscriber thread; move this
        # to a worker queue if scan callbacks start backing up.
        with self._pgo_lock:
            added = pgo.add_key_pose(r_local, t_local, ts, body_pts)
            if added:
                pgo.search_for_loops()
                pgo.smooth_and_update()
                logger.info(
                    f"[PGO] Keyframe {pgo.num_key_poses} added "
                    f"({t_local[0]:.1f}, {t_local[1]:.1f}, {t_local[2]:.1f})"
                )

            # Publish corrected odometry
            r_corr, t_corr = pgo.get_corrected_pose(r_local, t_local)

        self._publish_corrected_odom(r_corr, t_corr, ts)

    def _publish_corrected_odom(self, r: np.ndarray, t: np.ndarray, ts: float) -> None:
        """Publish the corrected pose as Odometry, PoseStamped, and TF."""
        from dimos.msgs.geometry_msgs.Pose import Pose

        q = Rotation.from_matrix(r).as_quat()  # [x,y,z,w]

        odom = Odometry(
            ts=ts,
            frame_id="map",
            child_frame_id="sensor",
            pose=Pose(
                position=[float(t[0]), float(t[1]), float(t[2])],
                orientation=[float(q[0]), float(q[1]), float(q[2]), float(q[3])],
            ),
        )
        self.corrected_odometry._transport.publish(odom)

        ps = PoseStamped(
            ts=ts,
            frame_id="map",
            position=[float(t[0]), float(t[1]), float(t[2])],
            orientation=[float(q[0]), float(q[1]), float(q[2]), float(q[3])],
        )
        self.odom._transport.publish(ps)

        # Publish corrected TF so the robot box tracks corrected pose
        from dimos.msgs.geometry_msgs.Quaternion import Quaternion
        from dimos.msgs.geometry_msgs.Transform import Transform
        from dimos.msgs.geometry_msgs.Vector3 import Vector3

        # TODO(review): camera extrinsics are hardcoded here; they belong in
        # config or in the camera module.
        camera_link = Transform(
            translation=Vector3(0.3, 0.0, 0.0),
            rotation=Quaternion(0.0, 0.0, 0.0, 1.0),
            frame_id="base_link",
            child_frame_id="camera_link",
            ts=ts,
        )
        camera_optical = Transform(
            translation=Vector3(0.0, 0.0, 0.0),
            rotation=Quaternion(-0.5, 0.5, -0.5, 0.5),
            frame_id="camera_link",
            child_frame_id="camera_optical",
            ts=ts,
        )
        self.tf.publish(Transform.from_pose("base_link", ps), camera_link, camera_optical)

    def _publish_loop(self) -> None:
        """Periodically publish global map."""
        pgo = self._pgo
        if pgo is None:
            return
        rate = self.config.global_static_map_publish_rate
        # Non-positive rate falls back to one publish every 2 seconds.
        interval = 1.0 / rate if rate > 0 else 2.0

        while self._running:
            t0 = time.monotonic()
            now = time.time()

            if now - self._last_global_static_map_time > interval and pgo.num_key_poses > 0:
                with self._pgo_lock:
                    cloud_np = pgo.build_global_static_map(self.config.global_static_map_voxel_size)
                    n_keyframes = pgo.num_key_poses
                if len(cloud_np) > 0:
                    self.global_static_map._transport.publish(
                        PointCloud2.from_numpy(cloud_np, frame_id="map", timestamp=now)
                    )
                    logger.info(
                        f"[PGO] Global map published: {len(cloud_np)} points, "
                        f"{n_keyframes} keyframes"
                    )
                self._last_global_static_map_time = now

            # Sleep out the remainder of the interval (floor 0.1 s so the
            # _running flag is still re-checked promptly).
            elapsed = time.monotonic() - t0
            sleep_time = max(0.1, interval - elapsed)
            time.sleep(sleep_time)
class ScanCorrectorConfig(ModuleConfig):
    column_resolution: float = 0.05
    """XY grid resolution (metres) for z-column clearing."""


class ScanCorrector(Module[ScanCorrectorConfig]):
    """Overlay PGO-corrected lidar scans onto PGO's global map.

    Inputs:
        registered_scan: World-frame lidar from the robot (registered with raw odom).
        raw_odom: The robot's raw pose used to register the scan.
        corrected_odometry: PGO's loop-closure-corrected odometry (rate-limiter).
        global_static_map: PGO's accumulated global static map (slow-updating).

    Outputs:
        corrected_map: Combined pointcloud — PGO map with z-columns cleared and
            replaced by the latest corrected lidar frame.
        corrected_lidar: Just the re-registered lidar frame (debug aid).
    """

    default_config = ScanCorrectorConfig

    registered_scan: In[PointCloud2]
    raw_odom: In[PoseStamped]
    corrected_odometry: In[Odometry]
    global_static_map: In[PointCloud2]

    corrected_map: Out[PointCloud2]
    corrected_lidar: Out[PointCloud2]

    def __init__(self, **kwargs):  # type: ignore[no-untyped-def]
        super().__init__(**kwargs)
        self._lock = threading.Lock()
        # Scan paired with the raw odom at the time it arrived
        self._latest_scan: PointCloud2 | None = None
        self._latest_scan_raw_r = np.eye(3)
        self._latest_scan_raw_t = np.zeros(3)
        # Most recent raw odom (updated continuously)
        self._latest_raw_r = np.eye(3)
        self._latest_raw_t = np.zeros(3)
        self._latest_pgo_map_pts: np.ndarray | None = None

    @rpc
    def start(self) -> None:
        """Subscribe all four inputs."""
        super().start()
        self._disposables.add(Disposable(self.registered_scan.subscribe(self._on_scan)))
        self._disposables.add(Disposable(self.raw_odom.subscribe(self._on_raw_odom)))
        self._disposables.add(
            Disposable(self.corrected_odometry.subscribe(self._on_corrected_odom))
        )
        self._disposables.add(Disposable(self.global_static_map.subscribe(self._on_pgo_map)))
        logger.info("ScanCorrector started")

    @rpc
    def stop(self) -> None:
        super().stop()

    def _on_scan(self, cloud: PointCloud2) -> None:
        """Pair the incoming scan with the raw odom it was registered with."""
        with self._lock:
            self._latest_scan = cloud
            self._latest_scan_raw_r = self._latest_raw_r.copy()
            self._latest_scan_raw_t = self._latest_raw_t.copy()

    def _on_raw_odom(self, msg: PoseStamped) -> None:
        """Track the robot's most recent raw pose."""
        q = [msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w]
        r = Rotation.from_quat(q).as_matrix()
        t = np.array([msg.x, msg.y, msg.z])
        with self._lock:
            self._latest_raw_r = r
            self._latest_raw_t = t

    def _on_pgo_map(self, cloud: PointCloud2) -> None:
        """Cache PGO's latest global static map (xyz columns only)."""
        pts, _ = cloud.as_numpy()
        if len(pts) == 0:
            return
        with self._lock:
            self._latest_pgo_map_pts = pts[:, :3].copy()

    def _on_corrected_odom(self, msg: Odometry) -> None:
        """Rate-limiter: triggered by PGO corrected odom."""
        q = [msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w]
        corrected_r = Rotation.from_quat(q).as_matrix()
        corrected_t = np.array([msg.x, msg.y, msg.z])

        with self._lock:
            scan = self._latest_scan
            raw_r = self._latest_scan_raw_r.copy()
            raw_t = self._latest_scan_raw_t.copy()
            pgo_map_pts = self._latest_pgo_map_pts

        if scan is None:
            return

        scan_pts, _ = scan.as_numpy()
        if len(scan_pts) == 0:
            return
        world_pts = scan_pts[:, :3]

        # Un-register: world-frame (raw odom) -> body-frame
        body_pts = (raw_r.T @ (world_pts.T - raw_t[:, None])).T

        # Re-register: body-frame -> world-frame (corrected odom)
        corrected_pts = (corrected_r @ body_pts.T).T + corrected_t

        now = time.time()

        # Publish just the re-registered lidar for debugging
        self.corrected_lidar.publish(
            PointCloud2.from_numpy(corrected_pts.astype(np.float32), frame_id="map", timestamp=now)
        )

        if pgo_map_pts is None or len(pgo_map_pts) == 0:
            combined = corrected_pts
        else:
            combined = self._column_carve_and_overlay(pgo_map_pts, corrected_pts)

        self.corrected_map.publish(
            PointCloud2.from_numpy(combined.astype(np.float32), frame_id="map", timestamp=now)
        )

    @staticmethod
    def _column_keys(pts: np.ndarray, res: float) -> np.ndarray:
        """Pack XY cell indices into collision-free int64 column keys.

        Bit-packing ``(x << 32) | (y & 0xFFFFFFFF)`` is unique for any
        32-bit cell index pair, unlike the previous ``x + y * 1_000_000``
        scheme, which collides once |x| spans a million cells (≈50 km at
        5 cm resolution) or for certain mixed-sign index pairs.
        """
        cols = np.floor(pts[:, :2] / res).astype(np.int64)
        return (cols[:, 0] << 32) | (cols[:, 1] & 0xFFFFFFFF)

    def _column_carve_and_overlay(
        self, base_pts: np.ndarray, overlay_pts: np.ndarray
    ) -> np.ndarray:
        """Clear z-columns in base_pts that are occupied by overlay_pts, then combine."""
        res = self.config.column_resolution

        overlay_keys = self._column_keys(overlay_pts, res)
        base_keys = self._column_keys(base_pts, res)

        # Keep base points whose column is NOT in overlay (vectorized)
        mask = ~np.isin(base_keys, overlay_keys)

        surviving = base_pts[mask]
        return np.vstack([surviving, overlay_pts]) if len(surviving) > 0 else overlay_pts
a/dimos/robot/drone/blueprints/basic/drone_basic.py b/dimos/robot/drone/blueprints/basic/drone_basic.py index fbe6621ae1..c60483cb0a 100644 --- a/dimos/robot/drone/blueprints/basic/drone_basic.py +++ b/dimos/robot/drone/blueprints/basic/drone_basic.py @@ -20,10 +20,9 @@ from dimos.core.blueprints import autoconnect from dimos.core.global_config import global_config -from dimos.protocol.pubsub.impl.lcmpubsub import LCM from dimos.robot.drone.camera_module import DroneCameraModule from dimos.robot.drone.connection_module import DroneConnectionModule -from dimos.web.websocket_vis.websocket_vis_module import WebsocketVisModule +from dimos.visualization.vis_module import vis_module def _static_drone_body(rr: Any) -> list[Any]: @@ -60,23 +59,12 @@ def _drone_rerun_blueprint() -> Any: _rerun_config = { "blueprint": _drone_rerun_blueprint, - "pubsubs": [LCM()], "static": { "world/tf/base_link": _static_drone_body, }, } -# Conditional visualization -if global_config.viewer == "foxglove": - from dimos.robot.foxglove_bridge import FoxgloveBridge - - _vis = FoxgloveBridge.blueprint() -elif global_config.viewer.startswith("rerun"): - from dimos.visualization.rerun.bridge import RerunBridgeModule, _resolve_viewer_mode - - _vis = RerunBridgeModule.blueprint(viewer_mode=_resolve_viewer_mode(), **_rerun_config) -else: - _vis = autoconnect() +_vis = vis_module(global_config.viewer, rerun_config=_rerun_config) # Determine connection string based on replay flag connection_string = "udp:0.0.0.0:14550" @@ -92,7 +80,6 @@ def _drone_rerun_blueprint() -> Any: outdoor=False, ), DroneCameraModule.blueprint(camera_intrinsics=[1000.0, 1000.0, 960.0, 540.0]), - WebsocketVisModule.blueprint(), ) __all__ = [ diff --git a/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_shm.py b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_shm.py index 5b127fb697..9efe400895 100644 --- a/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_shm.py +++ 
b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_shm.py @@ -17,10 +17,11 @@ from dimos.constants import DEFAULT_CAPACITY_COLOR_IMAGE from dimos.core.blueprints import autoconnect +from dimos.core.global_config import global_config from dimos.core.transport import pSHMTransport from dimos.msgs.sensor_msgs.Image import Image -from dimos.robot.foxglove_bridge import FoxgloveBridge from dimos.robot.unitree.g1.blueprints.perceptive.unitree_g1 import unitree_g1 +from dimos.visualization.vis_module import vis_module unitree_g1_shm = autoconnect( unitree_g1.transports( @@ -30,10 +31,9 @@ ), } ), - FoxgloveBridge.blueprint( - shm_channels=[ - "/color_image#sensor_msgs.Image", - ] + vis_module( + global_config.viewer, + foxglove_config={"shm_channels": ["/color_image#sensor_msgs.Image"]}, ), ) diff --git a/dimos/robot/unitree/g1/blueprints/primitive/uintree_g1_primitive_no_nav.py b/dimos/robot/unitree/g1/blueprints/primitive/uintree_g1_primitive_no_nav.py index c3da9521c5..220caff949 100644 --- a/dimos/robot/unitree/g1/blueprints/primitive/uintree_g1_primitive_no_nav.py +++ b/dimos/robot/unitree/g1/blueprints/primitive/uintree_g1_primitive_no_nav.py @@ -40,8 +40,7 @@ from dimos.navigation.frontier_exploration.wavefront_frontier_goal_selector import ( WavefrontFrontierExplorer, ) -from dimos.protocol.pubsub.impl.lcmpubsub import LCM -from dimos.web.websocket_vis.websocket_vis_module import WebsocketVisModule +from dimos.visualization.vis_module import vis_module def _convert_camera_info(camera_info: Any) -> Any: @@ -90,7 +89,6 @@ def _g1_rerun_blueprint() -> Any: rerun_config = { "blueprint": _g1_rerun_blueprint, - "pubsubs": [LCM()], "visual_override": { "world/camera_info": _convert_camera_info, "world/global_map": _convert_global_map, @@ -101,18 +99,7 @@ def _g1_rerun_blueprint() -> Any: }, } -if global_config.viewer == "foxglove": - from dimos.robot.foxglove_bridge import FoxgloveBridge - - _with_vis = autoconnect(FoxgloveBridge.blueprint()) -elif 
global_config.viewer.startswith("rerun"): - from dimos.visualization.rerun.bridge import RerunBridgeModule, _resolve_viewer_mode - - _with_vis = autoconnect( - RerunBridgeModule.blueprint(viewer_mode=_resolve_viewer_mode(), **rerun_config) - ) -else: - _with_vis = autoconnect() +_with_vis = vis_module(global_config.viewer, rerun_config=rerun_config) def _create_webcam() -> Webcam: @@ -147,8 +134,6 @@ def _create_webcam() -> Webcam: VoxelGridMapper.blueprint(voxel_size=0.1), CostMapper.blueprint(), WavefrontFrontierExplorer.blueprint(), - # Visualization - WebsocketVisModule.blueprint(), ) .global_config(n_workers=4, robot_model="unitree_g1") .transports( diff --git a/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_basic.py b/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_basic.py index a0d1e6a7ae..1e0f32d25c 100644 --- a/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_basic.py +++ b/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_basic.py @@ -22,10 +22,9 @@ from dimos.core.global_config import global_config from dimos.core.transport import pSHMTransport from dimos.msgs.sensor_msgs.Image import Image -from dimos.protocol.pubsub.impl.lcmpubsub import LCM from dimos.protocol.service.system_configurator.clock_sync import ClockSyncConfigurator from dimos.robot.unitree.go2.connection import GO2Connection -from dimos.web.websocket_vis.websocket_vis_module import WebsocketVisModule +from dimos.visualization.vis_module import vis_module # Mac has some issue with high bandwidth UDP, so we use pSHMTransport for color_image # actually we can use pSHMTransport for all platforms, and for all streams @@ -87,9 +86,6 @@ def _go2_rerun_blueprint() -> Any: rerun_config = { "blueprint": _go2_rerun_blueprint, - # any pubsub that supports subscribe_all and topic that supports str(topic) - # is acceptable here - "pubsubs": [LCM()], # Custom converters for specific rerun entity paths # Normally all these would be specified in their respectative modules # Until this is 
implemented we have central overrides here @@ -106,29 +102,19 @@ def _go2_rerun_blueprint() -> Any: }, } - -if global_config.viewer == "foxglove": - from dimos.robot.foxglove_bridge import FoxgloveBridge - - with_vis = autoconnect( - _transports_base, - FoxgloveBridge.blueprint(shm_channels=["/color_image#sensor_msgs.Image"]), - ) -elif global_config.viewer.startswith("rerun"): - from dimos.visualization.rerun.bridge import RerunBridgeModule, _resolve_viewer_mode - - with_vis = autoconnect( - _transports_base, - RerunBridgeModule.blueprint(viewer_mode=_resolve_viewer_mode(), **rerun_config), - ) -else: - with_vis = _transports_base +_with_vis = autoconnect( + _transports_base, + vis_module( + global_config.viewer, + rerun_config=rerun_config, + foxglove_config={"shm_channels": ["/color_image#sensor_msgs.Image"]}, + ), +) unitree_go2_basic = ( autoconnect( - with_vis, + _with_vis, GO2Connection.blueprint(), - WebsocketVisModule.blueprint(), ) .global_config(n_workers=4, robot_model="unitree_go2") .configurators(ClockSyncConfigurator()) diff --git a/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_fleet.py b/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_fleet.py index 1c55f3e93c..0468cad40d 100644 --- a/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_fleet.py +++ b/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_fleet.py @@ -22,15 +22,13 @@ from dimos.core.blueprints import autoconnect from dimos.protocol.service.system_configurator.clock_sync import ClockSyncConfigurator -from dimos.robot.unitree.go2.blueprints.basic.unitree_go2_basic import with_vis +from dimos.robot.unitree.go2.blueprints.basic.unitree_go2_basic import _with_vis from dimos.robot.unitree.go2.fleet_connection import Go2FleetConnection -from dimos.web.websocket_vis.websocket_vis_module import WebsocketVisModule unitree_go2_fleet = ( autoconnect( - with_vis, + _with_vis, Go2FleetConnection.blueprint(), - WebsocketVisModule.blueprint(), ) .global_config(n_workers=4, 
robot_model="unitree_go2") .configurators(ClockSyncConfigurator()) diff --git a/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_smartnav.py b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_smartnav.py new file mode 100644 index 0000000000..33fb43fef5 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_smartnav.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Go2 SmartNav blueprint: PGO + ScanCorrector + VoxelMapper + CostMapper + Planner. + +Uses PGO for loop-closure-corrected odometry. ScanCorrector re-registers the +robot's raw lidar using PGO's corrected pose and overlays it onto PGO's global +static map via z-column clearing. VoxelMapper consumes the combined output. 
+ +Data flow: + GO2Connection.lidar (remapped → registered_scan) → PGO + ScanCorrector + GO2Connection.odom (remapped → raw_odom) → PGO + ScanCorrector + PGO.odom (corrected PoseStamped) → ScanCorrector + PGO.global_static_map → ScanCorrector + ScanCorrector.corrected_map → VoxelGridMapper → CostMapper → ReplanningAStarPlanner + ReplanningAStarPlanner.cmd_vel → GO2Connection +""" + +from dimos.core.blueprints import autoconnect +from dimos.mapping.costmapper import CostMapper +from dimos.mapping.voxels import VoxelGridMapper +from dimos.navigation.frontier_exploration.wavefront_frontier_goal_selector import ( + WavefrontFrontierExplorer, +) +from dimos.navigation.loop_closure.pgo import PGO +from dimos.navigation.replanning_a_star.module import ReplanningAStarPlanner +from dimos.navigation.scan_corrector import ScanCorrector +from dimos.robot.unitree.go2.blueprints.basic.unitree_go2_basic import unitree_go2_basic +from dimos.robot.unitree.go2.connection import GO2Connection + +unitree_go2_smartnav = ( + autoconnect( + unitree_go2_basic, + GO2Connection.blueprint(publish_tf=False), + PGO.blueprint(), + ScanCorrector.blueprint(), + VoxelGridMapper.blueprint(voxel_size=0.1), + CostMapper.blueprint(), + ReplanningAStarPlanner.blueprint(), + WavefrontFrontierExplorer.blueprint(), + ) + .global_config(n_workers=9, robot_model="unitree_go2") + .remappings( + [ + (GO2Connection, "lidar", "registered_scan"), + (GO2Connection, "odom", "raw_odom"), + (VoxelGridMapper, "lidar", "corrected_map"), + ] + ) +) + +__all__ = ["unitree_go2_smartnav"] diff --git a/dimos/robot/unitree/go2/connection.py b/dimos/robot/unitree/go2/connection.py index 5123dc9a31..b700398292 100644 --- a/dimos/robot/unitree/go2/connection.py +++ b/dimos/robot/unitree/go2/connection.py @@ -57,6 +57,8 @@ class ConnectionConfig(ModuleConfig): ip: str = Field(default_factory=lambda m: m["g"].robot_ip) + publish_tf: bool = True + """Publish TF transforms from raw odom. Disable when an external module (e.g. 
PGO) owns TF.""" class Go2ConnectionProtocol(Protocol): @@ -292,8 +294,9 @@ def _odom_to_tf(cls, odom: PoseStamped) -> list[Transform]: ] def _publish_tf(self, msg: PoseStamped) -> None: - transforms = self._odom_to_tf(msg) - self.tf.publish(*transforms) + if self.config.publish_tf: + transforms = self._odom_to_tf(msg) + self.tf.publish(*transforms) if self.odom.transport: self.odom.publish(msg) diff --git a/dimos/teleop/quest/blueprints.py b/dimos/teleop/quest/blueprints.py index d6367310de..1b67de3b75 100644 --- a/dimos/teleop/quest/blueprints.py +++ b/dimos/teleop/quest/blueprints.py @@ -26,12 +26,12 @@ from dimos.msgs.geometry_msgs.PoseStamped import PoseStamped from dimos.teleop.quest.quest_extensions import ArmTeleopModule from dimos.teleop.quest.quest_types import Buttons -from dimos.visualization.rerun.bridge import RerunBridgeModule +from dimos.visualization.vis_module import vis_module # Arm teleop with press-and-hold engage (has rerun viz) teleop_quest_rerun = autoconnect( ArmTeleopModule.blueprint(), - RerunBridgeModule.blueprint(), + vis_module("rerun"), ).transports( { ("left_controller_output", PoseStamped): LCMTransport("/teleop/left_delta", PoseStamped), diff --git a/dimos/utils/change_detect.py b/dimos/utils/change_detect.py new file mode 100644 index 0000000000..7e37407222 --- /dev/null +++ b/dimos/utils/change_detect.py @@ -0,0 +1,246 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Change detection utility for file content hashing. + +Tracks whether a set of files (by path, directory, or glob pattern) have +changed since the last check. Useful for skipping expensive rebuilds when +source files haven't been modified. + +Path entries are type-dispatched: + +- ``str`` / ``Path`` / ``LfsPath`` — treated as **literal** file or directory + paths (no glob expansion, even if the path contains ``*``). +- ``Glob`` — expanded with :func:`glob.glob` to match filesystem patterns. +""" + +from __future__ import annotations + +from collections.abc import Sequence +import fcntl +import glob as glob_mod +import hashlib +import os +from pathlib import Path +from typing import Union + +import xxhash + +from dimos.utils.data import LfsPath +from dimos.utils.logging_config import setup_logger + +logger = setup_logger() + + +class Glob(str): + """A string that should be interpreted as a filesystem glob pattern. + + Wraps a plain ``str`` to signal that :func:`did_change` should expand it + with :func:`glob.glob` rather than treating it as a literal path. + + Example:: + + Glob("src/**/*.c") + """ + + +PathEntry = Union[str, Path, LfsPath, Glob] +"""A single entry in a change-detection path list.""" + + +def _get_cache_dir() -> Path: + """Return the directory used to store change-detection cache files. + + Uses ``/dimos_cache/change_detect/`` when running inside a + venv, otherwise falls back to ``~/.cache/dimos/change_detect/``. + """ + venv = os.environ.get("VIRTUAL_ENV") + if venv: + return Path(venv) / "dimos_cache" / "change_detect" + return Path.home() / ".cache" / "dimos" / "change_detect" + + +def _safe_filename(cache_name: str) -> str: + """Convert an arbitrary cache name into a safe filename. + + If the cache name is already a simple identifier it is returned as-is. + Otherwise a short SHA-256 prefix is appended so that names containing + path separators or other special characters produce unique, safe filenames. 
+ """ + safe_chars = set("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-") + if all(c in safe_chars for c in cache_name) and len(cache_name) <= 200: + return cache_name + digest = hashlib.sha256(cache_name.encode()).hexdigest()[:16] + return digest + + +def _add_path(files: set[Path], p: Path) -> None: + """Add *p* (file or directory, walked recursively) to *files*.""" + if p.is_file(): + files.add(p.resolve()) + elif p.is_dir(): + for root, _dirs, filenames in os.walk(p): + for fname in filenames: + files.add(Path(root, fname).resolve()) + + +def _resolve_paths(paths: Sequence[PathEntry], cwd: str | Path | None = None) -> list[Path]: + """Resolve a mixed list of path entries into a sorted list of files. + + ``Glob`` entries are expanded via :func:`glob.glob`. All other types + (``str``, ``Path``, ``LfsPath``) are treated as literal paths — no + wildcard expansion is performed. + + When *cwd* is provided, relative paths are resolved against it. + When *cwd* is ``None``, relative paths raise :class:`ValueError`. + """ + files: set[Path] = set() + for entry in paths: + if isinstance(entry, Glob): + pattern = str(entry) + if not Path(pattern).is_absolute(): + if cwd is None: + raise ValueError( + f"Relative path {pattern!r} passed to change detection without a cwd. " + "Either provide an absolute path or pass cwd= so relatives can be resolved." + ) + pattern = str(Path(cwd) / pattern) + expanded = glob_mod.glob(pattern, recursive=True) + if not expanded: + logger.warning("Glob pattern matched no files", pattern=pattern) + continue + for match in expanded: + _add_path(files, Path(match)) + else: + # str, Path, LfsPath — literal path, no glob expansion + path_str = str(entry) + if not Path(path_str).is_absolute(): + if cwd is None: + raise ValueError( + f"Relative path {path_str!r} passed to change detection without a cwd. " + "Either provide an absolute path or pass cwd= so relatives can be resolved." 
+ ) + path_str = str(Path(cwd) / path_str) + p = Path(path_str) + if not p.exists(): + logger.warning("Path does not exist", path=path_str) + continue + _add_path(files, p) + return sorted(files) + + +def _hash_files(files: list[Path]) -> str: + """Compute an aggregate xxhash digest over the sorted file list.""" + h = xxhash.xxh64() + for fpath in files: + try: + # Include the path so additions/deletions/renames are detected + h.update(str(fpath).encode()) + h.update(fpath.read_bytes()) + except (OSError, PermissionError): + logger.warning("Cannot read file for hashing", path=str(fpath)) + return h.hexdigest() + + +def did_change( + cache_name: str, + paths: Sequence[PathEntry], + cwd: str | Path | None = None, +) -> bool: + """Check if any files/dirs matching the given paths have changed since last check. + + Examples:: + + # Absolute paths — no cwd needed + did_change("my_build", ["/src/main.cpp"]) + + # Use Glob for wildcard patterns (str is always literal) + did_change("c_sources", [Glob("/src/**/*.c"), Glob("/include/**/*.h")]) + + # Relative paths — must pass cwd + did_change("my_build", ["src/main.cpp"], cwd="/home/user/project") + + # Mix literal paths and globs + did_change("config_check", ["config.yaml", Glob("templates/*.j2")], cwd="/project") + + # Track a whole directory (walked recursively) + did_change("assets", ["/data/models/"]) + + # Second call with no file changes → False + did_change("my_build", ["/src/main.cpp"]) # True (first call, no cache) + did_change("my_build", ["/src/main.cpp"]) # False (nothing changed) + + # After editing a file → True again + Path("/src/main.cpp").write_text("// changed") + did_change("my_build", ["/src/main.cpp"]) # True + + # Relative path without cwd → ValueError + did_change("bad", ["src/main.cpp"]) # raises ValueError + + Returns ``True`` on the first call (no previous cache), and on subsequent + calls returns ``True`` only if file contents differ from the last check. 
+ The cache is always updated, so two consecutive calls with no changes + return ``True`` then ``False``. + """ + if not paths: + return False + + files = _resolve_paths(paths, cwd=cwd) + + # If none of the monitored paths resolve to actual files (e.g. source + # files don't exist on this branch or checkout), don't claim anything + # changed — deleting a working binary because we can't find the sources + # to compare against is destructive. + if not files: + logger.warning( + "No source files found for change detection, skipping rebuild check", + cache_name=cache_name, + ) + return False + + current_hash = _hash_files(files) + + cache_dir = _get_cache_dir() + cache_dir.mkdir(parents=True, exist_ok=True) + cache_file = cache_dir / f"{_safe_filename(cache_name)}.hash" + lock_file = cache_dir / f"{_safe_filename(cache_name)}.lock" + + changed = True + with open(lock_file, "w") as lf: + fcntl.flock(lf, fcntl.LOCK_EX) + try: + if cache_file.exists(): + previous_hash = cache_file.read_text().strip() + changed = current_hash != previous_hash + # Always update the cache with the current hash + cache_file.write_text(current_hash) + finally: + fcntl.flock(lf, fcntl.LOCK_UN) + + return changed + + +def clear_cache(cache_name: str) -> bool: + """Remove the cached hash so the next ``did_change`` call returns ``True``. + + Example:: + + clear_cache("my_build") + did_change("my_build", ["/src/main.c"]) # always True after clear + """ + cache_file = _get_cache_dir() / f"{_safe_filename(cache_name)}.hash" + if cache_file.exists(): + cache_file.unlink() + return True + return False diff --git a/dimos/utils/test_change_detect.py b/dimos/utils/test_change_detect.py new file mode 100644 index 0000000000..42bd8a62e9 --- /dev/null +++ b/dimos/utils/test_change_detect.py @@ -0,0 +1,135 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
"""Tests for the change detection utility."""

from __future__ import annotations

from pathlib import Path

import pytest

from dimos.utils.change_detect import Glob, clear_cache, did_change


@pytest.fixture(autouse=True)
def _use_tmp_cache(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
    """Redirect the change-detection cache to a temp dir for every test."""
    # Patch the module-level helper so did_change/clear_cache never touch the
    # real user/venv cache during tests.
    monkeypatch.setattr(
        "dimos.utils.change_detect._get_cache_dir",
        lambda: tmp_path / "cache",
    )


@pytest.fixture()
def src_dir(tmp_path: Path) -> Path:
    """A temp directory with two source files for testing."""
    d = tmp_path / "src"
    d.mkdir()
    (d / "a.c").write_text("int main() { return 0; }")
    (d / "b.c").write_text("void helper() {}")
    return d


def test_first_call_returns_true(src_dir: Path) -> None:
    """With no prior cache, the first check always reports a change."""
    assert did_change("test_cache", [str(src_dir)]) is True


def test_second_call_no_change_returns_false(src_dir: Path) -> None:
    """An immediate re-check with identical content reports no change."""
    did_change("test_cache", [str(src_dir)])
    assert did_change("test_cache", [str(src_dir)]) is False


def test_file_modified_returns_true(src_dir: Path) -> None:
    """Editing a tracked file's content is detected."""
    did_change("test_cache", [str(src_dir)])
    (src_dir / "a.c").write_text("int main() { return 1; }")
    assert did_change("test_cache", [str(src_dir)]) is True


def test_file_added_to_dir_returns_true(src_dir: Path) -> None:
    """Adding a new file to a tracked directory is detected."""
    did_change("test_cache", [str(src_dir)])
    (src_dir / "c.c").write_text("void new_func() {}")
    assert did_change("test_cache", [str(src_dir)]) is True


def test_file_deleted_returns_true(src_dir: Path) -> None:
    """Deleting a file from a tracked directory is detected."""
    did_change("test_cache", [str(src_dir)])
    (src_dir / "b.c").unlink()
    assert did_change("test_cache", [str(src_dir)]) is True


def test_glob_pattern(src_dir: Path) -> None:
    """Glob entries are expanded and participate in change detection."""
    pattern = Glob(str(src_dir / "*.c"))
    assert did_change("glob_cache", [pattern]) is True
    assert did_change("glob_cache", [pattern]) is False
    (src_dir / "a.c").write_text("changed!")
    assert did_change("glob_cache", [pattern]) is True


def test_str_with_glob_chars_is_literal(tmp_path: Path) -> None:
    """A plain str containing glob metacharacters ('[1]') must NOT be glob-expanded."""
    weird_name = tmp_path / "file[1].txt"
    weird_name.write_text("content")
    # str path — treated literally, should find the file even though "[1]"
    # would match only "file1.txt" under glob semantics.
    assert did_change("literal_test", [str(weird_name)]) is True
    assert did_change("literal_test", [str(weird_name)]) is False


def test_separate_cache_names_independent(src_dir: Path) -> None:
    """Each cache_name tracks its own last-seen hash."""
    paths = [str(src_dir)]
    did_change("cache_a", paths)
    did_change("cache_b", paths)
    # Both caches are now up-to-date
    assert did_change("cache_a", paths) is False
    assert did_change("cache_b", paths) is False
    # Modify a file — both caches should report changed independently
    (src_dir / "a.c").write_text("changed")
    assert did_change("cache_a", paths) is True
    # cache_b hasn't been checked since the change
    assert did_change("cache_b", paths) is True


def test_clear_cache(src_dir: Path) -> None:
    """clear_cache() forces the next did_change() to report True."""
    paths = [str(src_dir)]
    did_change("clear_test", paths)
    assert did_change("clear_test", paths) is False
    assert clear_cache("clear_test") is True
    assert did_change("clear_test", paths) is True


def test_clear_cache_nonexistent() -> None:
    """Clearing a cache that was never written returns False."""
    assert clear_cache("does_not_exist") is False


def test_empty_paths_returns_false() -> None:
    """An empty path list is a no-op check."""
    assert did_change("empty_test", []) is False


def test_nonexistent_path_warns(caplog: pytest.LogCaptureFixture) -> None:
    """A non-existent absolute path logs a warning and doesn't crash."""
    # NOTE(review): caplog is requested but never inspected — the warning is
    # only implied. Either assert on caplog.records or drop the fixture.
    result = did_change("missing_test", ["/nonexistent/path/to/file.c"])
    # No resolvable files → returns False (skip rebuild)
    assert result is False


def test_relative_path_without_cwd_raises() -> None:
    """Relative paths without cwd= should raise ValueError."""
    with pytest.raises(ValueError, match="Relative path.*without a cwd"):
        did_change("rel_test", ["some/relative/path.c"])


def test_relative_path_with_cwd(src_dir: Path) -> None:
    """Relative paths should resolve against the provided cwd."""
    assert did_change("cwd_test", ["src/a.c"], cwd=src_dir.parent) is True
    assert did_change("cwd_test", ["src/a.c"], cwd=src_dir.parent) is False
import asyncio
import json
import os
import subprocess
import threading
import time
from typing import Any

from dimos.visualization.rerun.websocket_server import RerunWebSocketServer

# NOTE(review): all tests in this file share one fixed port, so they must not
# run in parallel — confirm the test runner executes this module serially.
_E2E_PORT = 13032


def _make_server(port: int = _E2E_PORT) -> RerunWebSocketServer:
    """Construct a server module bound to the e2e test port."""
    return RerunWebSocketServer(port=port)


def _wait_for_server(port: int, timeout: float = 5.0) -> None:
    """Poll until the WebSocket server accepts an upgrade handshake."""
    import websockets.asyncio.client as ws_client

    async def _probe() -> None:
        # Opening and immediately closing a connection proves readiness.
        async with ws_client.connect(f"ws://127.0.0.1:{port}/ws"):
            pass

    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            asyncio.run(_probe())
            return
        except Exception:
            # Server not up yet — back off briefly and retry until deadline.
            time.sleep(0.05)
    raise TimeoutError(f"Server on port {port} did not become ready within {timeout}s")


def _send_messages(port: int, messages: list[dict[str, Any]], *, delay: float = 0.05) -> None:
    """Open one WS connection and send *messages* as JSON, pausing *delay* between each."""
    import websockets.asyncio.client as ws_client

    async def _run() -> None:
        async with ws_client.connect(f"ws://127.0.0.1:{port}/ws") as ws:
            for msg in messages:
                await ws.send(json.dumps(msg))
                await asyncio.sleep(delay)

    asyncio.run(_run())


class TestViewerProtocolE2E:
    """Verify the full Python-server side of the viewer ↔ DimOS protocol.

    These tests use the ``RerunWebSocketServer`` as the server and a dummy
    WebSocket client (playing the role of dimos-viewer) to inject messages.
    They confirm every message type is correctly routed and that only click
    messages produce stream publishes.
    """

    def test_viewer_click_reaches_stream(self) -> None:
        """A viewer click message received over WebSocket publishes PointStamped."""
        server = _make_server()
        server.start()
        _wait_for_server(_E2E_PORT)

        received: list[Any] = []
        done = threading.Event()

        def _on_pt(pt: Any) -> None:
            received.append(pt)
            done.set()

        server.clicked_point.subscribe(_on_pt)

        _send_messages(
            _E2E_PORT,
            [
                {
                    "type": "click",
                    "x": 10.0,
                    "y": 20.0,
                    "z": 0.5,
                    "entity_path": "/world/robot",
                    "timestamp_ms": 42000,
                }
            ],
        )

        done.wait(timeout=3.0)
        server.stop()

        assert len(received) == 1
        pt = received[0]
        assert abs(pt.x - 10.0) < 1e-9
        assert abs(pt.y - 20.0) < 1e-9
        assert abs(pt.z - 0.5) < 1e-9
        assert pt.frame_id == "/world/robot"
        # timestamp_ms is converted to seconds by the server.
        assert abs(pt.ts - 42.0) < 1e-6

    def test_viewer_keyboard_twist_no_publish(self) -> None:
        """Twist messages from keyboard control do not publish clicked_point."""
        server = _make_server()
        server.start()
        _wait_for_server(_E2E_PORT)

        received: list[Any] = []
        server.clicked_point.subscribe(received.append)

        _send_messages(
            _E2E_PORT,
            [
                {
                    "type": "twist",
                    "linear_x": 0.5,
                    "linear_y": 0.0,
                    "linear_z": 0.0,
                    "angular_x": 0.0,
                    "angular_y": 0.0,
                    "angular_z": 0.8,
                }
            ],
        )

        server.stop()
        assert received == []

    def test_viewer_stop_no_publish(self) -> None:
        """Stop messages do not publish clicked_point."""
        server = _make_server()
        server.start()
        _wait_for_server(_E2E_PORT)

        received: list[Any] = []
        server.clicked_point.subscribe(received.append)

        _send_messages(_E2E_PORT, [{"type": "stop"}])

        server.stop()
        assert received == []

    def test_full_viewer_session_sequence(self) -> None:
        """Realistic session: connect, heartbeats, click, WASD, stop → one point."""
        server = _make_server()
        server.start()
        _wait_for_server(_E2E_PORT)

        received: list[Any] = []
        done = threading.Event()

        def _on_pt(pt: Any) -> None:
            received.append(pt)
            done.set()

        server.clicked_point.subscribe(_on_pt)

        _send_messages(
            _E2E_PORT,
            [
                # Initial heartbeats (viewer connects and starts 1 Hz heartbeat)
                {"type": "heartbeat", "timestamp_ms": 1000},
                {"type": "heartbeat", "timestamp_ms": 2000},
                # User clicks a point in the 3D viewport
                {
                    "type": "click",
                    "x": 3.14,
                    "y": 2.71,
                    "z": 1.41,
                    "entity_path": "/world",
                    "timestamp_ms": 3000,
                },
                # User presses W (forward)
                {
                    "type": "twist",
                    "linear_x": 0.5,
                    "linear_y": 0.0,
                    "linear_z": 0.0,
                    "angular_x": 0.0,
                    "angular_y": 0.0,
                    "angular_z": 0.0,
                },
                # User releases W
                {"type": "stop"},
                # Another heartbeat
                {"type": "heartbeat", "timestamp_ms": 4000},
            ],
            delay=0.2,
        )

        done.wait(timeout=3.0)
        server.stop()

        assert len(received) == 1, f"Expected exactly 1 click, got {len(received)}"
        pt = received[0]
        assert abs(pt.x - 3.14) < 1e-9
        assert abs(pt.y - 2.71) < 1e-9
        assert abs(pt.z - 1.41) < 1e-9

    def test_reconnect_after_disconnect(self) -> None:
        """Server keeps accepting new connections after a client disconnects."""
        server = _make_server()
        server.start()
        _wait_for_server(_E2E_PORT)

        received: list[Any] = []
        all_done = threading.Event()

        def _on_pt(pt: Any) -> None:
            received.append(pt)
            if len(received) >= 2:
                all_done.set()

        server.clicked_point.subscribe(_on_pt)

        # First connection — send one click and disconnect
        _send_messages(
            _E2E_PORT,
            [{"type": "click", "x": 1.0, "y": 0.0, "z": 0.0, "entity_path": "", "timestamp_ms": 0}],
        )

        # Second connection (simulating viewer reconnect) — send another click
        _send_messages(
            _E2E_PORT,
            [{"type": "click", "x": 2.0, "y": 0.0, "z": 0.0, "entity_path": "", "timestamp_ms": 0}],
        )

        all_done.wait(timeout=5.0)
        server.stop()

        xs = sorted(pt.x for pt in received)
        assert xs == [1.0, 2.0], f"Unexpected xs: {xs}"
WebSocket + client attempts to connect to our Python server.""" + + def test_viewer_ws_client_connects(self) -> None: + """dimos-viewer --connect starts and its WS client connects to our server.""" + server = _make_server() + server.start() + _wait_for_server(_E2E_PORT) + + received: list[Any] = [] + + def _on_pt(pt: Any) -> None: + received.append(pt) + + server.clicked_point.subscribe(_on_pt) + + # Start dimos-viewer in --connect mode, pointing it at a non-existent gRPC + # proxy (it will fail to stream data, but that's fine) and at our WS server. + # Use DISPLAY="" to prevent it from opening a window (it will exit quickly + # without a display, but the WebSocket connection happens before the GUI loop). + proc = subprocess.Popen( + [ + "dimos-viewer", + "--connect", + f"--ws-url=ws://127.0.0.1:{_E2E_PORT}/ws", + ], + env={ + **os.environ, + "DISPLAY": "", + }, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + + # Give the viewer up to 5 s to connect its WebSocket client to our server. + # We detect the connection by waiting for the server to accept a client. + deadline = time.monotonic() + 5.0 + while time.monotonic() < deadline: + # Check if any connection was established by sending a message and + # verifying the viewer is still running. + if proc.poll() is not None: + # Viewer exited (expected without a display) — check if it connected first. + break + time.sleep(0.1) + + proc.terminate() + try: + proc.wait(timeout=3) + except subprocess.TimeoutExpired: + proc.kill() + + stderr = proc.stderr.read().decode(errors="replace") if proc.stderr else "" + server.stop() + + # The viewer should log that it is connecting to our WS URL. + # Even without a display, the log output appears before the GUI loop starts. + assert "ws://127.0.0.1" in stderr or proc.returncode is not None, ( + f"Viewer did not attempt WS connection. 
stderr:\n{stderr}" + ) diff --git a/dimos/visualization/rerun/test_websocket_server.py b/dimos/visualization/rerun/test_websocket_server.py new file mode 100644 index 0000000000..73c6759eec --- /dev/null +++ b/dimos/visualization/rerun/test_websocket_server.py @@ -0,0 +1,407 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for RerunWebSocketServer. + +Uses ``MockViewerPublisher`` to simulate dimos-viewer sending events, matching +the exact JSON protocol used by the Rust ``WsPublisher`` in the viewer. +""" + +import asyncio +import json +import threading +import time +from typing import Any + +from dimos.visualization.rerun.websocket_server import RerunWebSocketServer + +_TEST_PORT = 13031 + + +class MockViewerPublisher: + """Python mirror of the Rust WsPublisher in dimos-viewer. + + Connects to a running ``RerunWebSocketServer`` and exposes the same + ``send_click`` / ``send_twist`` / ``send_stop`` / ``send_heartbeat`` + API that the real viewer uses. Useful for unit tests that need to + exercise the server without a real viewer binary. 
+ + Usage:: + + with MockViewerPublisher("ws://127.0.0.1:13031/ws") as pub: + pub.send_click(1.0, 2.0, 0.0, "/world", timestamp_ms=1000) + pub.send_twist(0.5, 0.0, 0.0, 0.0, 0.0, 0.8) + pub.send_stop() + """ + + def __init__(self, url: str) -> None: + self._url = url + self._ws: Any = None + self._loop: asyncio.AbstractEventLoop | None = None + + def __enter__(self) -> "MockViewerPublisher": + self._loop = asyncio.new_event_loop() + self._ws = self._loop.run_until_complete(self._connect()) + return self + + def __exit__(self, *_: Any) -> None: + if self._ws is not None and self._loop is not None: + self._loop.run_until_complete(self._ws.close()) + if self._loop is not None: + self._loop.close() + + async def _connect(self) -> Any: + import websockets.asyncio.client as ws_client + + return await ws_client.connect(self._url) + + def send_click( + self, + x: float, + y: float, + z: float, + entity_path: str = "", + timestamp_ms: int = 0, + ) -> None: + """Send a click event — matches viewer SelectionChange handler output.""" + self._send( + { + "type": "click", + "x": x, + "y": y, + "z": z, + "entity_path": entity_path, + "timestamp_ms": timestamp_ms, + } + ) + + def send_twist( + self, + linear_x: float, + linear_y: float, + linear_z: float, + angular_x: float, + angular_y: float, + angular_z: float, + ) -> None: + """Send a twist (WASD keyboard) event.""" + self._send( + { + "type": "twist", + "linear_x": linear_x, + "linear_y": linear_y, + "linear_z": linear_z, + "angular_x": angular_x, + "angular_y": angular_y, + "angular_z": angular_z, + } + ) + + def send_stop(self) -> None: + """Send a stop event (Space bar or key release).""" + self._send({"type": "stop"}) + + def send_heartbeat(self, timestamp_ms: int = 0) -> None: + """Send a heartbeat (1 Hz keepalive from viewer).""" + self._send({"type": "heartbeat", "timestamp_ms": timestamp_ms}) + + def flush(self, delay: float = 0.1) -> None: + """Wait briefly so the server processes queued messages.""" + 
def _collect(received: list[Any], done: threading.Event) -> Any:
    """Return a callback that appends to *received* and signals *done*."""

    def _cb(msg: Any) -> None:
        received.append(msg)
        done.set()

    return _cb


def _make_module(port: int = _TEST_PORT) -> RerunWebSocketServer:
    """Construct a server module bound to the unit-test port."""
    return RerunWebSocketServer(port=port)


def _wait_for_server(port: int, timeout: float = 3.0) -> None:
    """Block until the WebSocket server accepts an upgrade handshake."""

    async def _probe() -> None:
        import websockets.asyncio.client as ws_client

        # Successfully opening (and closing) a connection proves readiness.
        async with ws_client.connect(f"ws://127.0.0.1:{port}/ws"):
            pass

    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            asyncio.run(_probe())
            return
        except Exception:
            # Not up yet — retry until the deadline.
            time.sleep(0.05)
    raise TimeoutError(f"Server on port {port} did not become ready within {timeout}s")


class TestRerunWebSocketServerStartup:
    """Lifecycle tests: the server binds its port and stops cleanly."""

    def test_server_binds_port(self) -> None:
        """After start(), the server must be reachable on the configured port."""
        mod = _make_module()
        mod.start()
        try:
            _wait_for_server(_TEST_PORT)
        finally:
            mod.stop()

    def test_stop_is_idempotent(self) -> None:
        """Calling stop() twice must not raise."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)
        mod.stop()
        mod.stop()
    def test_click_publishes_point_stamped(self) -> None:
        """A single click publishes one PointStamped with correct coords."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        received: list[Any] = []
        done = threading.Event()
        mod.clicked_point.subscribe(_collect(received, done))

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_click(1.5, 2.5, 0.0, "/world", timestamp_ms=1000)
            pub.flush()

        done.wait(timeout=2.0)
        mod.stop()

        assert len(received) == 1
        pt = received[0]
        assert abs(pt.x - 1.5) < 1e-9
        assert abs(pt.y - 2.5) < 1e-9
        assert abs(pt.z - 0.0) < 1e-9

    def test_click_sets_frame_id_from_entity_path(self) -> None:
        """entity_path is stored as frame_id on the published PointStamped."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        received: list[Any] = []
        done = threading.Event()
        mod.clicked_point.subscribe(_collect(received, done))

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_click(0.0, 0.0, 0.0, "/robot/base", timestamp_ms=2000)
            pub.flush()

        done.wait(timeout=2.0)
        mod.stop()
        assert received and received[0].frame_id == "/robot/base"

    def test_click_timestamp_converted_from_ms(self) -> None:
        """timestamp_ms is converted to seconds on PointStamped.ts."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        received: list[Any] = []
        done = threading.Event()
        mod.clicked_point.subscribe(_collect(received, done))

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_click(0.0, 0.0, 0.0, "", timestamp_ms=5000)
            pub.flush()

        done.wait(timeout=2.0)
        mod.stop()
        # 5000 ms → 5.0 s
        assert received and abs(received[0].ts - 5.0) < 1e-6

    def test_multiple_clicks_all_published(self) -> None:
        """A burst of clicks all arrive on the stream."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        received: list[Any] = []
        all_arrived = threading.Event()

        def _cb(pt: Any) -> None:
            received.append(pt)
            if len(received) >= 3:
                all_arrived.set()

        mod.clicked_point.subscribe(_cb)

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_click(1.0, 0.0, 0.0)
            pub.send_click(2.0, 0.0, 0.0)
            pub.send_click(3.0, 0.0, 0.0)
            pub.flush()

        all_arrived.wait(timeout=3.0)
        mod.stop()

        # Sort because delivery order is not guaranteed across dispatches.
        assert sorted(pt.x for pt in received) == [1.0, 2.0, 3.0]
class TestNonClickMessages:
    """Non-click protocol messages: routing to tele_cmd_vel and robustness."""

    def test_heartbeat_does_not_publish(self) -> None:
        """Heartbeat messages must not trigger a clicked_point publish."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        received: list[Any] = []
        mod.clicked_point.subscribe(received.append)

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_heartbeat(9999)
            pub.flush()

        mod.stop()
        assert received == []

    def test_twist_does_not_publish_clicked_point(self) -> None:
        """Twist messages must not trigger a clicked_point publish."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        received: list[Any] = []
        mod.clicked_point.subscribe(received.append)

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_twist(0.5, 0.0, 0.0, 0.0, 0.0, 0.8)
            pub.flush()

        mod.stop()
        assert received == []

    def test_stop_does_not_publish_clicked_point(self) -> None:
        """Stop messages must not trigger a clicked_point publish."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        received: list[Any] = []
        mod.clicked_point.subscribe(received.append)

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_stop()
            pub.flush()

        mod.stop()
        assert received == []

    def test_twist_publishes_on_tele_cmd_vel(self) -> None:
        """Twist messages publish a Twist on the tele_cmd_vel stream."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        received: list[Any] = []
        done = threading.Event()
        mod.tele_cmd_vel.subscribe(_collect(received, done))

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_twist(0.5, 0.0, 0.0, 0.0, 0.0, 0.8)
            pub.flush()

        done.wait(timeout=2.0)
        mod.stop()

        assert len(received) == 1
        tw = received[0]
        assert abs(tw.linear.x - 0.5) < 1e-9
        assert abs(tw.angular.z - 0.8) < 1e-9

    def test_stop_publishes_zero_twist_on_tele_cmd_vel(self) -> None:
        """Stop messages publish a zero Twist on the tele_cmd_vel stream."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        received: list[Any] = []
        done = threading.Event()
        mod.tele_cmd_vel.subscribe(_collect(received, done))

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_stop()
            pub.flush()

        done.wait(timeout=2.0)
        mod.stop()

        assert len(received) == 1
        tw = received[0]
        assert tw.is_zero()

    def test_invalid_json_does_not_crash(self) -> None:
        """Malformed JSON is silently dropped; server stays alive."""
        import websockets.asyncio.client as ws_client

        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        async def _send_bad() -> None:
            async with ws_client.connect(f"ws://127.0.0.1:{_TEST_PORT}/ws") as ws:
                await ws.send("this is not json {{")
                await asyncio.sleep(0.1)
                # A valid message after the bad one proves the connection and
                # server are still functional.
                await ws.send(json.dumps({"type": "heartbeat", "timestamp_ms": 0}))
                await asyncio.sleep(0.1)

        asyncio.run(_send_bad())
        mod.stop()

    def test_mixed_message_sequence(self) -> None:
        """Realistic sequence: heartbeat → click → twist → stop publishes one point."""
        mod = _make_module()
        mod.start()
        _wait_for_server(_TEST_PORT)

        # Subscribe before sending so we don't race against the click dispatch.
        received: list[Any] = []
        done = threading.Event()

        def _cb(pt: Any) -> None:
            received.append(pt)
            done.set()

        mod.clicked_point.subscribe(_cb)

        with MockViewerPublisher(f"ws://127.0.0.1:{_TEST_PORT}/ws") as pub:
            pub.send_heartbeat(1000)
            pub.send_click(7.0, 8.0, 9.0, "/map", timestamp_ms=1100)
            pub.send_twist(0.3, 0.0, 0.0, 0.0, 0.0, 0.2)
            pub.send_stop()
            pub.flush()

        done.wait(timeout=2.0)
        mod.stop()

        assert len(received) == 1
        assert abs(received[0].x - 7.0) < 1e-9
        assert abs(received[0].y - 8.0) < 1e-9
        assert abs(received[0].z - 9.0) < 1e-9
+ +Message format (newline-delimited JSON, ``"type"`` discriminant): + + {"type":"heartbeat","timestamp_ms":1234567890} + {"type":"click","x":1.0,"y":2.0,"z":3.0,"entity_path":"/world","timestamp_ms":1234567890} + {"type":"twist","linear_x":0.5,"linear_y":0.0,"linear_z":0.0, + "angular_x":0.0,"angular_y":0.0,"angular_z":0.8} + {"type":"stop"} +""" + +import asyncio +import json +import threading +from typing import Any + +import websockets + +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import Out +from dimos.msgs.geometry_msgs.PointStamped import PointStamped +from dimos.msgs.geometry_msgs.Twist import Twist +from dimos.msgs.geometry_msgs.Vector3 import Vector3 +from dimos.utils.logging_config import setup_logger + +logger = setup_logger() + + +class Config(ModuleConfig): + # Intentionally binds 0.0.0.0 by default so the viewer can connect from + # any machine on the network (the typical robot deployment scenario). + host: str = "0.0.0.0" + port: int = 3030 + + +class RerunWebSocketServer(Module[Config]): + """Receives dimos-viewer WebSocket events and publishes them as DimOS streams. + + The viewer connects to this module (not the other way around) when running + in ``--connect`` mode. Each click event is converted to a ``PointStamped`` + and published on the ``clicked_point`` stream so downstream modules (e.g. + ``ReplanningAStarPlanner``) can consume it without modification. + + Outputs: + clicked_point: 3-D world-space point from the most recent viewer click. + tele_cmd_vel: Twist velocity commands from keyboard teleop, including stop events. 
+ """ + + default_config = Config + + clicked_point: Out[PointStamped] + tele_cmd_vel: Out[Twist] + + def __init__(self, **kwargs: Any) -> None: + super().__init__(**kwargs) + self._ws_loop: asyncio.AbstractEventLoop | None = None + self._server_thread: threading.Thread | None = None + self._stop_event: asyncio.Event | None = None + self._server_ready = threading.Event() + + @rpc + def start(self) -> None: + super().start() + self._server_thread = threading.Thread( + target=self._run_server, daemon=True, name="rerun-ws-server" + ) + self._server_thread.start() + logger.info( + f"RerunWebSocketServer starting on ws://{self.config.host}:{self.config.port}/ws" + ) + + @rpc + def stop(self) -> None: + # Wait briefly for the server thread to initialise _stop_event so we + # don't silently skip the shutdown signal (race with _serve()). + self._server_ready.wait(timeout=5.0) + if ( + self._ws_loop is not None + and not self._ws_loop.is_closed() + and self._stop_event is not None + ): + self._ws_loop.call_soon_threadsafe(self._stop_event.set) + if self._server_thread is not None: + self._server_thread.join(timeout=5.0) + super().stop() + + def _run_server(self) -> None: + """Entry point for the background server thread.""" + self._ws_loop = asyncio.new_event_loop() + asyncio.set_event_loop(self._ws_loop) + try: + self._ws_loop.run_until_complete(self._serve()) + finally: + self._ws_loop.close() + + async def _serve(self) -> None: + import websockets.asyncio.server as ws_server + + self._stop_event = asyncio.Event() + self._server_ready.set() + + async with ws_server.serve( + self._handle_client, + host=self.config.host, + port=self.config.port, + ): + logger.info( + f"RerunWebSocketServer listening on ws://{self.config.host}:{self.config.port}/ws" + ) + await self._stop_event.wait() + + async def _handle_client(self, websocket: Any) -> None: + addr = websocket.remote_address + logger.info(f"RerunWebSocketServer: viewer connected from {addr}") + try: + async for raw in 
websocket: + self._dispatch(raw) + except websockets.ConnectionClosed as exc: + logger.debug(f"RerunWebSocketServer: client {addr} disconnected ({exc})") + + def _dispatch(self, raw: str | bytes) -> None: + try: + msg = json.loads(raw) + except json.JSONDecodeError: + logger.warning(f"RerunWebSocketServer: ignoring non-JSON message: {raw!r}") + return + + if not isinstance(msg, dict): + logger.warning(f"RerunWebSocketServer: expected JSON object, got {type(msg).__name__}") + return + + msg_type = msg.get("type") + + if msg_type == "click": + pt = PointStamped( + x=float(msg.get("x") or 0), + y=float(msg.get("y") or 0), + z=float(msg.get("z") or 0), + ts=float(msg.get("timestamp_ms") or 0) / 1000.0, + frame_id=str(msg.get("entity_path") or ""), + ) + logger.debug(f"RerunWebSocketServer: click → {pt}") + self.clicked_point.publish(pt) + + elif msg_type == "twist": + twist = Twist( + linear=Vector3( + float(msg.get("linear_x", 0)), + float(msg.get("linear_y", 0)), + float(msg.get("linear_z", 0)), + ), + angular=Vector3( + float(msg.get("angular_x", 0)), + float(msg.get("angular_y", 0)), + float(msg.get("angular_z", 0)), + ), + ) + logger.debug(f"RerunWebSocketServer: twist → {twist}") + self.tele_cmd_vel.publish(twist) + + elif msg_type == "stop": + logger.debug("RerunWebSocketServer: stop") + self.tele_cmd_vel.publish(Twist.zero()) + + elif msg_type == "heartbeat": + logger.debug(f"RerunWebSocketServer: heartbeat ts={msg.get('timestamp_ms')}") + + else: + logger.warning(f"RerunWebSocketServer: unknown message type {msg_type!r}") + + +rerun_ws_server = RerunWebSocketServer.blueprint diff --git a/dimos/visualization/vis_module.py b/dimos/visualization/vis_module.py new file mode 100644 index 0000000000..688a6efb5b --- /dev/null +++ b/dimos/visualization/vis_module.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shared visualization module factory for all robot blueprints.""" + +from typing import Any + +from dimos.core.blueprints import Blueprint, autoconnect +from dimos.core.global_config import ViewerBackend +from dimos.protocol.pubsub.impl.lcmpubsub import LCM + + +def vis_module( + viewer_backend: ViewerBackend, + rerun_config: dict[str, Any] | None = None, + foxglove_config: dict[str, Any] | None = None, +) -> Blueprint: + """Create a visualization blueprint based on the selected viewer backend. + + Bundles the appropriate viewer module (Rerun or Foxglove) together with + the ``WebsocketVisModule`` and ``RerunWebSocketServer`` so that the web + dashboard and remote viewer connections work out of the box. 
+ + Example usage:: + + from dimos.core.global_config import global_config + viz = vis_module( + global_config.viewer, + rerun_config={ + "visual_override": { + "world/camera_info": lambda ci: ci.to_rerun(...), + }, + "static": { + "world/tf/base_link": lambda rr: [rr.Boxes3D(...)], + }, + }, + ) + """ + from dimos.web.websocket_vis.websocket_vis_module import WebsocketVisModule + + if foxglove_config is None: + foxglove_config = {} + if rerun_config is None: + rerun_config = {} + rerun_config = {**rerun_config} + rerun_config.setdefault("pubsubs", [LCM()]) + + match viewer_backend: + case "foxglove": + from dimos.robot.foxglove_bridge import FoxgloveBridge + + return autoconnect( + FoxgloveBridge.blueprint(**foxglove_config), + WebsocketVisModule.blueprint(), + ) + case "rerun" | "rerun-web": + from dimos.visualization.rerun.bridge import _BACKEND_TO_MODE, RerunBridgeModule + from dimos.visualization.rerun.websocket_server import RerunWebSocketServer + + viewer_mode = _BACKEND_TO_MODE.get(viewer_backend, "native") + return autoconnect( + RerunBridgeModule.blueprint(viewer_mode=viewer_mode, **rerun_config), + RerunWebSocketServer.blueprint(), + WebsocketVisModule.blueprint(), + ) + case "rerun-connect": + from dimos.visualization.rerun.bridge import RerunBridgeModule + from dimos.visualization.rerun.websocket_server import RerunWebSocketServer + + return autoconnect( + RerunBridgeModule.blueprint(viewer_mode="connect", **rerun_config), + RerunWebSocketServer.blueprint(), + ) + case _: + return autoconnect(WebsocketVisModule.blueprint()) diff --git a/pyproject.toml b/pyproject.toml index 7e2f38546e..740b8af816 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,6 +63,7 @@ dependencies = [ "annotation-protocol>=1.4.0", "lazy_loader", "plum-dispatch==2.5.7", + "xxhash>=3.0.0", # Logging "structlog>=25.5.0,<26", "colorlog==6.9.0", @@ -301,6 +302,12 @@ sim = [ # "xformers @ https://pypi.jetson-ai-lab.io/jp6/cu126/+f/.../xformers-0.0.33-cp39-abi3-linux_aarch64.whl ; 
platform_machine == 'aarch64' and sys_platform == 'linux'", # ] +navigation = [ + # PGO (pose graph optimization) — gtsam-develop has aarch64 wheels; stable gtsam 4.2 does not + "gtsam>=4.2; platform_machine != 'aarch64'", + "gtsam-develop; platform_machine == 'aarch64'", +] + drone = [ "pymavlink" ] @@ -335,6 +342,11 @@ base = [ "dimos[agents,web,perception,visualization,sim]", ] +[tool.uv] +# gtsam-develop incorrectly declares pytest as a runtime dependency (packaging bug). +# Override it to keep our pinned version. +override-dependencies = ["pytest==8.3.5"] + [tool.ruff] line-length = 100 exclude = [ diff --git a/uv.lock b/uv.lock index 529842294b..419eccc463 100644 --- a/uv.lock +++ b/uv.lock @@ -24,6 +24,9 @@ resolution-markers = [ "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", ] +[manifest] +overrides = [{ name = "pytest", specifier = "==8.3.5" }] + [[package]] name = "absl-py" version = "2.4.0" @@ -1714,6 +1717,7 @@ dependencies = [ { name = "toolz" }, { name = "typer" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "xxhash" }, ] [package.optional-dependencies] @@ -1920,6 +1924,10 @@ misc = [ { name = "xarm-python-sdk" }, { name = "yapf" }, ] +navigation = [ + { name = "gtsam", marker = "platform_machine != 'aarch64'" }, + { name = "gtsam-develop", marker = "platform_machine == 'aarch64'" }, +] perception = [ { name = "filterpy" }, { name = "hydra-core" }, @@ -2014,6 +2022,8 @@ requires-dist = [ { name = "filterpy", marker = "extra == 'perception'", specifier = ">=1.4.5" }, { name = "gdown", marker = "extra == 'misc'", specifier = "==5.2.0" }, { name = "googlemaps", marker = "extra == 'misc'", specifier = ">=4.10.0" }, + { name = "gtsam", marker = "platform_machine != 'aarch64' and extra == 'navigation'", specifier = ">=4.2" }, + { name = "gtsam-develop", 
marker = "platform_machine == 'aarch64' and extra == 'navigation'" }, { name = "hydra-core", marker = "extra == 'perception'", specifier = ">=1.3.0" }, { name = "ipykernel", marker = "extra == 'misc'" }, { name = "kaleido", marker = "extra == 'manipulation'", specifier = ">=0.2.1" }, @@ -2150,9 +2160,10 @@ requires-dist = [ { name = "xarm-python-sdk", marker = "extra == 'manipulation'", specifier = ">=1.17.0" }, { name = "xarm-python-sdk", marker = "extra == 'misc'", specifier = ">=1.17.0" }, { name = "xformers", marker = "platform_machine == 'x86_64' and extra == 'cuda'", specifier = ">=0.0.20" }, + { name = "xxhash", specifier = ">=3.0.0" }, { name = "yapf", marker = "extra == 'misc'", specifier = "==0.40.2" }, ] -provides-extras = ["misc", "visualization", "agents", "web", "perception", "unitree", "manipulation", "cpu", "cuda", "dev", "psql", "sim", "drone", "dds", "docker", "base"] +provides-extras = ["misc", "visualization", "agents", "web", "perception", "unitree", "manipulation", "cpu", "cuda", "dev", "psql", "sim", "navigation", "drone", "dds", "docker", "base"] [[package]] name = "dimos-lcm" @@ -3022,6 +3033,46 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/b2/b096ccce418882fbfda4f7496f9357aaa9a5af1896a9a7f60d9f2b275a06/grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb", size = 4929852, upload-time = "2026-02-06T09:56:45.885Z" }, ] +[[package]] +name = "gtsam" +version = "4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64') or (python_full_version < '3.11' and sys_platform != 'linux')" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64') or (python_full_version >= '3.11' 
and sys_platform != 'linux')" }, + { name = "pyparsing", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/3f/6325c300cc92ca2495570e41ab5dffc3147837d5fce77e243714c1d646dd/gtsam-4.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:e1c2958b5e8895ff5822114119e9b303fc12b2ded4c9ede0a7a6844f6eb7be1a", size = 21626497, upload-time = "2023-09-03T16:47:54.229Z" }, + { url = "https://files.pythonhosted.org/packages/11/d8/ab317fdedeca03362d316c1b5c32cabc06f7a8948363f1c96d4a7f3fca8d/gtsam-4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:b89d231f1de264d475ce1c0fcac320fdccb1260e2df01f8fb8775cdaddf57fb0", size = 19858294, upload-time = "2023-09-03T16:48:10.742Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e1/8c87e6cc713f18be917fd110570e0646a132ab189781c551e4347d175170/gtsam-4.2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:59d452e645f7ca598e89170785c9e8ac1b5a2b0a0d32bcda2c279b7b51bb379f", size = 21621183, upload-time = "2023-09-03T16:48:27.783Z" }, + { url = "https://files.pythonhosted.org/packages/89/80/95d842fa51fef2223f3920f82ffea241bcc6f6b6ba6d7aa96c6e46e19474/gtsam-4.2-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:979b7c886724ac403d5c323613fd9800c8ac7ab224e2909faa8b266e5de742b2", size = 21788738, upload-time = "2023-09-03T16:48:45.876Z" }, + { url = "https://files.pythonhosted.org/packages/0e/25/b380725edd25c7ab50814a544ffaa748af41fe639d5294818cf4e67b2584/gtsam-4.2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:2996c349f03182df739f284adc62f455337b5272c4227bb970cbac7622b1d8e8", size = 21626506, upload-time = "2023-09-03T16:49:03.477Z" }, + { url = "https://files.pythonhosted.org/packages/ae/41/a7e26b58289f0f634123615fffa3c7eefd9b559cc1709d7c6ae67da7c87d/gtsam-4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:0dc13b8a76ef862359a15ea9feec7cfd7df9d2bf2545b5f96297592c63e76aad", size = 20149822, upload-time = "2023-09-03T16:49:20.709Z" }, + { url 
= "https://files.pythonhosted.org/packages/bd/ba/d7b3ddccb179ca28db84edb7dd223aba6733ae9f59015b6263391bc8976b/gtsam-4.2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:473978b6d32ab45903433a745ca166f0441058a726c7f26e9f51ce5aa2a03721", size = 21620301, upload-time = "2023-09-03T16:49:37.761Z" }, + { url = "https://files.pythonhosted.org/packages/51/8e/a1cf54ea59c81b300d31f407270d8fe8b7e6fdcfd4c7353b0a0857602808/gtsam-4.2-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:3793d5524a0417924fddb9d9662afc8129357fd52f07c422776024ddab3780c1", size = 22352720, upload-time = "2023-10-02T15:24:54.997Z" }, +] + +[[package]] +name = "gtsam-develop" +version = "4.3a1.dev202603171234" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "pytest" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/0a/05aaa79ac5f1a4af2523e583e392f9fc72ee935a7402c9647d2669f0dcfc/gtsam_develop-4.3a1.dev202603171234-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:209b8113bcc6e681e89182c0c7cc5020dd59fc667598138fb6872325bda31f8f", size = 40754398, upload-time = "2026-03-17T13:26:00.414Z" }, + { url = "https://files.pythonhosted.org/packages/f4/0e/dd122ad9b096a8cb976f28ef0c5f3edb1198939b23447cbdc11d96eff1c4/gtsam_develop-4.3a1.dev202603171234-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2ddd80f4d485619ed624bfbafc66baaa4536161b5523dc70e8b9b3b65c37cd03", size = 29014800, upload-time = "2026-03-17T13:26:03.471Z" }, + { url = "https://files.pythonhosted.org/packages/25/9e/4a9efbf5ebca49c4db3d141300fb1da23d69e810f2ea1733292a59b18a69/gtsam_develop-4.3a1.dev202603171234-cp312-cp312-macosx_10_15_universal2.whl", hash = 
"sha256:d92cfad013020d022c9672dcb5fec19f9a249bad85c9330f5336c4b1898c861a", size = 40954472, upload-time = "2026-03-17T13:26:09.219Z" }, + { url = "https://files.pythonhosted.org/packages/e9/70/1035c171a5f80a4a86f9c6a6a51c141190002c67853e0935e2cf6e4ec0ba/gtsam_develop-4.3a1.dev202603171234-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6b76d7721f1293af6742b1424735b0c3a92eef10ef667d4850b93c6d3982c5cf", size = 28998283, upload-time = "2026-03-17T13:26:11.963Z" }, + { url = "https://files.pythonhosted.org/packages/66/fd/5c5866b5f7ea26b3e51e8443b9f4407a8fc34eb0c3622c4c166b9f52c0b5/gtsam_develop-4.3a1.dev202603171234-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:e9982ec335a18ffe24888830b0e293bc4818c888523583327c8fac988a64158f", size = 40953486, upload-time = "2026-03-17T13:26:17.518Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/074401f049f5a05b7b6e212d0e1950f997c5052307bf1174eea8ffc81e0b/gtsam_develop-4.3a1.dev202603171234-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b8010f783f4a5de6c9a2f08cf31b5ec029d38af2b6d961839f89a996ffd2ce90", size = 29000294, upload-time = "2026-03-17T13:26:20.775Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ef/5029a47d89e41ff36dc2b6e815910eba2dea227209981fbef708afc452ba/gtsam_develop-4.3a1.dev202603171234-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:bc4f2571b7a0d9a34114ddac0fe10d434e6a7a84fbbab50737c4364fc7a250de", size = 40967006, upload-time = "2026-03-17T13:26:26.442Z" }, + { url = "https://files.pythonhosted.org/packages/89/e5/42a6009b5f37584913f2fc7b3a505f30464193fdd0d68f2224139659f541/gtsam_develop-4.3a1.dev202603171234-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6e678f1d84d73a09ce5715d0877ed97352ebbb6cf6d529ad82fda098afc8b84e", size = 29027556, upload-time = "2026-03-17T13:26:29.888Z" }, +] + [[package]] name = "h11" version = "0.16.0"