From 9a5a3a9fc96367435b6ec15a3ff3ca5df7f576f8 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Mon, 16 Mar 2026 16:56:05 +0800 Subject: [PATCH 01/20] docs: add remote repair design and plan --- docs/plans/2026-03-16-remote-repair-design.md | 388 +++++++++++ ...03-16-remote-repair-implementation-plan.md | 609 ++++++++++++++++++ 2 files changed, 997 insertions(+) create mode 100644 docs/plans/2026-03-16-remote-repair-design.md create mode 100644 docs/plans/2026-03-16-remote-repair-implementation-plan.md diff --git a/docs/plans/2026-03-16-remote-repair-design.md b/docs/plans/2026-03-16-remote-repair-design.md new file mode 100644 index 00000000..10ea7958 --- /dev/null +++ b/docs/plans/2026-03-16-remote-repair-design.md @@ -0,0 +1,388 @@ +# ClawPal Remote Repair Design + +日期:2026-03-16 + +## 1. 目标 + +为远程 OpenClaw 实例提供受控的远程修复能力,链路如下: + +1. ClawPal 检测到远程实例 A 异常 +2. 用户在 ClawPal 中点击“修复” +3. ClawPal 将诊断结果发送给开放修复 bot B +4. B 返回结构化 repair plan +5. ClawPal 在本地校验 plan 安全性 +6. ClawPal 通过 SSH 到 A 执行 plan +7. ClawPal 将执行结果回传给 B +8. ClawPal 再次对 A 运行 Doctor +9. 若仍异常,则进入下一轮,直到修复成功或达到停止条件 + +本设计要求: + +- B 只能产出受控 DSL,不允许直接执行 +- ClawPal 持有 SSH 凭据并保留最终执行否决权 +- Doctor 复检结果是唯一成功判定来源 +- 所有执行步骤均可审计、可回显、可中断 + +## 2. 非目标 + +本期明确不做: + +- 任意 shell 脚本/bundle 上传执行 +- 无限制的任意命令透传 +- 后台长期自愈 daemon +- 跨实例并发修复编排 +- 无用户确认的高风险自动执行 + +## 3. 方案对比 + +### 3.1 薄代理方案 + +B 只返回高层修复建议,ClawPal 本地映射为 SSH 操作。 + +优点: + +- 本地安全边界最清晰 +- B 无需理解主机细节 + +缺点: + +- ClawPal 需要维护越来越多的动作映射 +- 扩展新修复场景时双端耦合更重 + +### 3.2 结构化 DSL 方案 + +B 返回结构化 repair DSL,ClawPal 负责校验、执行、回传、复检。 + +优点: + +- 扩展性最佳 +- B 可以按执行结果逐轮调整策略 +- ClawPal 角色稳定为安全执行层 + +缺点: + +- 需要设计 DSL、状态机、策略校验和审计结构 + +### 3.3 远端脚本方案 + +B 返回脚本或 bundle,ClawPal 上传到 A 并执行。 + +优点: + +- 表达复杂修复逻辑简单 + +缺点: + +- 安全性和审计性最差 +- 与受控动作约束冲突 + +### 3.4 结论 + +采用“结构化 DSL + 本地执行器 + 多轮 Doctor 复检”方案。 + +## 4. 
核心架构 + +### 4.1 角色边界 + +- A:被修复目标,即用户自己的远程 OpenClaw +- B:开放 repair bot,只负责出 plan 和根据结果调整下一轮策略 +- ClawPal:协调器、执行器、策略裁决者、审计落点 + +### 4.2 真相源 + +- “是否仍有问题”由 ClawPal 对 A 运行的 Doctor 结果判定 +- B 的 `stop` 或“修复成功”只能视为建议,不是最终成功信号 + +### 4.3 运行闭环 + +1. ClawPal 对 A 运行 Doctor,得到 `diagnosis` +2. ClawPal 组装 `diagnosis + host facts + prior rounds` 发给 B +3. B 返回一轮 `repair plan` +4. ClawPal 执行本地 `policy validation` +5. 校验通过后通过 SSH 在 A 上逐步执行 +6. ClawPal 汇总每一步 `step result` +7. ClawPal 将结果回传给 B +8. ClawPal 对 A 再次运行 Doctor +9. 若未恢复则下一轮;否则结束 + +## 5. Repair DSL + +### 5.1 最小动作集合 + +第一版仅支持以下动作: + +- `read_file` +- `write_file` +- `run_command` +- `restart_service` +- `collect_logs` +- `health_check` +- `stop` + +### 5.2 Plan 结构 + +```json +{ + "planId": "rp_20260316_xxx", + "round": 1, + "goal": "restore gateway health", + "summary": "Restart gateway and verify readiness", + "steps": [ + { + "id": "step_1", + "type": "run_command", + "command": ["systemctl", "restart", "openclaw"], + "cwd": "/home/ubuntu", + "timeoutSec": 20, + "allowlistTag": "service_control", + "onFailure": "continue" + }, + { + "id": "step_2", + "type": "health_check", + "check": "doctor_gateway_ready", + "onFailure": "stop_round" + } + ], + "stopPolicy": { + "maxRounds": 5, + "stopOnUnsafeAction": true, + "stopOnRepeatedFailure": 2 + } +} +``` + +### 5.3 设计原则 + +- 不用自然语言描述执行意图 +- 每一步必须是结构化字段 +- `run_command` 必须附带 `allowlistTag` +- 默认面向幂等或可安全重试动作 + +## 6. 状态机 + +修复会话状态: + +- `idle` +- `diagnosing` +- `planning` +- `validating_plan` +- `executing` +- `reporting` +- `rechecking` +- `completed` +- `blocked` +- `failed` + +说明: + +- `blocked` 表示命中安全策略或等待用户确认 +- `failed` 表示本轮或会话已不可继续 +- `completed` 仅在 Doctor 复检健康时成立 + +## 7. 
安全边界 + +### 7.1 凭据与执行权 + +- SSH 凭据只保存在 ClawPal 本地 +- B 不直接接触 A,也不直接执行命令 +- ClawPal 对每一步拥有最终执行否决权 + +### 7.2 本地策略校验 + +在 `validating_plan` 阶段执行以下检查: + +- 动作类型必须在 DSL 白名单内 +- `run_command` 必须命中命令 allowlist +- `write_file` 只能写允许目录 +- 禁止写敏感路径,例如 SSH key、系统认证配置、shell profile +- 每一步必须声明超时或使用默认超时 +- 整轮存在最大步数、最大轮次、重复失败阈值 + +### 7.3 第一版建议 + +第一版进一步收紧: + +- `run_command` 不直接执行任意 argv +- `allowlistTag` 需要映射到 ClawPal 内置模板或受控前缀 +- 默认展示 plan 摘要后由用户确认执行 + +## 8. 数据模型 + +### 8.1 修复会话 + +```ts +interface RemoteRepairSession { + id: string; + instanceId: string; + startedAt: string; + status: "idle" | "running" | "completed" | "blocked" | "failed"; + currentRound: number; + lastDiagnosisId?: string; + lastPlanId?: string; +} +``` + +### 8.2 Repair Plan + +```ts +interface RemoteRepairPlan { + planId: string; + round: number; + goal: string; + summary: string; + steps: RemoteRepairStep[]; + stopPolicy: { + maxRounds: number; + stopOnUnsafeAction: boolean; + stopOnRepeatedFailure: number; + }; +} +``` + +### 8.3 Repair Step + +```ts +interface RemoteRepairStep { + id: string; + type: + | "read_file" + | "write_file" + | "run_command" + | "restart_service" + | "collect_logs" + | "health_check" + | "stop"; + allowlistTag?: string; + command?: string[]; + path?: string; + content?: string; + timeoutSec?: number; + onFailure: "continue" | "stop_round" | "stop_session"; +} +``` + +### 8.4 Step Result + +```ts +interface RemoteRepairStepResult { + stepId: string; + status: "passed" | "failed" | "blocked" | "skipped"; + startedAt: string; + finishedAt: string; + exitCode?: number; + stdoutPreview?: string; + stderrPreview?: string; + changedFiles?: string[]; + message: string; +} +``` + +## 9. 
与现有代码的落点 + +### 9.1 前端 + +- `src/pages/Doctor.tsx` + - 增加“远程修复”入口、修复轮次状态、时间线 +- `src/lib/api.ts` + - 增加启动、轮询、取消远程修复 API +- `src/lib/types.ts` + - 新增 repair DSL / session / result 类型 +- 新增 UI 组件 + - `RemoteRepairTimeline` + - `RemoteRepairPlanDialog` + - `RemoteRepairSessionBanner` + +### 9.2 Tauri / Rust + +- `src-tauri/src/commands/doctor_assistant.rs` + - 抽出可复用的多轮修复 orchestrator 经验 +- `src-tauri/src/commands/doctor.rs` + - 继续作为 Doctor 真相源 +- `src-tauri/src/ssh.rs` + - 复用 SSH 执行能力并增加 DSL 执行适配 +- 新增目录 `src-tauri/src/remote_repair/` + - `types.rs` + - `planner_client.rs` + - `executor.rs` + - `policy.rs` + - `session.rs` + - `orchestrator.rs` + +## 10. UI 交互 + +第一版交互流程: + +1. Doctor 检测到远程实例异常 +2. 展示“请求修复计划” +3. 返回后展示计划摘要、动作数、风险标签 +4. 用户确认后执行本轮 +5. 执行中展示步骤级进度和输出摘要 +6. 每轮结束自动重新 Doctor +7. 成功则显示“已恢复” +8. 若 blocked/failed,则展示阻塞原因和最近一步输出 + +第一版不做完全静默自动修复。 + +## 11. 错误模型 + +需要清晰区分以下失败态: + +- `planning_failed` + - B 未返回合法 plan 或通信失败 +- `policy_blocked` + - 本地安全策略拒绝执行 +- `execution_failed` + - SSH 执行失败、超时、文件写入失败 +- `diagnosis_still_failing` + - 执行完成但 Doctor 仍异常 +- `session_exhausted` + - 达到最大轮次或重复失败上限 +- `cancelled` + - 用户取消 + +要求: + +- 所有失败都需带轮次、步骤、最后原因 +- 仅当 Doctor 复检通过时标记成功 + +## 12. 测试策略 + +### 12.1 Rust 单元测试 + +- DSL 解析 +- policy 校验 +- allowlist 拦截 +- stopPolicy 触发 +- step result 汇总 + +### 12.2 Rust 集成测试 + +- mock planner 返回多轮 plan +- mock SSH 结果 +- 验证状态机轮转和结束条件 + +### 12.3 前端测试 + +- Doctor 页远程修复入口显示条件 +- 计划预览与确认 +- 执行进度展示 +- blocked / failed / completed 三态 UI + +## 13. 推荐实施顺序 + +1. 先实现本地 orchestrator 和 DSL/policy,不接真实 B +2. 使用 mock planner 跑通多轮 loop +3. 接入真实 B 的 plan API +4. 接入 Doctor 页面 UI 和审计展示 +5. 补充多轮失败、取消、恢复边界测试 + +## 14. 
验收标准 + +- 远程实例异常时可从 Doctor 页面进入修复流程 +- ClawPal 能向 B 请求并接收结构化 repair plan +- plan 在本地执行前经过策略校验 +- ClawPal 可通过 SSH 在 A 上执行 plan 并回传结果 +- 至少支持多轮修复直到 Doctor 健康或达到停止条件 +- blocked/failed 原因对用户可见 +- 所有步骤存在本地审计记录 diff --git a/docs/plans/2026-03-16-remote-repair-implementation-plan.md b/docs/plans/2026-03-16-remote-repair-implementation-plan.md new file mode 100644 index 00000000..5760900f --- /dev/null +++ b/docs/plans/2026-03-16-remote-repair-implementation-plan.md @@ -0,0 +1,609 @@ +# Remote Repair Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Build a controlled remote repair flow where ClawPal requests a structured repair plan from bot B, validates it locally, executes it over SSH on remote instance A, reports results back, and loops until Doctor reports healthy or stop conditions are reached. + +**Architecture:** Add a dedicated `remote_repair` Rust module that owns repair DSL types, planner integration, policy validation, SSH-backed execution, and session orchestration. Expose it through Tauri commands consumed by the Doctor page, while reusing the existing remote Doctor, SSH pool, and progress-event patterns. + +**Tech Stack:** Tauri 2, Rust, React 18, TypeScript, Vitest, Cargo test + +--- + +### Task 1: Define remote repair TypeScript contract + +**Files:** +- Modify: `src/lib/types.ts` +- Test: `src/lib/__tests__/doctor-page-features.test.ts` + +**Step 1: Write the failing test** + +Add an assertion in `src/lib/__tests__/doctor-page-features.test.ts` that a remote repair state object with `status`, `currentRound`, `plan`, and `stepResults` is accepted by the UI-facing helpers without TypeScript errors. + +**Step 2: Run test to verify it fails** + +Run: `npm test -- doctor-page-features` +Expected: FAIL with missing remote repair types. 
+ +**Step 3: Write minimal implementation** + +Add exact exported types for: + +- `RemoteRepairSessionStatus` +- `RemoteRepairPlan` +- `RemoteRepairStep` +- `RemoteRepairStepResult` +- `RemoteRepairSession` +- `RemoteRepairStartResult` +- `RemoteRepairProgressEvent` + +Keep fields aligned with the approved design and current API naming style in `src/lib/types.ts`. + +**Step 4: Run test to verify it passes** + +Run: `npm test -- doctor-page-features` +Expected: PASS. + +**Step 5: Commit** + +```bash +git add src/lib/types.ts src/lib/__tests__/doctor-page-features.test.ts +git commit -m "feat: add remote repair frontend type contracts" +``` + +### Task 2: Add frontend API bindings for remote repair + +**Files:** +- Modify: `src/lib/api.ts` +- Modify: `src/lib/types.ts` +- Test: `src/lib/__tests__/use-api-extra.test.ts` + +**Step 1: Write the failing test** + +Add a test in `src/lib/__tests__/use-api-extra.test.ts` that asserts the API object exposes these functions: + +- `startRemoteRepairSession` +- `getRemoteRepairSession` +- `cancelRemoteRepairSession` + +For remote instances, the input must include `hostId`. + +**Step 2: Run test to verify it fails** + +Run: `npm test -- use-api-extra` +Expected: FAIL because the API methods do not exist. + +**Step 3: Write minimal implementation** + +In `src/lib/api.ts`, add wrappers for new Tauri commands: + +- `start_remote_repair_session` +- `get_remote_repair_session` +- `cancel_remote_repair_session` + +Return the exact TypeScript types from Task 1. + +**Step 4: Run test to verify it passes** + +Run: `npm test -- use-api-extra` +Expected: PASS. 
+ +**Step 5: Commit** + +```bash +git add src/lib/api.ts src/lib/types.ts src/lib/__tests__/use-api-extra.test.ts +git commit -m "feat: add remote repair API bindings" +``` + +### Task 3: Create Rust remote repair type module + +**Files:** +- Create: `src-tauri/src/remote_repair/types.rs` +- Modify: `src-tauri/src/lib.rs` +- Test: `src-tauri/src/remote_repair/types.rs` + +**Step 1: Write the failing test** + +Add unit tests in `src-tauri/src/remote_repair/types.rs` verifying: + +- `RemoteRepairStepType` deserializes supported step kinds +- `RemoteRepairStopPolicy` provides safe defaults +- invalid step types fail deserialization + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_repair::types` +Expected: FAIL because the module does not exist. + +**Step 3: Write minimal implementation** + +Create `src-tauri/src/remote_repair/types.rs` with serde-serializable definitions for: + +- session status +- plan +- step +- stop policy +- step result +- session snapshot +- planner request/response payloads + +Add `pub mod remote_repair;` in `src-tauri/src/lib.rs`. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_repair::types` +Expected: PASS. + +**Step 5: Commit** + +```bash +git add src-tauri/src/lib.rs src-tauri/src/remote_repair/types.rs +git commit -m "feat: define remote repair rust types" +``` + +### Task 4: Implement local policy validation + +**Files:** +- Create: `src-tauri/src/remote_repair/policy.rs` +- Modify: `src-tauri/src/remote_repair/types.rs` +- Test: `src-tauri/src/remote_repair/policy.rs` + +**Step 1: Write the failing test** + +Add tests that verify: + +- `write_file` outside allowed paths is blocked +- `run_command` without `allowlist_tag` is blocked +- unknown `allowlist_tag` is blocked +- a safe service restart plan is accepted + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_repair::policy` +Expected: FAIL because validation is not implemented. 
+ +**Step 3: Write minimal implementation** + +Implement: + +- allowed step kinds +- allowed path matcher for `~/.openclaw` and explicit runtime dirs +- allowlist tags mapped to controlled command prefixes/templates +- result model describing `blocked` reason per step + +Do not execute anything yet; only validate. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_repair::policy` +Expected: PASS. + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_repair/policy.rs src-tauri/src/remote_repair/types.rs +git commit -m "feat: add remote repair policy validation" +``` + +### Task 5: Implement SSH-backed step executor + +**Files:** +- Create: `src-tauri/src/remote_repair/executor.rs` +- Modify: `src-tauri/src/ssh.rs` +- Modify: `src-tauri/src/remote_repair/types.rs` +- Test: `src-tauri/src/remote_repair/executor.rs` + +**Step 1: Write the failing test** + +Add executor tests with a fake transport that verify: + +- a validated `run_command` step returns `passed` +- a timed out step returns `failed` +- a blocked step is never executed +- `stop` returns a skipped/no-op result with explanatory message + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_repair::executor` +Expected: FAIL because no executor exists. + +**Step 3: Write minimal implementation** + +Create an executor abstraction over SSH operations so tests can use a fake transport. + +Support: + +- `run_command` +- `restart_service` +- `read_file` +- `write_file` +- `collect_logs` +- `health_check` +- `stop` + +Reuse `SshConnectionPool` for the real implementation, but keep transport injection for tests. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_repair::executor` +Expected: PASS. 
+ +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_repair/executor.rs src-tauri/src/ssh.rs src-tauri/src/remote_repair/types.rs +git commit -m "feat: add remote repair ssh executor" +``` + +### Task 6: Implement session store and orchestration loop + +**Files:** +- Create: `src-tauri/src/remote_repair/session.rs` +- Create: `src-tauri/src/remote_repair/orchestrator.rs` +- Create: `src-tauri/src/remote_repair/mod.rs` +- Modify: `src-tauri/src/lib.rs` +- Test: `src-tauri/src/remote_repair/orchestrator.rs` + +**Step 1: Write the failing test** + +Add orchestration tests using a fake planner and fake executor to verify: + +- healthy-after-first-round completes the session +- repeated failures trigger `session_exhausted` +- blocked plan triggers `policy_blocked` +- planner error triggers `planning_failed` + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_repair::orchestrator` +Expected: FAIL because session/orchestrator modules do not exist. + +**Step 3: Write minimal implementation** + +Implement: + +- in-memory session registry keyed by session ID +- state transitions from `diagnosing` through `completed/blocked/failed` +- stop policy handling +- round history +- session snapshot read API + +Use injected planner and executor traits so the loop is fully testable without real SSH or bot B. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_repair::orchestrator` +Expected: PASS. 
+ +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_repair/mod.rs src-tauri/src/remote_repair/session.rs src-tauri/src/remote_repair/orchestrator.rs src-tauri/src/lib.rs +git commit -m "feat: add remote repair orchestration loop" +``` + +### Task 7: Integrate remote Doctor recheck into orchestrator + +**Files:** +- Modify: `src-tauri/src/commands/doctor.rs` +- Modify: `src-tauri/src/remote_repair/orchestrator.rs` +- Test: `src-tauri/src/remote_repair/orchestrator.rs` + +**Step 1: Write the failing test** + +Extend orchestration tests to verify the session only ends in `completed` when a post-round Doctor recheck returns healthy, and remains failing when planner says stop but Doctor still reports issues. + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_repair::orchestrator` +Expected: FAIL because Doctor recheck is not wired into session completion logic. + +**Step 3: Write minimal implementation** + +Use the existing remote Doctor path as the health truth source for remote instances. The orchestrator must: + +- run an initial diagnosis for the target host +- re-run diagnosis after each round +- only mark success on healthy diagnosis + +Keep diagnosis data in the session snapshot so the frontend can render it. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_repair::orchestrator` +Expected: PASS. 
+ +**Step 5: Commit** + +```bash +git add src-tauri/src/commands/doctor.rs src-tauri/src/remote_repair/orchestrator.rs +git commit -m "feat: require doctor recheck for remote repair completion" +``` + +### Task 8: Add planner client with mock-first fallback + +**Files:** +- Create: `src-tauri/src/remote_repair/planner_client.rs` +- Modify: `src-tauri/src/remote_repair/orchestrator.rs` +- Test: `src-tauri/src/remote_repair/planner_client.rs` + +**Step 1: Write the failing test** + +Add planner client tests verifying: + +- valid planner response parses into `RemoteRepairPlan` +- invalid JSON response becomes `planning_failed` +- mock planner mode returns a deterministic safe plan for a gateway restart scenario + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_repair::planner_client` +Expected: FAIL because the planner client does not exist. + +**Step 3: Write minimal implementation** + +Implement a planner trait and client module with: + +- a mock planner for local development/tests +- a real planner adapter interface placeholder for bot B +- request/response normalization and schema checks + +Do not hardcode network transport details into the orchestrator. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_repair::planner_client` +Expected: PASS. 
+ +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_repair/planner_client.rs src-tauri/src/remote_repair/orchestrator.rs +git commit -m "feat: add remote repair planner client abstraction" +``` + +### Task 9: Expose Tauri commands for session lifecycle + +**Files:** +- Create: `src-tauri/src/commands/remote_repair.rs` +- Modify: `src-tauri/src/commands/mod.rs` +- Modify: `src-tauri/src/lib.rs` +- Test: `src-tauri/tests/commands_delegation.rs` + +**Step 1: Write the failing test** + +Add command delegation coverage in `src-tauri/tests/commands_delegation.rs` for: + +- `start_remote_repair_session` +- `get_remote_repair_session` +- `cancel_remote_repair_session` + +The test should assert the commands are registered and return serializable results. + +**Step 2: Run test to verify it fails** + +Run: `cargo test --test commands_delegation` +Expected: FAIL because the commands are not registered. + +**Step 3: Write minimal implementation** + +Add Tauri commands that: + +- start a session for a `hostId` +- return the current session snapshot +- cancel a running session + +Register them in `src-tauri/src/lib.rs` and re-export from `src-tauri/src/commands/mod.rs`. + +**Step 4: Run test to verify it passes** + +Run: `cargo test --test commands_delegation` +Expected: PASS. + +**Step 5: Commit** + +```bash +git add src-tauri/src/commands/remote_repair.rs src-tauri/src/commands/mod.rs src-tauri/src/lib.rs src-tauri/tests/commands_delegation.rs +git commit -m "feat: add remote repair tauri commands" +``` + +### Task 10: Add progress events for frontend updates + +**Files:** +- Modify: `src-tauri/src/remote_repair/orchestrator.rs` +- Modify: `src-tauri/src/commands/remote_repair.rs` +- Test: `src-tauri/src/remote_repair/orchestrator.rs` + +**Step 1: Write the failing test** + +Add a test proving that round start, step completion, blocked, and completed transitions emit progress snapshots serializable as frontend events. 
+ +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_repair::orchestrator` +Expected: FAIL because event payloads are not emitted. + +**Step 3: Write minimal implementation** + +Emit Tauri events similar to `doctor:assistant-progress`, with event payload containing: + +- `sessionId` +- `status` +- `round` +- `stepId` +- `stepStatus` +- `message` + +Keep event names stable and explicit, for example `remote-repair:progress`. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_repair::orchestrator` +Expected: PASS. + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_repair/orchestrator.rs src-tauri/src/commands/remote_repair.rs +git commit -m "feat: emit remote repair progress events" +``` + +### Task 11: Build Doctor page remote repair UI + +**Files:** +- Modify: `src/pages/Doctor.tsx` +- Create: `src/components/RemoteRepairTimeline.tsx` +- Create: `src/components/RemoteRepairPlanDialog.tsx` +- Create: `src/components/RemoteRepairSessionBanner.tsx` +- Test: `src/pages/__tests__/Doctor.test.tsx` +- Test: `src/components/__tests__/DoctorRecoveryOverview.test.tsx` + +**Step 1: Write the failing test** + +Add Doctor page tests verifying that when the active instance is remote and diagnosis needs repair: + +- the page shows a “请求修复计划” action +- plan summary can be reviewed before execution +- progress UI updates when a session snapshot changes +- blocked/failed/completed states render distinct messages + +**Step 2: Run test to verify it fails** + +Run: `npm test -- Doctor` +Expected: FAIL because the remote repair UI does not exist. + +**Step 3: Write minimal implementation** + +Implement: + +- remote repair CTA in `Doctor.tsx` +- session banner showing status/round +- dialog summarizing returned plan before execution +- step timeline with latest stdout/stderr preview + +Preserve existing Doctor flows for local repair and rescue-bot repair. 
**Step 4: Run test to verify it passes**

Run: `npm test -- Doctor`
Expected: PASS.

**Step 5: Commit**

```bash
git add src/pages/Doctor.tsx src/components/RemoteRepairTimeline.tsx src/components/RemoteRepairPlanDialog.tsx src/components/RemoteRepairSessionBanner.tsx src/pages/__tests__/Doctor.test.tsx src/components/__tests__/DoctorRecoveryOverview.test.tsx
git commit -m "feat: add remote repair doctor ui"
```

### Task 12: Persist audit logs for remote repair sessions

**Files:**
- Create: `src-tauri/src/remote_repair/audit.rs`
- Modify: `src-tauri/src/remote_repair/orchestrator.rs`
- Modify: `src-tauri/src/models.rs`
- Test: `src-tauri/src/remote_repair/audit.rs`

**Step 1: Write the failing test**

Add tests verifying that each session writes an audit record containing:

- session metadata
- round summaries
- step results
- terminal status

The audit file should land under the ClawPal data directory, not the OpenClaw config directory.

**Step 2: Run test to verify it fails**

Run: `cargo test remote_repair::audit`
Expected: FAIL because audit persistence is not implemented.

**Step 3: Write minimal implementation**

Write audit records under a deterministic path such as `.clawpal/remote-repair/<session-id>.json`, using existing app data path resolution helpers.

**Step 4: Run test to verify it passes**

Run: `cargo test remote_repair::audit`
Expected: PASS.

**Step 5: Commit**

```bash
git add src-tauri/src/remote_repair/audit.rs src-tauri/src/remote_repair/orchestrator.rs src-tauri/src/models.rs
git commit -m "feat: persist remote repair audit logs"
```

### Task 13: Document runtime configuration and operator guidance

**Files:**
- Modify: `README.md`
- Modify: `docs/mvp-checklist.md`
- Modify: `docs/plans/2026-03-16-remote-repair-design.md`

**Step 1: Write the failing test**

No automated test. 
Perform a docs gap review and verify the repo does not yet document remote repair session flow, safety boundaries, or operator-visible failure modes. + +**Step 2: Verify missing documentation** + +Run: `rg -n "remote repair|远程修复|repair plan" README.md docs/mvp-checklist.md docs/plans/2026-03-16-remote-repair-design.md` +Expected: the runtime/operator guidance is incomplete. + +**Step 3: Write minimal implementation** + +Document: + +- how remote repair works end to end +- why plans are locally validated +- where audit logs are stored +- what is intentionally blocked in v1 +- acceptance checklist entries for remote repair + +**Step 4: Verify documentation is present** + +Run: `rg -n "remote repair|远程修复|repair plan|audit" README.md docs/mvp-checklist.md docs/plans/2026-03-16-remote-repair-design.md` +Expected: matching lines in all updated docs. + +**Step 5: Commit** + +```bash +git add README.md docs/mvp-checklist.md docs/plans/2026-03-16-remote-repair-design.md +git commit -m "docs: add remote repair operator guidance" +``` + +### Task 14: Final verification + +**Files:** +- Verify only + +**Step 1: Run targeted Rust tests** + +Run: `cargo test remote_repair` +Expected: PASS. + +**Step 2: Run targeted frontend tests** + +Run: `npm test -- Doctor` +Expected: PASS. + +**Step 3: Run broader command wiring tests** + +Run: `cargo test --test commands_delegation` +Expected: PASS. + +**Step 4: Run project build checks** + +Run: `npm run build` +Expected: PASS. + +Run: `cargo test` +Expected: PASS or known unrelated failures documented. + +**Step 5: Commit** + +```bash +git add . 
+git commit -m "feat: add structured remote repair flow" +``` From 10a13c9a40d3e3dc7385281fd586f37efd43b93c Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Wed, 18 Mar 2026 23:10:48 +0800 Subject: [PATCH 02/20] feat: remote doctor --- Cargo.lock | 91 +- Cargo.toml | 8 +- agents.md | 117 +- ...2026-03-17-remote-doctor-service-design.md | 162 + ...mote-doctor-service-implementation-plan.md | 341 ++ ...6-03-18-live-raw-config-repair-e2e-plan.md | 52 + .../2026-03-18-raw-config-recovery-plan.md | 80 + ...emote-doctor-agent-investigation-design.md | 118 + ...-remote-doctor-agent-investigation-plan.md | 121 + ...escue-activation-failure-diagnosis-plan.md | 79 + openclaw-gateway-client/Cargo.lock | 1422 ++++++ openclaw-gateway-client/Cargo.toml | 28 + openclaw-gateway-client/src/auth_store.rs | 95 + openclaw-gateway-client/src/client.rs | 333 ++ openclaw-gateway-client/src/error.rs | 17 + openclaw-gateway-client/src/identity.rs | 97 + openclaw-gateway-client/src/lib.rs | 7 + openclaw-gateway-client/src/node.rs | 76 + openclaw-gateway-client/src/protocol.rs | 141 + openclaw-gateway-client/src/tls.rs | 17 + openclaw-gateway-client/tests/auth_store.rs | 37 + .../tests/client_handshake.rs | 71 + openclaw-gateway-client/tests/client_rpc.rs | 123 + .../tests/connect_payload.rs | 157 + .../tests/device_identity.rs | 70 + openclaw-gateway-client/tests/node_client.rs | 165 + .../tests/protocol_roundtrip.rs | 80 + .../tests/tls_fingerprint.rs | 38 + src-tauri/Cargo.toml | 6 +- src-tauri/src/bridge_client.rs | 75 +- src-tauri/src/commands/preferences.rs | 91 + src-tauri/src/lib.rs | 9 +- src-tauri/src/node_client.rs | 116 +- src-tauri/src/remote_doctor.rs | 4340 +++++++++++++++++ src/components/DoctorRecoveryOverview.tsx | 32 +- src/components/SettingsAlphaFeaturesCard.tsx | 60 + .../__tests__/DoctorRecoveryOverview.test.tsx | 72 + .../SettingsAlphaFeaturesCard.test.tsx | 8 + .../__tests__/doctor-page-features.test.ts | 18 + src/lib/__tests__/use-api-extra.test.ts | 15 + 
src/lib/api.ts | 14 +- src/lib/types.ts | 46 + src/lib/use-api.ts | 12 + src/locales/en.json | 13 + src/locales/zh.json | 13 + src/pages/Doctor.tsx | 65 +- src/pages/Settings.tsx | 36 + src/pages/__tests__/Doctor.test.tsx | 3 +- 48 files changed, 9135 insertions(+), 52 deletions(-) create mode 100644 docs/plans/2026-03-17-remote-doctor-service-design.md create mode 100644 docs/plans/2026-03-17-remote-doctor-service-implementation-plan.md create mode 100644 docs/plans/2026-03-18-live-raw-config-repair-e2e-plan.md create mode 100644 docs/plans/2026-03-18-raw-config-recovery-plan.md create mode 100644 docs/plans/2026-03-18-remote-doctor-agent-investigation-design.md create mode 100644 docs/plans/2026-03-18-remote-doctor-agent-investigation-plan.md create mode 100644 docs/plans/2026-03-18-rescue-activation-failure-diagnosis-plan.md create mode 100644 openclaw-gateway-client/Cargo.lock create mode 100644 openclaw-gateway-client/Cargo.toml create mode 100644 openclaw-gateway-client/src/auth_store.rs create mode 100644 openclaw-gateway-client/src/client.rs create mode 100644 openclaw-gateway-client/src/error.rs create mode 100644 openclaw-gateway-client/src/identity.rs create mode 100644 openclaw-gateway-client/src/lib.rs create mode 100644 openclaw-gateway-client/src/node.rs create mode 100644 openclaw-gateway-client/src/protocol.rs create mode 100644 openclaw-gateway-client/src/tls.rs create mode 100644 openclaw-gateway-client/tests/auth_store.rs create mode 100644 openclaw-gateway-client/tests/client_handshake.rs create mode 100644 openclaw-gateway-client/tests/client_rpc.rs create mode 100644 openclaw-gateway-client/tests/connect_payload.rs create mode 100644 openclaw-gateway-client/tests/device_identity.rs create mode 100644 openclaw-gateway-client/tests/node_client.rs create mode 100644 openclaw-gateway-client/tests/protocol_roundtrip.rs create mode 100644 openclaw-gateway-client/tests/tls_fingerprint.rs create mode 100644 src-tauri/src/remote_doctor.rs diff --git 
a/Cargo.lock b/Cargo.lock index 3b1bff67..69dfa083 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -547,6 +547,7 @@ dependencies = [ "fix-path-env", "futures", "futures-util", + "getrandom 0.2.17", "hostname", "indexmap 2.13.0", "json5", @@ -555,6 +556,7 @@ dependencies = [ "reqwest 0.12.28", "serde", "serde_json", + "sha2", "shell-words", "shellexpand", "tauri", @@ -563,7 +565,7 @@ dependencies = [ "tauri-plugin-updater", "thiserror 1.0.69", "tokio", - "tokio-tungstenite", + "tokio-tungstenite 0.24.0", "uuid", ] @@ -939,6 +941,12 @@ dependencies = [ "cipher", ] +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + [[package]] name = "digest" version = "0.10.7" @@ -2888,6 +2896,28 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "openclaw-gateway-client" +version = "0.1.0" +dependencies = [ + "base64 0.22.1", + "ed25519-dalek", + "futures", + "hex", + "pretty_assertions", + "rand_core 0.6.4", + "serde", + "serde_json", + "sha2", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tokio-tungstenite 0.28.0", + "tracing", + "url", + "uuid", +] + [[package]] name = "openssl-probe" version = "0.2.1" @@ -3367,6 +3397,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "pretty_assertions" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" +dependencies = [ + "diff", + "yansi", +] + [[package]] name = "prettyplease" version = "0.2.37" @@ -5326,10 +5366,22 @@ dependencies = [ "rustls-pki-types", "tokio", "tokio-rustls", - "tungstenite", + "tungstenite 0.24.0", 
"webpki-roots 0.26.11", ] +[[package]] +name = "tokio-tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite 0.28.0", +] + [[package]] name = "tokio-util" version = "0.7.18" @@ -5491,9 +5543,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ "pin-project-lite", + "tracing-attributes", "tracing-core", ] +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "tracing-core" version = "0.1.36" @@ -5551,6 +5615,23 @@ dependencies = [ "utf-8", ] +[[package]] +name = "tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" +dependencies = [ + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "rand 0.9.2", + "sha1", + "thiserror 2.0.18", + "utf-8", +] + [[package]] name = "typeid" version = "1.0.3" @@ -6784,6 +6865,12 @@ dependencies = [ "rustix", ] +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + [[package]] name = "yoke" version = "0.8.1" diff --git a/Cargo.toml b/Cargo.toml index 94bc4ae7..5825a4e8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,4 +1,8 @@ [workspace] -members = ["clawpal-core", "clawpal-cli", "src-tauri"] +members = [ + "clawpal-core", + "clawpal-cli", + "src-tauri", + "openclaw-gateway-client", +] resolver = "2" - diff --git a/agents.md 
b/agents.md index f061a817..822c690a 100644 --- a/agents.md +++ b/agents.md @@ -1,2 +1,115 @@ - -Moved to [`AGENTS.md`](AGENTS.md). +# AGENTS.md + +ClawPal 是基于 Tauri 的 OpenClaw 桌面伴侣应用,覆盖安装、配置、Doctor 诊断、版本回滚、远程 SSH 管理和多平台打包发布。 + +技术栈:Tauri v2 + Rust + React + TypeScript + Bun + +## 目录说明 + +``` +src/ # 前端(React/TypeScript) +src/lib/api.ts # 前端对 Tauri command 的统一封装 +src-tauri/src/commands/ # Tauri command 层(参数校验、权限检查、错误映射) +src-tauri/src/commands/mod.rs # Command 路由与公共逻辑 +clawpal-core/ # 核心业务逻辑(与 Tauri 解耦) +clawpal-cli/ # CLI 接口 +docs/architecture/ # 模块边界、分层原则、核心数据流 +docs/decisions/ # 关键设计决策(ADR) +docs/plans/ # 任务计划与实施方案 +docs/runbooks/ # 启动、调试、发布、回滚、故障处理 +docs/testing/ # 测试矩阵与验证策略 +harness/fixtures/ # 最小稳定测试数据 +harness/artifacts/ # 日志、截图、trace、失败产物收集 +Makefile # 统一命令入口 +``` + +## 启动命令 + +本项目使用 `Makefile` 作为统一命令入口(无需额外安装,macOS/Linux 自带 `make`): + +```bash +make install # 安装前端依赖 +make dev # 启动开发模式(前端 + Tauri) +make dev-frontend # 仅启动前端 +make test-unit # 运行所有单元测试(前端 + Rust) +make lint # 运行所有 lint(TypeScript + Rust fmt + clippy) +make fmt # 自动修复 Rust 格式 +make build # 构建 Tauri 应用(debug) +make ci # 本地运行完整 CI 检查 +make doctor # 检查开发环境依赖 +``` + +完整命令列表:`make help` + +底层命令(不使用 make 时): + +```bash +bun install # 安装前端依赖 +bun run dev:tauri # 启动开发模式(前端 + Tauri) +bun run dev # 仅启动前端 +cargo test --workspace # Rust 单元测试 +bun test # 前端单元测试 +bun run typecheck # TypeScript 类型检查 +cargo fmt --check # Rust 格式检查 +cargo clippy # Rust lint +``` + +## 代码分层约束 + +### UI 层 (`src/`) +- 不直接在组件中使用 `invoke("xxx")`,通过 `src/lib/api.ts` 封装调用 +- 不直接访问原生能力 +- 不拼接 command 名称和错误字符串 + +### Command 层 (`src-tauri/src/commands/`) +- 保持薄层:参数校验、权限检查、错误映射、事件分发 +- 不堆积业务编排逻辑 +- 不直接写文件系统或数据库 + +### Domain 层 (`clawpal-core/`) +- 核心业务规则和用例编排 +- 尽量不依赖 `tauri::*` +- 输入输出保持普通 Rust 类型 + +### Adapter 层 +- 所有原生副作用(文件系统、shell、通知、剪贴板、updater)从 adapter 层进入 +- 须提供测试替身(mock/fake) + +## 提交与 PR 要求 + +- Conventional Commits: `feat:` / `fix:` / `docs:` / `refactor:` / `chore:` +- 分支命名: `feat/*` / `fix/*` / `chore/*` +- PR 变更建议 ≤ 500 
行(不含自动生成文件) +- PR 必须通过所有 CI gate +- 涉及 UI 改动须附截图 +- 涉及权限/安全改动须附 capability 变更说明 + +## 新增 Command 检查清单 + +- [ ] Command 定义在 `src-tauri/src/commands/` 对应模块 +- [ ] 参数校验和错误映射完整 +- [ ] 已在 `lib.rs` 的 `invoke_handler!` 中注册 +- [ ] 前端 API 封装已更新 +- [ ] 相关文档已更新 + +## 安全约束 + +- 禁止提交明文密钥或配置路径泄露 +- Command 白名单制,新增原生能力必须补文档和验证 +- 对 `~/.openclaw` 的读写需包含异常回退和用户可见提示 +- 默认最小权限原则 + +## 常见排查路径 + +- **Command 调用失败** → 见 `docs/runbooks/command-debugging.md` +- **本地开发启动** → 见 `docs/runbooks/local-development.md` +- **版本发布** → 见 `docs/runbooks/release-process.md` +- **打包后行为与 dev 不一致** → 检查资源路径、权限配置、签名、窗口事件 +- **跨平台差异** → 检查 adapter 层平台分支和 CI 构建日志 + +## 参考文档 + +- [Harness Engineering 标准](https://github.com/lay2dev/clawpal/issues/123) +- [落地计划](docs/plans/2026-03-16-harness-engineering-standard.md) +- [架构设计](docs/architecture/design.md) +- [测试矩阵](docs/testing/business-flow-test-matrix.md) diff --git a/docs/plans/2026-03-17-remote-doctor-service-design.md b/docs/plans/2026-03-17-remote-doctor-service-design.md new file mode 100644 index 00000000..9824c7ea --- /dev/null +++ b/docs/plans/2026-03-17-remote-doctor-service-design.md @@ -0,0 +1,162 @@ +# ClawPal Remote Doctor Service Design + +日期:2026-03-17 + +## 1. 目标 + +为 ClawPal Doctor 页新增一条“远程 doctor 修复”路径。该路径通过 OpenClaw gateway websocket 协议连接远程 doctor agent,由 agent 生成检测/修复流程,ClawPal 本地执行命令并将结果回传,直到检测结果无问题或达到最大轮次。 + +本期同时保留现有“本地修复”路径,两条修复方式都可以作用于 local 或 ssh OpenClaw 实例。修复方式与实例位置解耦。 + +## 2. 用户行为 + +用户进入 Doctor 页,看到两个修复入口: + +- 本地修复 +- 远程 Doctor 修复 + +两者共享同一份诊断结果,但执行策略不同: + +- 本地修复:继续复用当前 `doctor assistant` / `rescue` 逻辑 +- 远程 Doctor 修复:通过 gateway doctor agent 获取 plan 并循环执行 + +## 3. 运行闭环 + +远程 Doctor 修复严格按以下顺序运行: + +1. ClawPal 向远程 doctor agent 请求“检测 OpenClaw 问题的流程” +2. ClawPal 执行检测流程中的命令 +3. ClawPal 将检测结果回传给远程 doctor agent,并请求“修复问题的流程” +4. ClawPal 执行修复流程中的命令 +5. ClawPal 再次请求“检测 OpenClaw 问题的流程” +6. 若检测结果仍有问题,则继续下一轮 +7. 若检测结果无问题,则结束并标记成功 +8. 累计轮次超过 50 次则报错 + +这里的“轮次”定义为一次 plan 请求与执行完成。完整会话会在 detect / repair 两种 plan 之间交替推进。 + +## 4. 
协议边界 + +### 4.1 传输层 + +- 使用 `openclaw-gateway-client` 建立新的 websocket client +- 不复用现有 `bridge_client` +- 为 remote doctor 会话单独维护连接、请求和事件订阅 + +### 4.2 目标标识 + +每次发给 doctor agent 的请求必须显式说明目标是本地还是远程 OpenClaw: + +- `targetLocation: "local_openclaw" | "remote_openclaw"` +- `instanceId` +- `hostId`(如果是 ssh 实例) + +注意:这里的“本地/远程”描述的是被修复目标 OpenClaw 的位置,不代表所选修复方式。 + +### 4.3 Plan 结构 + +第一版不引入完整 DSL,而采用更贴近现有执行器的结构化命令 plan: + +```json +{ + "planId": "doctor_plan_xxx", + "planKind": "detect", + "summary": "Check gateway and config health", + "done": false, + "commands": [ + { + "argv": ["openclaw", "doctor", "--json"], + "timeoutSec": 20, + "purpose": "collect diagnosis", + "continueOnFailure": false + } + ] +} +``` + +约束: + +- 第一版只支持命令数组 +- 每条命令必须包含 `argv` +- `timeoutSec` 缺省时由 ClawPal 写入安全默认值 +- `done: true` 仅表示 agent 建议停止,最终成功仍由检测结果决定 + +## 5. 状态机 + +远程 Doctor 会话状态: + +- `idle` +- `planning_detect` +- `executing_detect` +- `reporting_detect` +- `planning_repair` +- `executing_repair` +- `reporting_repair` +- `completed` +- `failed` + +状态切换规则: + +- detect 执行完成后一定进入 repair 请求,除非 detect 结果已明确无问题 +- repair 执行完成后一定回到 detect 请求 +- 任一阶段出错直接进入 `failed` +- 轮次超过 50 次进入 `failed` + +## 6. 成功判定 + +最终成功只取决于最新一次检测结果: + +- 检测结果无问题 => `completed` +- doctor agent 声称 success,但检测结果仍异常 => 继续循环 +- doctor agent 不再返回修复 plan,但检测结果仍异常 => 失败 + +## 7. 日志与审计 + +必须记录完整远程 Doctor 闭环日志,包括: + +- session id +- 当前实例 id / host id +- 修复方式:`remote_doctor` +- target location:`local_openclaw` 或 `remote_openclaw` +- 当前阶段:detect / repair +- 当前轮次 +- 发给 agent 的请求摘要 +- agent 返回的 plan 摘要 +- 每条命令的 `argv` +- 每条命令的退出码、耗时、stdout、stderr、是否超时 +- agent 回传摘要 +- 最终结束原因:success / exhausted / planner_error / execution_error + +实时进度事件沿用 Doctor 页现有模式,新增专用 event,供页面展示当前轮次和最近一条命令。 + +## 8. 
UI 方案 + +Doctor 页从单一修复按钮改为两个入口: + +- `本地修复` +- `远程 Doctor 修复` + +规则: + +- 两个按钮都基于当前实例上下文执行 +- 若当前实例是 ssh,二者都要求 SSH 已连接 +- 本地修复走现有 `repairDoctorAssistant` +- 远程 Doctor 修复走新的 `startRemoteDoctorRepair` + +页面显示: + +- 当前运行中的修复方式 +- 当前轮次 +- 当前阶段 +- 最新进度行 +- 完成/失败结果 + +## 9. 非目标 + +本期不做: + +- 远程 Doctor 修复的取消/恢复 +- plan 可视化编辑 +- 任意 shell 脚本上传执行 +- 同时并发多个远程 Doctor 会话 +- 对现有本地修复链路做行为重构 diff --git a/docs/plans/2026-03-17-remote-doctor-service-implementation-plan.md b/docs/plans/2026-03-17-remote-doctor-service-implementation-plan.md new file mode 100644 index 00000000..6beefe20 --- /dev/null +++ b/docs/plans/2026-03-17-remote-doctor-service-implementation-plan.md @@ -0,0 +1,341 @@ +# Remote Doctor Service Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Add a new remote doctor repair flow that requests detect/repair plans from a gateway websocket agent, executes returned commands locally against the selected OpenClaw target, reports results back, and loops until health is clean or 50 rounds are exhausted. + +**Architecture:** Keep existing local repair behavior intact and add a second repair mode. The frontend exposes two repair actions, while Tauri owns a dedicated remote doctor websocket client, session orchestrator, command executor, and progress logging. The orchestrator alternates between detect and repair plans and treats the latest detection result as the only success source. 
+ +**Tech Stack:** React 18, TypeScript, Vitest, Tauri 2, Rust, Tokio, openclaw-gateway-client, Cargo test + +--- + +### Task 1: Add frontend remote doctor types + +**Files:** +- Modify: `src/lib/types.ts` +- Test: `src/lib/__tests__/doctor-page-features.test.ts` + +**Step 1: Write the failing test** + +Add a TypeScript-facing test case that constructs a remote doctor repair session object with: + +- `mode: "remoteDoctor"` +- `status` +- `round` +- `phase` +- `lastPlanKind` +- `lastCommand` + +and uses it in existing doctor page feature helpers without type errors. + +**Step 2: Run test to verify it fails** + +Run: `npm test -- doctor-page-features` +Expected: FAIL because the remote doctor types do not exist. + +**Step 3: Write minimal implementation** + +Add exported types for: + +- `DoctorRepairMode` +- `RemoteDoctorPlanKind` +- `RemoteDoctorSessionStatus` +- `RemoteDoctorCommandPlan` +- `RemoteDoctorCommandResult` +- `RemoteDoctorRepairResult` +- `RemoteDoctorProgressEvent` + +**Step 4: Run test to verify it passes** + +Run: `npm test -- doctor-page-features` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src/lib/types.ts src/lib/__tests__/doctor-page-features.test.ts +git commit -m "feat: add remote doctor frontend types" +``` + +### Task 2: Add frontend API bindings + +**Files:** +- Modify: `src/lib/api.ts` +- Modify: `src/lib/use-api.ts` +- Test: `src/lib/__tests__/use-api-extra.test.ts` + +**Step 1: Write the failing test** + +Add a test that asserts the API exposes a `startRemoteDoctorRepair` method and that `useApi()` returns it for both local and remote instances. + +**Step 2: Run test to verify it fails** + +Run: `npm test -- use-api-extra` +Expected: FAIL because the API method does not exist. 
+ +**Step 3: Write minimal implementation** + +Add a Tauri wrapper for: + +- `start_remote_doctor_repair` + +The method must accept the current target context: + +- `instanceId` +- `targetLocation` +- `hostId` when available + +**Step 4: Run test to verify it passes** + +Run: `npm test -- use-api-extra` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src/lib/api.ts src/lib/use-api.ts src/lib/__tests__/use-api-extra.test.ts +git commit -m "feat: add remote doctor repair api binding" +``` + +### Task 3: Expose two repair actions in Doctor UI + +**Files:** +- Modify: `src/pages/Doctor.tsx` +- Modify: `src/components/DoctorRecoveryOverview.tsx` +- Test: `src/pages/__tests__/Doctor.test.tsx` + +**Step 1: Write the failing test** + +Add a UI test that verifies: + +- the Doctor page shows both `本地修复` and `远程 Doctor 修复` +- clicking the local button still calls the existing repair method +- clicking the remote doctor button calls the new API method + +**Step 2: Run test to verify it fails** + +Run: `npm test -- Doctor.test` +Expected: FAIL because only one repair action exists. 
+ +**Step 3: Write minimal implementation** + +Update the Doctor page to: + +- keep the existing diagnose button behavior +- show two repair actions when diagnosis indicates problems +- track which repair mode is running +- listen for remote doctor progress events + +**Step 4: Run test to verify it passes** + +Run: `npm test -- Doctor.test` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src/pages/Doctor.tsx src/components/DoctorRecoveryOverview.tsx src/pages/__tests__/Doctor.test.tsx +git commit -m "feat: add dual repair actions to doctor page" +``` + +### Task 4: Define Rust remote doctor contract + +**Files:** +- Create: `src-tauri/src/remote_doctor/types.rs` +- Create: `src-tauri/src/remote_doctor/mod.rs` +- Modify: `src-tauri/src/lib.rs` +- Test: `src-tauri/src/remote_doctor/types.rs` + +**Step 1: Write the failing test** + +Add unit tests for: + +- `RemoteDoctorPlanKind` deserialization from `detect` and `repair` +- default `targetLocation` +- command result serialization + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::types` +Expected: FAIL because the module does not exist. 
+ +**Step 3: Write minimal implementation** + +Create serde types for: + +- repair request payload +- plan response payload +- command item +- command result +- session result +- progress payload + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::types` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/lib.rs src-tauri/src/remote_doctor/mod.rs src-tauri/src/remote_doctor/types.rs +git commit -m "feat: define remote doctor rust types" +``` + +### Task 5: Implement gateway doctor client + +**Files:** +- Create: `src-tauri/src/remote_doctor/client.rs` +- Test: `src-tauri/src/remote_doctor/client.rs` + +**Step 1: Write the failing test** + +Add tests with fake request/response transport verifying: + +- detect request payload includes target location +- repair request payload includes previous command results +- planner errors are surfaced as Rust errors + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::client` +Expected: FAIL because the client does not exist. 
+ +**Step 3: Write minimal implementation** + +Wrap `openclaw-gateway-client` and expose methods: + +- `request_detect_plan` +- `request_repair_plan` +- `report_plan_results` + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::client` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor/client.rs +git commit -m "feat: add remote doctor gateway client" +``` + +### Task 6: Implement command executor and log writer + +**Files:** +- Create: `src-tauri/src/remote_doctor/executor.rs` +- Create: `src-tauri/src/remote_doctor/log.rs` +- Test: `src-tauri/src/remote_doctor/executor.rs` + +**Step 1: Write the failing test** + +Add tests verifying: + +- a command plan is executed in order +- stdout/stderr/exit code are captured +- command timeout is reported +- local and remote target contexts select the proper runner + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::executor` +Expected: FAIL because the executor does not exist. 
+ +**Step 3: Write minimal implementation** + +Implement: + +- local target execution through existing local command helpers +- remote target execution through existing SSH helpers +- JSONL or line-oriented session logging + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::executor` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor/executor.rs src-tauri/src/remote_doctor/log.rs +git commit -m "feat: add remote doctor executor and logs" +``` + +### Task 7: Implement orchestrator loop and Tauri command + +**Files:** +- Create: `src-tauri/src/remote_doctor/orchestrator.rs` +- Modify: `src-tauri/src/commands/mod.rs` +- Modify: `src-tauri/src/main.rs` +- Test: `src-tauri/src/remote_doctor/orchestrator.rs` + +**Step 1: Write the failing test** + +Add orchestration tests covering: + +- detect clean on first round => success +- detect then repair then detect clean => success +- more than 50 rounds => exhausted error +- planner failure => error + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::orchestrator` +Expected: FAIL because the orchestrator does not exist. 
+ +**Step 3: Write minimal implementation** + +Implement: + +- alternating detect/repair plan loop +- max round guard at 50 +- progress event emission +- final result payload +- `start_remote_doctor_repair` Tauri command + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::orchestrator` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor/orchestrator.rs src-tauri/src/commands/mod.rs src-tauri/src/main.rs +git commit -m "feat: add remote doctor repair orchestration" +``` + +### Task 8: Run verification and update docs + +**Files:** +- Modify: `docs/mvp-checklist.md` +- Modify: `docs/plans/2026-03-17-remote-doctor-service-design.md` + +**Step 1: Run targeted frontend tests** + +Run: `npm test -- doctor-page-features` +Expected: PASS + +**Step 2: Run Doctor page tests** + +Run: `npm test -- Doctor.test` +Expected: PASS + +**Step 3: Run targeted Rust tests** + +Run: `cargo test remote_doctor` +Expected: PASS + +**Step 4: Run typecheck** + +Run: `npm run build` or `npx tsc --noEmit` +Expected: PASS + +**Step 5: Commit** + +```bash +git add docs/mvp-checklist.md docs/plans/2026-03-17-remote-doctor-service-design.md +git commit -m "docs: document remote doctor repair flow" +``` diff --git a/docs/plans/2026-03-18-live-raw-config-repair-e2e-plan.md b/docs/plans/2026-03-18-live-raw-config-repair-e2e-plan.md new file mode 100644 index 00000000..79ab4656 --- /dev/null +++ b/docs/plans/2026-03-18-live-raw-config-repair-e2e-plan.md @@ -0,0 +1,52 @@ +# Live Raw Config Repair E2E Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Add a live remote Doctor e2e that starts a Docker OpenClaw target, corrupts `openclaw.json`, runs real `clawpal-server` remote repair, and verifies the target becomes healthy again. + +**Architecture:** Reuse the existing live gateway and Docker SSH test fixtures in `src-tauri/src/remote_doctor.rs`. 
Add one new live e2e guarded by the existing URL/token env vars plus Docker availability. The test will create a remote host config, deliberately corrupt the target config over SSH, invoke `start_remote_doctor_repair_impl(...)`, then verify the config is valid JSON again and rescue diagnosis is healthy. + +**Tech Stack:** Rust, Tokio tests, Docker, SSH test fixture, real `clawpal-server` websocket gateway. + +--- + +### Task 1: Add failing live e2e + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` +- Test: `src-tauri/src/remote_doctor.rs` + +**Step 1: Write the failing test** + +Add a live e2e that: +- starts the Docker SSH target +- corrupts `/root/.openclaw/openclaw.json` +- calls `start_remote_doctor_repair_impl(...)` +- expects a successful repair + +**Step 2: Run test to verify it fails** + +Run: `cargo test -p clawpal --lib remote_doctor_live_gateway_repairs_unreadable_remote_config -- --nocapture` + +Expected: FAIL until the fixture/helpers are sufficient and the live path is wired for this scenario. + +**Step 3: Write minimal implementation** + +Add only the fixture/helper code needed for the new e2e. + +**Step 4: Run test to verify it passes** + +Run: `cargo test -p clawpal --lib remote_doctor_live_gateway_repairs_unreadable_remote_config -- --nocapture` + +Expected: PASS in an environment where the real `clawpal-server` supports raw-config repair. 
+ +### Task 2: Preserve existing remote doctor tests + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Run the broader test group** + +Run: `cargo test -p clawpal --lib remote_doctor -- --nocapture` + +Expected: PASS diff --git a/docs/plans/2026-03-18-raw-config-recovery-plan.md b/docs/plans/2026-03-18-raw-config-recovery-plan.md new file mode 100644 index 00000000..bb0895be --- /dev/null +++ b/docs/plans/2026-03-18-raw-config-recovery-plan.md @@ -0,0 +1,80 @@ +# Raw Config Recovery Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Allow remote Doctor to continue when the target `openclaw.json` is not parseable JSON by switching to a raw-config recovery path instead of failing before a repair plan can be requested. + +**Architecture:** Extend `src-tauri/src/remote_doctor.rs` so `clawpal_server` repair requests can carry a fallback raw config excerpt and structured unreadable-config metadata when `read_target_config(...)` fails with `primary.config.unreadable`. Keep normal JSON-based repair behavior unchanged for valid configs. Add logging that makes the fallback visible in the session log. + +**Tech Stack:** Rust, Tauri, existing config read commands, cargo test. + +--- + +### Task 1: Add failing tests for unreadable config fallback + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` +- Test: `src-tauri/src/remote_doctor.rs` + +**Step 1: Write the failing test** + +Add tests that expect: +- config parse failures to be converted into a raw config context payload +- final errors to avoid the old immediate `Failed to parse target config` failure in the clawpal-server planning path + +**Step 2: Run test to verify it fails** + +Run: `cargo test -p clawpal --lib unreadable_config_ -- --nocapture` + +Expected: FAIL because the fallback context helpers do not exist yet. 
+ +**Step 3: Write minimal implementation** + +Add the smallest helpers needed to construct raw config fallback context. + +**Step 4: Run test to verify it passes** + +Run: `cargo test -p clawpal --lib unreadable_config_ -- --nocapture` + +Expected: PASS + +### Task 2: Implement raw config recovery in clawpal_server path + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Add raw config read helper** + +Read the target config as raw text and try to parse JSON. If parsing fails: +- keep the raw text +- record parse error +- build a fallback request context instead of returning early + +**Step 2: Adjust clawpal_server plan request payload** + +When JSON config is unavailable: +- send `configExcerpt: null` +- send `configExcerptRaw` +- send `configParseError` +- log a `config_recovery_context` event + +**Step 3: Preserve normal valid-config behavior** + +Do not change the request payload for valid configs. + +### Task 3: Verify regression behavior + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Run focused tests** + +Run: `cargo test -p clawpal --lib unreadable_config_ -- --nocapture` + +Expected: PASS + +**Step 2: Run broader remote doctor tests** + +Run: `cargo test -p clawpal --lib remote_doctor -- --nocapture` + +Expected: PASS diff --git a/docs/plans/2026-03-18-remote-doctor-agent-investigation-design.md b/docs/plans/2026-03-18-remote-doctor-agent-investigation-design.md new file mode 100644 index 00000000..4953c841 --- /dev/null +++ b/docs/plans/2026-03-18-remote-doctor-agent-investigation-design.md @@ -0,0 +1,118 @@ +# Remote Doctor Agent Investigation Design + +## Summary + +当前远程修复在 `clawpal_server` 路径下依赖 `remote_repair_plan.*` 专用 planner。真实联调已经证明,这条路径对 `primary.config.unreadable` 只会返回 `doctorRediagnose`,不会生成坏 JSON 的诊断或修复步骤。目标是把这类场景切换为由 ClawPal 直接通过标准 gateway `agent` 会话生成步骤,ClawPal 只负责执行、回传结果和循环控制。 + +## Goals + +- 去掉 `remote_repair_plan.request` 作为远程修复默认路径 +- 对 `primary.config.unreadable` 场景不再硬编码修复逻辑 +- 让 agent 
先生成诊断步骤,再基于调查结果生成修复步骤 +- 保留现有命令执行器、日志、事件、轮次上限和 stall 检测 + +## Non-Goals + +- 本轮不切到完整 `node.invoke` 工具流 +- 本轮不保留 `clawpal_server` 作为默认远程修复协议 +- 本轮不重新引入自动 `manage_rescue_bot activate rescue` + +## Architecture + +远程修复改为三态状态机: + +- `diagnose` +- `investigate` +- `repair` + +ClawPal 在每轮拿到 rescue diagnosis 后: + +- 如果存在 `primary.config.unreadable`,下一轮进入 `investigate` +- 否则进入 `repair` +- 每轮执行完命令后重新诊断,直到健康或达到上限 + +agent 通过标准 gateway `agent` 方法返回 JSON 计划。ClawPal 继续使用现有 `PlanResponse` / `PlanCommand` 执行链,不再依赖 `remote_repair_plan.*`。 + +## Agent Prompt Model + +### Shared Context + +每次请求 agent 时都提供: + +- `targetLocation` +- `instanceId` +- `diagnosis` +- `configExcerpt` +- `configExcerptRaw` +- `configParseError` +- `previousResults` + +### Diagnose Prompt + +用途是获取下一步高层方向。通常只在初始轮次或修复后复诊时使用。 + +### Investigate Prompt + +用于 `primary.config.unreadable` 场景,约束如下: + +- 只允许返回诊断命令 +- 不允许直接写文件、删文件或覆盖配置 +- 必须先要求备份方案 +- 目标是解释配置为何不可解析,并收集最小修复所需证据 + +### Repair Prompt + +用于调查完成后的修复阶段,约束如下: + +- 必须引用前一轮调查结果 +- 写配置前必须先备份原文件 +- 变更尽量最小 +- 修复后必须要求 JSON 校验和重新诊断 + +## Execution Model + +ClawPal 继续负责: + +- 执行命令 +- 收集 `stdout/stderr/exitCode` +- 记录日志 +- 向 agent 回传 `previousResults` +- 维护轮次和停滞检测 + +ClawPal 不再硬编码坏 JSON 的具体修法。 + +## Logging + +保留现有 session 日志,新增或调整以下内容: + +- `plan_received` 支持 `planKind: investigate` +- `command_result` 记录调查命令结果 +- stall 检测要覆盖“连续无效 investigate”场景 +- `config_recovery_context` 继续记录 `configExcerptRaw` 是否存在和 parse error + +## Error Handling + +- 如果 agent 在 investigate 阶段返回写命令,ClawPal 直接拒绝执行并记录协议错误 +- 如果 agent 连续多轮只返回空调查或无效调查,触发 stall +- 如果 50 轮内仍未恢复健康,错误中保留最后一次 diagnosis 和最后一步类型 + +## Testing + +### Unit Tests + +- `primary.config.unreadable` 时状态机会先进入 `investigate` +- `investigate` prompt 明确只读约束 +- `repair` prompt 明确要求引用调查结果 + +### Live E2E + +- 启动 Docker OpenClaw 目标机 +- 故意写坏 `openclaw.json` +- 通过真实 gateway `agent` 路径运行远程修复 +- 断言配置恢复为合法 JSON,诊断变为健康 + +## Migration + +- `ClawpalServer` 从默认协议移除 +- `remote_repair_plan.*` 路径降级为兼容分支或后续删除 +- 默认远程修复协议改为 agent 驱动 diff --git 
a/docs/plans/2026-03-18-remote-doctor-agent-investigation-plan.md b/docs/plans/2026-03-18-remote-doctor-agent-investigation-plan.md new file mode 100644 index 00000000..dc942247 --- /dev/null +++ b/docs/plans/2026-03-18-remote-doctor-agent-investigation-plan.md @@ -0,0 +1,121 @@ +# Remote Doctor Agent Investigation Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Replace the default `clawpal_server` remote repair planner with an agent-driven investigate/repair loop that can handle unreadable JSON configs without hardcoded repair logic. + +**Architecture:** Update `src-tauri/src/remote_doctor.rs` to make the standard gateway `agent` path the default remote repair flow. Extend the plan state machine with a new `investigate` phase for `primary.config.unreadable`, generate phase-specific agent prompts, and keep command execution/logging in ClawPal. Preserve the existing `clawpal_server` path only as non-default fallback during migration. + +**Tech Stack:** Rust, Tauri, websocket gateway client, cargo test, live Docker e2e. + +--- + +### Task 1: Add failing tests for investigate phase selection + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` +- Test: `src-tauri/src/remote_doctor.rs` + +**Step 1: Write the failing test** + +Add unit tests that expect: +- `primary.config.unreadable` selects `investigate` before `repair` +- investigate prompts contain read-only constraints +- repair prompts reference prior investigation results + +**Step 2: Run test to verify it fails** + +Run: `cargo test -p clawpal --lib investigate_ -- --nocapture` + +Expected: FAIL because `PlanKind::Investigate` and the new prompt rules do not exist yet. + +**Step 3: Write minimal implementation** + +Add the smallest new enum variant and prompt branching needed to satisfy the tests. 
+ +**Step 4: Run test to verify it passes** + +Run: `cargo test -p clawpal --lib investigate_ -- --nocapture` + +Expected: PASS + +### Task 2: Make agent planner the default remote repair path + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Change protocol selection** + +Make `AgentPlanner` the default remote repair protocol. Keep `ClawpalServer` only as explicit fallback / compatibility path. + +**Step 2: Extend the state machine** + +Add `PlanKind::Investigate` and route unreadable-config diagnoses into investigate first. + +**Step 3: Preserve execution plumbing** + +Do not rewrite command execution. Reuse existing `PlanResponse` / `PlanCommand` execution and result logging. + +### Task 3: Update agent prompt construction + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Add phase-specific prompt rules** + +Implement: +- diagnose prompt +- investigate prompt +- repair prompt + +**Step 2: Include raw config context** + +Always include: +- `configExcerpt` +- `configExcerptRaw` +- `configParseError` + +### Task 4: Update logging and stall detection + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Log investigate plans** + +Ensure `plan_received` and `command_result` support `planKind: investigate`. + +**Step 2: Extend stall detection** + +Treat repeated empty or non-actionable investigate plans as stalled. + +### Task 5: Add/adjust live e2e + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Reuse the unreadable config live e2e** + +Update the existing live Docker test so it runs through the agent path instead of the `clawpal_server` planner. + +**Step 2: Verify behavior** + +Run: `cargo test -p clawpal --lib remote_doctor_live_gateway_repairs_unreadable_remote_config -- --nocapture` + +Expected: PASS when the real gateway agent returns actionable diagnostic and repair steps. 
+ +### Task 6: Run regression tests + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Run focused tests** + +Run: `cargo test -p clawpal --lib investigate_ -- --nocapture` + +Expected: PASS + +**Step 2: Run broader remote doctor tests** + +Run: `cargo test -p clawpal --lib remote_doctor -- --nocapture` + +Expected: PASS diff --git a/docs/plans/2026-03-18-rescue-activation-failure-diagnosis-plan.md b/docs/plans/2026-03-18-rescue-activation-failure-diagnosis-plan.md new file mode 100644 index 00000000..8d22dcf1 --- /dev/null +++ b/docs/plans/2026-03-18-rescue-activation-failure-diagnosis-plan.md @@ -0,0 +1,79 @@ +# Rescue Activation Failure Diagnosis Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Add a rescue activation failure diagnosis flow so remote Doctor logs actionable rescue startup checks instead of only returning a terminal configured-inactive error. + +**Architecture:** Extend the existing rescue preflight path in `src-tauri/src/remote_doctor.rs`. When `manage_rescue_bot activate rescue` does not produce an active rescue gateway, collect a small local/remote diagnostic bundle using existing rescue status commands and targeted shell checks, append that bundle to the session log, and include a concise summary in the final error. + +**Tech Stack:** Rust, Tauri commands, existing rescue/SSH helpers, cargo test. 
+ +--- + +### Task 1: Add failing tests for rescue activation diagnosis + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` +- Test: `src-tauri/src/remote_doctor.rs` + +**Step 1: Write the failing test** + +Add unit tests that expect: +- rescue activation failure errors to include a diagnosis summary +- rescue activation failure diagnostics to capture rescue status / gateway checks in log-friendly command results + +**Step 2: Run test to verify it fails** + +Run: `cargo test -p clawpal --lib rescue_activation_error_mentions_runtime_state -- --nocapture` + +Expected: FAIL because the new diagnosis summary is not implemented yet. + +**Step 3: Write minimal implementation** + +Add the smallest helper/data needed to produce a structured rescue failure diagnosis bundle. + +**Step 4: Run test to verify it passes** + +Run: `cargo test -p clawpal --lib rescue_activation_error_mentions_runtime_state -- --nocapture` + +Expected: PASS + +### Task 2: Implement rescue activation failure diagnosis flow + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Extend rescue preflight failure handling** + +When rescue activation remains inactive: +- gather rescue status details +- run a small set of check commands +- append `rescue_activation_diagnosis` to the remote doctor session log +- return an error that references the diagnosis summary + +**Step 2: Keep behavior minimal** + +Do not change the remote doctor protocol. Do not add new fallback protocols. Only improve local rescue failure diagnosis. + +**Step 3: Verify targeted tests** + +Run: `cargo test -p clawpal --lib remote_doctor -- --nocapture` + +Expected: PASS + +### Task 3: Verify regression behavior + +**Files:** +- Modify: `src-tauri/src/remote_doctor.rs` + +**Step 1: Run focused e2e regression** + +Run: `cargo test -p clawpal --lib remote_doctor_docker_e2e_rescue_activation_fails_when_gateway_stays_inactive -- --nocapture` + +Expected: PASS and still fail early on inactive rescue gateway. 
+ +**Step 2: Run broader remote doctor tests** + +Run: `cargo test -p clawpal --lib remote_doctor` + +Expected: PASS diff --git a/openclaw-gateway-client/Cargo.lock b/openclaw-gateway-client/Cargo.lock new file mode 100644 index 00000000..6b8d1068 --- /dev/null +++ b/openclaw-gateway-client/Cargo.lock @@ -0,0 +1,1422 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "data-encoding" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" + +[[package]] +name = "der" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8", + "signature", +] + +[[package]] +name = "ed25519-dalek" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" +dependencies = [ + "curve25519-dalek", + "ed25519", + "rand_core 0.6.4", + "serde", + "sha2", + "subtle", + "zeroize", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + 
+[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + 
"syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "libc", + "r-efi 5.3.0", + "wasip2", +] + +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi 6.0.0", + "wasip2", + "wasip3", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + 
"icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "js-sys" +version = "0.3.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "libc" +version = "0.2.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" + +[[package]] +name = "linux-raw-sys" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + 
"libc", + "wasi", + "windows-sys", +] + +[[package]] +name = "once_cell" +version = "1.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" + +[[package]] +name = "openclaw-gateway-client" +version = "0.1.0" +dependencies = [ + "base64", + "ed25519-dalek", + "futures", + "hex", + "pretty_assertions", + "rand_core 0.6.4", + "serde", + "serde_json", + "sha2", + "tempfile", + "thiserror", + "tokio", + "tokio-tungstenite", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "pretty_assertions" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" +dependencies = [ + "diff", + "yansi", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.17", +] + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" 
+dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "rand_core 0.6.4", +] + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "subtle" +version = "2.6.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tempfile" +version = "3.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" +dependencies = [ + "fastrand", + "getrandom 0.4.2", + "once_cell", + "rustix", + "windows-sys", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" +dependencies = [ + "bytes", + "libc", + "mio", + "pin-project-lite", + "socket2", + 
"tokio-macros", + "windows-sys", +] + +[[package]] +name = "tokio-macros" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite", +] + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" +dependencies = [ + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "rand", + "sha1", + "thiserror", + "utf-8", +] + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" +dependencies = [ + "getrandom 0.4.2", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasip3" +version = 
"0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2578b716f8a7a858b7f02d5bd870c14bf4ddbbcf3a4c05414ba6503640505e3" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7e6cc098ea4d3bd6246687de65af3f920c430e236bee1e3bf2e441463f08a02f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/openclaw-gateway-client/Cargo.toml b/openclaw-gateway-client/Cargo.toml new file mode 100644 index 00000000..86b52423 --- /dev/null +++ 
b/openclaw-gateway-client/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "openclaw-gateway-client" +version = "0.1.0" +edition = "2024" + +[lib] +name = "openclaw_gateway_client" +path = "src/lib.rs" + +[dependencies] +base64 = "0.22" +ed25519-dalek = { version = "2", features = ["pkcs8", "rand_core"] } +futures = "0.3" +rand_core = { version = "0.6", features = ["getrandom"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +sha2 = "0.10" +tokio = { version = "1", features = ["macros", "rt-multi-thread", "sync", "time", "net"] } +tokio-tungstenite = "0.28" +tracing = "0.1" +thiserror = "2" +url = "2" +uuid = { version = "1", features = ["v4"] } +hex = "0.4" + +[dev-dependencies] +pretty_assertions = "1" +tempfile = "3" diff --git a/openclaw-gateway-client/src/auth_store.rs b/openclaw-gateway-client/src/auth_store.rs new file mode 100644 index 00000000..e9db15e0 --- /dev/null +++ b/openclaw-gateway-client/src/auth_store.rs @@ -0,0 +1,95 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use serde::{Deserialize, Serialize}; + +use crate::error::Error; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct DeviceTokenRecord { + pub token: String, + #[serde(default)] + pub scopes: Vec, +} + +pub trait AuthStore { + fn load(&self, device_id: &str, role: &str) -> Result, Error>; + fn store(&self, device_id: &str, role: &str, record: &DeviceTokenRecord) -> Result<(), Error>; + fn clear(&self, device_id: &str, role: &str) -> Result<(), Error>; +} + +#[derive(Debug, Clone)] +pub struct FileAuthStore { + root: PathBuf, +} + +impl FileAuthStore { + pub fn new(root: PathBuf) -> Self { + Self { root } + } + + fn path_for(&self, device_id: &str, role: &str) -> PathBuf { + self.root.join(sanitize(device_id)).join(format!("{}.json", sanitize(role))) + } +} + +impl AuthStore for FileAuthStore { + fn load(&self, device_id: &str, role: &str) -> Result, Error> { + let path = self.path_for(device_id, role); + if !path.exists() { + return 
Ok(None); + } + let raw = fs::read_to_string(&path)?; + Ok(Some(serde_json::from_str(&raw)?)) + } + + fn store(&self, device_id: &str, role: &str, record: &DeviceTokenRecord) -> Result<(), Error> { + let path = self.path_for(device_id, role); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent)?; + } + let raw = serde_json::to_string_pretty(record)?; + fs::write(path, raw)?; + Ok(()) + } + + fn clear(&self, device_id: &str, role: &str) -> Result<(), Error> { + let path = self.path_for(device_id, role); + if path.exists() { + fs::remove_file(&path)?; + prune_empty_dirs(&self.root, path.parent()); + } + Ok(()) + } +} + +fn sanitize(value: &str) -> String { + value.chars() + .map(|ch| { + if ch.is_ascii_alphanumeric() || ch == '-' || ch == '_' { + ch + } else { + '_' + } + }) + .collect() +} + +fn prune_empty_dirs(root: &Path, current: Option<&Path>) { + let mut current = current; + while let Some(path) = current { + if path == root { + break; + } + let is_empty = fs::read_dir(path) + .ok() + .map(|mut entries| entries.next().is_none()) + .unwrap_or(false); + if !is_empty { + break; + } + let parent = path.parent(); + let _ = fs::remove_dir(path); + current = parent; + } +} diff --git a/openclaw-gateway-client/src/client.rs b/openclaw-gateway-client/src/client.rs new file mode 100644 index 00000000..9efc6112 --- /dev/null +++ b/openclaw-gateway-client/src/client.rs @@ -0,0 +1,333 @@ +use std::sync::Arc; + +use futures::{SinkExt, StreamExt}; +use serde_json::Value; +use tokio::sync::{Mutex, broadcast, oneshot}; +use tokio::task::JoinHandle; +use tokio_tungstenite::{connect_async, tungstenite::Message}; +use url::Url; +use uuid::Uuid; + +use crate::error::Error; +use crate::protocol::{ + ClientInfo, ConnectParams, EventFrame, GatewayFrame, HelloOk, PROTOCOL_VERSION, RequestFrame, + ResponseFrame, +}; +use crate::tls::normalize_fingerprint; + +#[derive(Debug, Clone)] +pub struct GatewayClient { + url: Url, + connect_params: ConnectParams, + 
_tls_fingerprint: Option, +} + +#[derive(Debug)] +pub struct GatewayClientHandle { + inner: Arc, +} + +#[derive(Debug)] +struct GatewayClientInner { + writer: Mutex< + futures::stream::SplitSink< + tokio_tungstenite::WebSocketStream< + tokio_tungstenite::MaybeTlsStream, + >, + Message, + >, + >, + pending: Mutex>>>, + events: broadcast::Sender, + task: Mutex>>, +} + +#[derive(Debug, Default)] +pub struct GatewayClientBuilder { + url: Option, + client_id: Option, + client_mode: Option, + client_version: Option, + platform: Option, + role: Option, + tls_fingerprint: Option, +} + +impl GatewayClientBuilder { + pub fn new(url: impl Into) -> Self { + Self { + url: Some(url.into()), + ..Self::default() + } + } + + pub fn client_id(mut self, value: impl Into) -> Self { + self.client_id = Some(value.into()); + self + } + + pub fn client_mode(mut self, value: impl Into) -> Self { + self.client_mode = Some(value.into()); + self + } + + pub fn client_version(mut self, value: impl Into) -> Self { + self.client_version = Some(value.into()); + self + } + + pub fn platform(mut self, value: impl Into) -> Self { + self.platform = Some(value.into()); + self + } + + pub fn role(mut self, value: impl Into) -> Self { + self.role = Some(value.into()); + self + } + + pub fn tls_fingerprint(mut self, value: impl Into) -> Self { + self.tls_fingerprint = Some(value.into()); + self + } + + pub fn build(self) -> Result { + let url = Url::parse(&self.url.ok_or_else(|| Error::Config("url is required".into()))?) 
+ .map_err(|err| Error::Config(err.to_string()))?; + let tls_fingerprint = self + .tls_fingerprint + .as_deref() + .and_then(normalize_fingerprint); + if tls_fingerprint.is_some() && url.scheme() != "wss" { + return Err(Error::Config( + "tls fingerprint requires wss gateway url".into(), + )); + } + let role = self.role.unwrap_or_else(|| "operator".into()); + let mode = self.client_mode.unwrap_or_else(|| "backend".into()); + let connect_params = ConnectParams { + min_protocol: PROTOCOL_VERSION, + max_protocol: PROTOCOL_VERSION, + client: ClientInfo { + id: self.client_id.unwrap_or_else(|| "openclaw-rust".into()), + display_name: None, + version: self.client_version.unwrap_or_else(|| "dev".into()), + platform: self.platform.unwrap_or_else(|| std::env::consts::OS.into()), + mode, + instance_id: None, + device_family: None, + model_identifier: None, + }, + caps: Vec::new(), + commands: None, + permissions: None, + path_env: None, + auth: None, + role, + scopes: Vec::new(), + device: None, + locale: None, + user_agent: None, + }; + Ok(GatewayClient { + url, + connect_params, + _tls_fingerprint: tls_fingerprint, + }) + } +} + +impl GatewayClient { + pub async fn start(self) -> Result { + let (ws, _) = connect_async(self.url.as_str()) + .await + .map_err(|err| Error::Transport(err.to_string()))?; + let (writer, mut reader) = ws.split(); + + let challenge = read_until_challenge(&mut reader).await?; + let nonce = challenge + .payload + .and_then(|payload| payload.get("nonce").and_then(|value| value.as_str()).map(str::to_string)) + .ok_or_else(|| Error::Protocol("connect challenge missing nonce".into()))?; + + let request_id = Uuid::new_v4().to_string(); + let connect_frame = GatewayFrame::Request(RequestFrame { + id: request_id.clone(), + method: "connect".into(), + params: Some(serde_json::to_value(self.build_connect_params(&nonce))?), + }); + + { + let mut locked = writer; + locked + .send(Message::text(serde_json::to_string(&connect_frame)?)) + .await + .map_err(|err| 
Error::Transport(err.to_string()))?; + let _hello = read_until_connect_response(&mut reader, &request_id).await?; + let writer = locked; + let (events_tx, _) = broadcast::channel(256); + let inner = Arc::new(GatewayClientInner { + writer: Mutex::new(writer), + pending: Mutex::new(std::collections::HashMap::new()), + events: events_tx, + task: Mutex::new(None), + }); + + let read_inner = Arc::clone(&inner); + let task = tokio::spawn(async move { + while let Some(next) = reader.next().await { + match next { + Ok(message) => { + if !message.is_text() { + continue; + } + let Ok(frame) = serde_json::from_str::( + message.to_text().unwrap_or_default(), + ) else { + continue; + }; + match frame { + GatewayFrame::Response(response) => { + let sender = read_inner.pending.lock().await.remove(&response.id); + if let Some(sender) = sender { + let result = if response.ok { + Ok(response.payload.unwrap_or(Value::Null)) + } else { + Err(Error::Protocol( + response + .error + .and_then(|value| { + value + .get("message") + .and_then(|msg| msg.as_str()) + .map(str::to_string) + }) + .unwrap_or_else(|| "request failed".into()), + )) + }; + let _ = sender.send(result); + } + } + GatewayFrame::Event(event) => { + let _ = read_inner.events.send(event); + } + GatewayFrame::Request(_) => {} + } + } + Err(_) => break, + } + } + }); + + *inner.task.lock().await = Some(task); + + return Ok(GatewayClientHandle { inner }); + } + } + + fn build_connect_params(&self, _nonce: &str) -> ConnectParams { + self.connect_params.clone() + } +} + +impl GatewayClientHandle { + pub async fn request(&self, method: &str, params: Option) -> Result { + let id = Uuid::new_v4().to_string(); + let frame = GatewayFrame::Request(RequestFrame { + id: id.clone(), + method: method.to_string(), + params, + }); + let (tx, rx) = oneshot::channel(); + self.inner.pending.lock().await.insert(id, tx); + { + let mut writer = self.inner.writer.lock().await; + writer + .send(Message::text(serde_json::to_string(&frame)?)) + 
.await + .map_err(|err| Error::Transport(err.to_string()))?; + } + rx.await + .map_err(|_| Error::Protocol("request canceled".into()))? + } + + pub fn subscribe_events(&self) -> broadcast::Receiver { + self.inner.events.subscribe() + } + + pub async fn shutdown(self) -> Result<(), Error> { + { + let mut writer = self.inner.writer.lock().await; + let _ = writer.send(Message::Close(None)).await; + } + if let Some(task) = self.inner.task.lock().await.take() { + task.abort(); + let _ = task.await; + } + Ok(()) + } +} + +impl Clone for GatewayClientHandle { + fn clone(&self) -> Self { + Self { + inner: Arc::clone(&self.inner), + } + } +} + +async fn read_until_challenge( + reader: &mut futures::stream::SplitStream< + tokio_tungstenite::WebSocketStream>, + >, +) -> Result { + while let Some(message) = reader.next().await { + let message = message.map_err(|err| Error::Transport(err.to_string()))?; + if !message.is_text() { + continue; + } + let frame: GatewayFrame = serde_json::from_str(message.to_text().map_err(|err| Error::Transport(err.to_string()))?)?; + if let GatewayFrame::Event(event) = frame { + if event.event == "connect.challenge" { + return Ok(event); + } + } + } + Err(Error::Protocol("connection closed before connect.challenge".into())) +} + +async fn read_until_connect_response( + reader: &mut futures::stream::SplitStream< + tokio_tungstenite::WebSocketStream>, + >, + request_id: &str, +) -> Result { + while let Some(message) = reader.next().await { + let message = message.map_err(|err| Error::Transport(err.to_string()))?; + if !message.is_text() { + continue; + } + let frame: GatewayFrame = serde_json::from_str(message.to_text().map_err(|err| Error::Transport(err.to_string()))?)?; + if let GatewayFrame::Response(ResponseFrame { + id, + ok, + payload, + error, + }) = frame + { + if id != request_id { + continue; + } + if !ok { + return Err(Error::Protocol( + error + .and_then(|value| value.get("message").and_then(|msg| msg.as_str()).map(str::to_string)) + 
.unwrap_or_else(|| "connect failed".into()), + )); + } + let payload = payload.ok_or_else(|| Error::Protocol("connect response missing payload".into()))?; + return serde_json::from_value(payload).map_err(Error::from); + } + } + Err(Error::Protocol("connection closed before connect response".into())) +} diff --git a/openclaw-gateway-client/src/error.rs b/openclaw-gateway-client/src/error.rs new file mode 100644 index 00000000..fa637eb9 --- /dev/null +++ b/openclaw-gateway-client/src/error.rs @@ -0,0 +1,17 @@ +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum Error { + #[error("config error: {0}")] + Config(String), + #[error("json error: {0}")] + Json(#[from] serde_json::Error), + #[error("io error: {0}")] + Io(#[from] std::io::Error), + #[error("crypto error: {0}")] + Crypto(String), + #[error("transport error: {0}")] + Transport(String), + #[error("protocol error: {0}")] + Protocol(String), +} diff --git a/openclaw-gateway-client/src/identity.rs b/openclaw-gateway-client/src/identity.rs new file mode 100644 index 00000000..b6efb9f3 --- /dev/null +++ b/openclaw-gateway-client/src/identity.rs @@ -0,0 +1,97 @@ +use base64::Engine; +use base64::engine::general_purpose::URL_SAFE_NO_PAD; +use ed25519_dalek::{Signer, SigningKey}; +use ed25519_dalek::{pkcs8::DecodePrivateKey, pkcs8::EncodePrivateKey, pkcs8::EncodePublicKey}; +use rand_core::OsRng; +use serde_json::{Value, json}; +use uuid::Uuid; + +use crate::error::Error; + +#[derive(Debug, Clone, PartialEq)] +pub struct DeviceIdentity { + pub device_id: String, + pub public_key_pem: String, + pub private_key_pem: String, +} + +pub fn generate_device_identity() -> Result { + let signing_key = SigningKey::generate(&mut OsRng); + let verify_key = signing_key.verifying_key(); + let private_key_der = signing_key + .to_pkcs8_der() + .map_err(|err| Error::Crypto(err.to_string()))?; + let public_key_der = verify_key + .to_public_key_der() + .map_err(|err| Error::Crypto(err.to_string()))?; + Ok(DeviceIdentity { + 
device_id: Uuid::new_v4().to_string(), + public_key_pem: encode_pem("PUBLIC KEY", public_key_der.as_bytes()), + private_key_pem: encode_pem("PRIVATE KEY", private_key_der.as_bytes()), + }) +} + +pub fn build_device_auth_payload_v3( + device_id: &str, + client_id: &str, + client_mode: &str, + role: &str, + scopes: &[String], + signed_at_ms: u64, + token: Option<&str>, + nonce: &str, + platform: &str, + device_family: Option<&str>, +) -> Value { + let mut payload = json!({ + "v": 3, + "deviceId": device_id, + "clientId": client_id, + "clientMode": client_mode, + "role": role, + "scopes": scopes, + "signedAtMs": signed_at_ms, + "nonce": nonce, + "platform": platform, + }); + if let Some(token) = token { + payload["token"] = Value::String(token.to_string()); + } + if let Some(device_family) = device_family { + payload["deviceFamily"] = Value::String(device_family.to_string()); + } + payload +} + +pub fn sign_device_payload(private_key_pem: &str, payload: &Value) -> Result { + let private_key_der = decode_pem("PRIVATE KEY", private_key_pem)?; + let signing_key = SigningKey::from_pkcs8_der(&private_key_der) + .map_err(|err| Error::Crypto(err.to_string()))?; + let payload_bytes = serde_json::to_vec(payload)?; + let signature = signing_key.sign(&payload_bytes); + Ok(URL_SAFE_NO_PAD.encode(signature.to_bytes())) +} + +fn encode_pem(label: &str, der: &[u8]) -> String { + let body = base64::engine::general_purpose::STANDARD.encode(der); + let mut pem = String::new(); + pem.push_str(&format!("-----BEGIN {label}-----\n")); + for chunk in body.as_bytes().chunks(64) { + pem.push_str(&String::from_utf8_lossy(chunk)); + pem.push('\n'); + } + pem.push_str(&format!("-----END {label}-----\n")); + pem +} + +fn decode_pem(label: &str, pem: &str) -> Result, Error> { + let begin = format!("-----BEGIN {label}-----"); + let end = format!("-----END {label}-----"); + let body = pem + .lines() + .filter(|line| *line != begin && *line != end) + .collect::(); + 
base64::engine::general_purpose::STANDARD + .decode(body) + .map_err(|err| Error::Crypto(err.to_string())) +} diff --git a/openclaw-gateway-client/src/lib.rs b/openclaw-gateway-client/src/lib.rs new file mode 100644 index 00000000..99d0c5dc --- /dev/null +++ b/openclaw-gateway-client/src/lib.rs @@ -0,0 +1,7 @@ +pub mod error; +pub mod auth_store; +pub mod client; +pub mod identity; +pub mod node; +pub mod protocol; +pub mod tls; diff --git a/openclaw-gateway-client/src/node.rs b/openclaw-gateway-client/src/node.rs new file mode 100644 index 00000000..badaa64f --- /dev/null +++ b/openclaw-gateway-client/src/node.rs @@ -0,0 +1,76 @@ +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +use crate::client::GatewayClientHandle; +use crate::error::Error; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct NodeInvokeRequest { + pub id: String, + pub node_id: String, + pub command: String, + #[serde(default)] + pub params: Option, + #[serde(default)] + pub timeout_ms: Option, +} + +#[derive(Debug, Clone)] +pub struct NodeClient { + handle: GatewayClientHandle, +} + +impl NodeClient { + pub fn new(handle: GatewayClientHandle) -> Self { + Self { handle } + } + + pub async fn next_invoke(&self) -> Result { + let mut events = self.handle.subscribe_events(); + loop { + let event = events + .recv() + .await + .map_err(|_| Error::Protocol("event stream closed".into()))?; + if event.event != "node.invoke.request" { + continue; + } + let payload = event + .payload + .ok_or_else(|| Error::Protocol("node.invoke.request missing payload".into()))?; + return serde_json::from_value(payload).map_err(Error::from); + } + } + + pub async fn send_invoke_result( + &self, + request: &NodeInvokeRequest, + ok: bool, + payload: Option, + error: Option, + ) -> Result { + let mut params = serde_json::Map::new(); + params.insert("id".into(), Value::String(request.id.clone())); + params.insert("nodeId".into(), 
Value::String(request.node_id.clone())); + params.insert("ok".into(), Value::Bool(ok)); + if let Some(payload) = payload { + params.insert("payload".into(), payload); + } + if let Some(error) = error { + params.insert("error".into(), error); + } + self.handle + .request("node.invoke.result", Some(Value::Object(params))) + .await + } + + pub async fn send_event(&self, event: &str, payload: Option) -> Result { + let mut params = serde_json::Map::new(); + params.insert("event".into(), Value::String(event.to_string())); + if let Some(payload) = payload { + params.insert("payload".into(), payload); + } + self.handle.request("node.event", Some(Value::Object(params))).await + } +} diff --git a/openclaw-gateway-client/src/protocol.rs b/openclaw-gateway-client/src/protocol.rs new file mode 100644 index 00000000..36dc26a7 --- /dev/null +++ b/openclaw-gateway-client/src/protocol.rs @@ -0,0 +1,141 @@ +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +pub const PROTOCOL_VERSION: u32 = 3; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "lowercase")] +pub enum GatewayFrame { + #[serde(rename = "req")] + Request(RequestFrame), + #[serde(rename = "res")] + Response(ResponseFrame), + #[serde(rename = "event")] + Event(EventFrame), +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct RequestFrame { + pub id: String, + pub method: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub params: Option, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ResponseFrame { + pub id: String, + pub ok: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub payload: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct EventFrame { + pub event: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub payload: Option, + #[serde(skip_serializing_if = 
"Option::is_none")] + pub seq: Option, + #[serde(rename = "stateversion", skip_serializing_if = "Option::is_none")] + pub state_version: Option, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ClientInfo { + pub id: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub display_name: Option, + pub version: String, + pub platform: String, + pub mode: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub instance_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub device_family: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub model_identifier: Option, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct AuthPayload { + #[serde(skip_serializing_if = "Option::is_none")] + pub token: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub device_token: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub password: Option, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DeviceAuth { + pub id: String, + pub public_key: String, + pub signature: String, + pub signed_at: u64, + pub nonce: String, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ConnectParams { + pub min_protocol: u32, + pub max_protocol: u32, + pub client: ClientInfo, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub caps: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub commands: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub permissions: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub path_env: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub auth: Option, + pub role: String, + #[serde(default)] + pub scopes: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub device: Option, + 
#[serde(skip_serializing_if = "Option::is_none")] + pub locale: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub user_agent: Option, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PolicyInfo { + #[serde(skip_serializing_if = "Option::is_none")] + pub tick_interval_ms: Option, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct HelloAuth { + #[serde(skip_serializing_if = "Option::is_none")] + pub device_token: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub role: Option, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub scopes: Vec, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct HelloOk { + #[serde(skip_serializing_if = "Option::is_none")] + pub server_name: Option, + pub policy: PolicyInfo, + #[serde(skip_serializing_if = "Option::is_none")] + pub auth: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub snapshot: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub canvas_host_url: Option, +} diff --git a/openclaw-gateway-client/src/tls.rs b/openclaw-gateway-client/src/tls.rs new file mode 100644 index 00000000..dedc38a8 --- /dev/null +++ b/openclaw-gateway-client/src/tls.rs @@ -0,0 +1,17 @@ +pub fn normalize_fingerprint(input: &str) -> Option { + let compact: String = input + .chars() + .filter(|ch| *ch != ':' && !ch.is_ascii_whitespace()) + .collect(); + if compact.is_empty() || compact.len() % 2 != 0 || !compact.chars().all(|ch| ch.is_ascii_hexdigit()) + { + return None; + } + let upper = compact.to_ascii_uppercase(); + let pairs = upper + .as_bytes() + .chunks(2) + .map(|chunk| String::from_utf8_lossy(chunk).to_string()) + .collect::>(); + Some(pairs.join(":")) +} diff --git a/openclaw-gateway-client/tests/auth_store.rs b/openclaw-gateway-client/tests/auth_store.rs new file mode 100644 index 
00000000..f8e2d85a --- /dev/null +++ b/openclaw-gateway-client/tests/auth_store.rs @@ -0,0 +1,37 @@ +use openclaw_gateway_client::auth_store::{AuthStore, DeviceTokenRecord, FileAuthStore}; +use tempfile::tempdir; + +#[test] +fn saves_and_loads_device_token_by_device_and_role() { + let dir = tempdir().expect("tempdir"); + let store = FileAuthStore::new(dir.path().to_path_buf()); + let record = DeviceTokenRecord { + token: "device-token".into(), + scopes: vec!["operator.read".into()], + }; + + store + .store("device-1", "node", &record) + .expect("store token"); + + let loaded = store.load("device-1", "node").expect("load token"); + assert_eq!(loaded, Some(record)); +} + +#[test] +fn clears_device_token() { + let dir = tempdir().expect("tempdir"); + let store = FileAuthStore::new(dir.path().to_path_buf()); + let record = DeviceTokenRecord { + token: "device-token".into(), + scopes: vec![], + }; + + store + .store("device-1", "node", &record) + .expect("store token"); + store.clear("device-1", "node").expect("clear token"); + + let loaded = store.load("device-1", "node").expect("load token"); + assert_eq!(loaded, None); +} diff --git a/openclaw-gateway-client/tests/client_handshake.rs b/openclaw-gateway-client/tests/client_handshake.rs new file mode 100644 index 00000000..ac000961 --- /dev/null +++ b/openclaw-gateway-client/tests/client_handshake.rs @@ -0,0 +1,71 @@ +use futures::{SinkExt, StreamExt}; +use openclaw_gateway_client::client::GatewayClientBuilder; +use serde_json::{Value, json}; +use tokio::net::TcpListener; +use tokio::sync::oneshot; +use tokio_tungstenite::{accept_async, tungstenite::Message}; + +#[tokio::test] +async fn waits_for_connect_challenge_and_sends_connect_request() { + let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind test server"); + let addr = listener.local_addr().expect("local addr"); + let (tx, rx) = oneshot::channel::(); + + let server = tokio::spawn(async move { + let (stream, _) = 
listener.accept().await.expect("accept"); + let mut ws = accept_async(stream).await.expect("accept websocket"); + ws.send(Message::text( + json!({ + "type": "event", + "event": "connect.challenge", + "payload": { "nonce": "nonce-123" } + }) + .to_string(), + )) + .await + .expect("send challenge"); + + let message = ws.next().await.expect("message").expect("websocket message"); + let text = message.into_text().expect("text frame"); + let value: Value = serde_json::from_str(&text).expect("json request"); + let req_id = value["id"].as_str().expect("request id").to_string(); + tx.send(value).expect("capture request"); + + ws.send(Message::text( + json!({ + "type": "res", + "id": req_id, + "ok": true, + "payload": { + "serverName": "gateway.local", + "policy": { "tickIntervalMs": 30000 } + } + }) + .to_string(), + )) + .await + .expect("send hello"); + }); + + let client = GatewayClientBuilder::new(format!("ws://{}", addr)) + .client_id("openclaw-rust") + .client_mode("node") + .client_version("0.1.0") + .platform("linux") + .role("node") + .build() + .expect("build client"); + + let handle = client.start().await.expect("start client"); + let request = rx.await.expect("captured connect request"); + + assert_eq!(request["type"], "req"); + assert_eq!(request["method"], "connect"); + assert_eq!(request["params"]["role"], "node"); + assert_eq!(request["params"]["client"]["id"], "openclaw-rust"); + assert_eq!(request["params"]["minProtocol"], 3); + assert_eq!(request["params"]["maxProtocol"], 3); + + handle.shutdown().await.expect("shutdown"); + server.await.expect("server task"); +} diff --git a/openclaw-gateway-client/tests/client_rpc.rs b/openclaw-gateway-client/tests/client_rpc.rs new file mode 100644 index 00000000..9d832835 --- /dev/null +++ b/openclaw-gateway-client/tests/client_rpc.rs @@ -0,0 +1,123 @@ +use futures::{SinkExt, StreamExt}; +use openclaw_gateway_client::client::GatewayClientBuilder; +use serde_json::{Value, json}; +use tokio::net::TcpListener; +use 
tokio::sync::{mpsc, oneshot}; +use tokio_tungstenite::{accept_async, tungstenite::Message}; + +#[tokio::test] +async fn request_receives_matching_response_and_events_are_broadcast() { + let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind test server"); + let addr = listener.local_addr().expect("local addr"); + let (req_tx, mut req_rx) = mpsc::unbounded_channel::(); + let (ready_tx, ready_rx) = oneshot::channel::<()>(); + + let server = tokio::spawn(async move { + let (stream, _) = listener.accept().await.expect("accept"); + let mut ws = accept_async(stream).await.expect("accept websocket"); + ws.send(Message::text( + json!({ + "type": "event", + "event": "connect.challenge", + "payload": { "nonce": "nonce-123" } + }) + .to_string(), + )) + .await + .expect("send challenge"); + + let connect_text = ws + .next() + .await + .expect("connect message") + .expect("connect frame") + .into_text() + .expect("text"); + let connect_value: Value = serde_json::from_str(&connect_text).expect("connect json"); + let connect_id = connect_value["id"].as_str().expect("connect id").to_string(); + + ws.send(Message::text( + json!({ + "type": "res", + "id": connect_id, + "ok": true, + "payload": { + "serverName": "gateway.local", + "policy": { "tickIntervalMs": 30000 } + } + }) + .to_string(), + )) + .await + .expect("send connect response"); + + ready_tx.send(()).expect("ready"); + + let request_text = ws + .next() + .await + .expect("rpc message") + .expect("rpc frame") + .into_text() + .expect("text"); + let request_value: Value = serde_json::from_str(&request_text).expect("rpc json"); + req_tx.send(request_value.clone()).expect("request capture"); + let req_id = request_value["id"].as_str().expect("rpc id").to_string(); + + ws.send(Message::text( + json!({ + "type": "event", + "event": "test.event", + "payload": { "ok": true }, + "seq": 1 + }) + .to_string(), + )) + .await + .expect("send event"); + + ws.send(Message::text( + json!({ + "type": "res", + "id": req_id, + 
"ok": true, + "payload": { "result": 42 } + }) + .to_string(), + )) + .await + .expect("send rpc response"); + }); + + let client = GatewayClientBuilder::new(format!("ws://{}", addr)) + .client_id("openclaw-rust") + .client_mode("node") + .client_version("0.1.0") + .platform("linux") + .role("node") + .build() + .expect("build client"); + + let handle = client.start().await.expect("start client"); + ready_rx.await.expect("handshake ready"); + + let mut events = handle.subscribe_events(); + let response = handle + .request("debug.echo", Some(json!({ "hello": "world" }))) + .await + .expect("rpc response"); + + assert_eq!(response, json!({ "result": 42 })); + + let sent_request = req_rx.recv().await.expect("captured request"); + assert_eq!(sent_request["type"], "req"); + assert_eq!(sent_request["method"], "debug.echo"); + assert_eq!(sent_request["params"], json!({ "hello": "world" })); + + let event = events.recv().await.expect("event"); + assert_eq!(event.event, "test.event"); + assert_eq!(event.payload, Some(json!({ "ok": true }))); + + handle.shutdown().await.expect("shutdown"); + server.await.expect("server task"); +} diff --git a/openclaw-gateway-client/tests/connect_payload.rs b/openclaw-gateway-client/tests/connect_payload.rs new file mode 100644 index 00000000..ac755518 --- /dev/null +++ b/openclaw-gateway-client/tests/connect_payload.rs @@ -0,0 +1,157 @@ +use openclaw_gateway_client::protocol::{ + AuthPayload, ClientInfo, ConnectParams, DeviceAuth, EventFrame, GatewayFrame, HelloOk, + PolicyInfo, ResponseFrame, +}; +use pretty_assertions::assert_eq; +use serde_json::json; + +#[test] +fn deserializes_connect_challenge_event() { + let raw = json!({ + "type": "event", + "event": "connect.challenge", + "payload": { "nonce": "nonce-123" } + }); + + let frame: GatewayFrame = serde_json::from_value(raw).expect("deserialize challenge"); + + let GatewayFrame::Event(EventFrame { event, payload, .. 
}) = frame else { + panic!("expected event frame"); + }; + + assert_eq!(event, "connect.challenge"); + assert_eq!(payload, Some(json!({ "nonce": "nonce-123" }))); +} + +#[test] +fn serializes_connect_params() { + let params = ConnectParams { + min_protocol: 3, + max_protocol: 3, + client: ClientInfo { + id: "openclaw-rust".into(), + display_name: Some("Rust Node".into()), + version: "0.1.0".into(), + platform: "linux".into(), + mode: "node".into(), + instance_id: Some("node-1".into()), + device_family: Some("Linux".into()), + model_identifier: None, + }, + caps: vec!["system".into()], + commands: Some(vec!["system.run".into(), "system.which".into()]), + permissions: None, + path_env: Some("/usr/bin".into()), + auth: Some(AuthPayload { + token: Some("shared-token".into()), + device_token: Some("device-token".into()), + password: None, + }), + role: "node".into(), + scopes: vec![], + device: Some(DeviceAuth { + id: "device-1".into(), + public_key: "pub".into(), + signature: "sig".into(), + signed_at: 123, + nonce: "nonce-123".into(), + }), + locale: Some("en-US".into()), + user_agent: Some("OpenClawRust/0.1.0".into()), + }; + + let encoded = serde_json::to_value(¶ms).expect("serialize connect params"); + + assert_eq!( + encoded, + json!({ + "minProtocol": 3, + "maxProtocol": 3, + "client": { + "id": "openclaw-rust", + "displayName": "Rust Node", + "version": "0.1.0", + "platform": "linux", + "mode": "node", + "instanceId": "node-1", + "deviceFamily": "Linux" + }, + "caps": ["system"], + "commands": ["system.run", "system.which"], + "pathEnv": "/usr/bin", + "auth": { + "token": "shared-token", + "deviceToken": "device-token" + }, + "role": "node", + "scopes": [], + "device": { + "id": "device-1", + "publicKey": "pub", + "signature": "sig", + "signedAt": 123, + "nonce": "nonce-123" + }, + "locale": "en-US", + "userAgent": "OpenClawRust/0.1.0" + }) + ); +} + +#[test] +fn deserializes_hello_ok_response_payload() { + let raw = json!({ + "type": "res", + "id": "req-1", + 
"ok": true, + "payload": { + "serverName": "gateway.local", + "policy": { "tickIntervalMs": 30000 }, + "auth": { + "deviceToken": "next-device-token", + "role": "node", + "scopes": [] + }, + "snapshot": { + "health": {}, + "presence": [] + } + } + }); + + let frame: GatewayFrame = serde_json::from_value(raw).expect("deserialize hello response"); + + let GatewayFrame::Response(ResponseFrame { payload: Some(payload), .. }) = frame else { + panic!("expected response"); + }; + + let hello: HelloOk = serde_json::from_value(payload).expect("decode hello payload"); + assert_eq!(hello.server_name.as_deref(), Some("gateway.local")); + assert_eq!(hello.policy.tick_interval_ms, Some(30_000)); + assert_eq!(hello.auth.and_then(|auth| auth.device_token), Some("next-device-token".into())); +} + +#[test] +fn serializes_policy_info_in_hello_shape() { + let hello = HelloOk { + server_name: Some("gateway.local".into()), + policy: PolicyInfo { + tick_interval_ms: Some(15_000), + }, + auth: None, + snapshot: None, + canvas_host_url: None, + }; + + let encoded = serde_json::to_value(&hello).expect("serialize hello"); + + assert_eq!( + encoded, + json!({ + "serverName": "gateway.local", + "policy": { + "tickIntervalMs": 15000 + } + }) + ); +} diff --git a/openclaw-gateway-client/tests/device_identity.rs b/openclaw-gateway-client/tests/device_identity.rs new file mode 100644 index 00000000..43fd0d25 --- /dev/null +++ b/openclaw-gateway-client/tests/device_identity.rs @@ -0,0 +1,70 @@ +use openclaw_gateway_client::identity::{ + build_device_auth_payload_v3, generate_device_identity, sign_device_payload, +}; +use serde_json::json; + +#[test] +fn generates_device_identity_with_expected_shape() { + let identity = generate_device_identity().expect("generate identity"); + + assert!(!identity.device_id.trim().is_empty()); + assert!(identity.public_key_pem.contains("BEGIN PUBLIC KEY")); + assert!(identity.private_key_pem.contains("BEGIN PRIVATE KEY")); +} + +#[test] +fn 
builds_device_auth_payload_with_nonce() { + let payload = build_device_auth_payload_v3( + "device-1", + "openclaw-rust", + "node", + "node", + &[], + 123, + Some("token-1"), + "nonce-123", + "linux", + Some("Linux"), + ); + + assert_eq!( + payload, + json!({ + "v": 3, + "deviceId": "device-1", + "clientId": "openclaw-rust", + "clientMode": "node", + "role": "node", + "scopes": [], + "signedAtMs": 123, + "token": "token-1", + "nonce": "nonce-123", + "platform": "linux", + "deviceFamily": "Linux" + }) + ); +} + +#[test] +fn signs_payload_and_returns_base64url_signature() { + let identity = generate_device_identity().expect("generate identity"); + let payload = build_device_auth_payload_v3( + &identity.device_id, + "openclaw-rust", + "node", + "node", + &[], + 123, + None, + "nonce-123", + "linux", + None, + ); + + let signature = sign_device_payload(&identity.private_key_pem, &payload).expect("sign payload"); + + assert!(!signature.trim().is_empty()); + assert!(!signature.contains('=')); + assert!(!signature.contains('+')); + assert!(!signature.contains('/')); +} diff --git a/openclaw-gateway-client/tests/node_client.rs b/openclaw-gateway-client/tests/node_client.rs new file mode 100644 index 00000000..bf7f9c18 --- /dev/null +++ b/openclaw-gateway-client/tests/node_client.rs @@ -0,0 +1,165 @@ +use futures::{SinkExt, StreamExt}; +use openclaw_gateway_client::client::GatewayClientBuilder; +use openclaw_gateway_client::node::NodeClient; +use serde_json::{Value, json}; +use tokio::net::TcpListener; +use tokio::sync::oneshot; +use tokio_tungstenite::{accept_async, tungstenite::Message}; + +#[tokio::test] +async fn node_client_decodes_invoke_requests_and_sends_results_and_events() { + let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind test server"); + let addr = listener.local_addr().expect("local addr"); + let (capture_tx, capture_rx) = oneshot::channel::<(Value, Value)>(); + + let server = tokio::spawn(async move { + let (stream, _) = 
listener.accept().await.expect("accept"); + let mut ws = accept_async(stream).await.expect("accept websocket"); + ws.send(Message::text( + json!({ + "type": "event", + "event": "connect.challenge", + "payload": { "nonce": "nonce-123" } + }) + .to_string(), + )) + .await + .expect("send challenge"); + + let connect_text = ws + .next() + .await + .expect("connect message") + .expect("connect frame") + .into_text() + .expect("text"); + let connect_value: Value = serde_json::from_str(&connect_text).expect("connect json"); + let connect_id = connect_value["id"].as_str().expect("connect id").to_string(); + + ws.send(Message::text( + json!({ + "type": "res", + "id": connect_id, + "ok": true, + "payload": { + "serverName": "gateway.local", + "policy": { "tickIntervalMs": 30000 } + } + }) + .to_string(), + )) + .await + .expect("send connect response"); + + ws.send(Message::text( + json!({ + "type": "event", + "event": "node.invoke.request", + "payload": { + "id": "invoke-1", + "nodeId": "node-1", + "command": "debug.echo", + "params": { "hello": "world" }, + "timeoutMs": 5000 + } + }) + .to_string(), + )) + .await + .expect("send invoke request"); + + let invoke_result_text = ws + .next() + .await + .expect("invoke result message") + .expect("invoke result frame") + .into_text() + .expect("text"); + let invoke_result_value: Value = + serde_json::from_str(&invoke_result_text).expect("invoke result json"); + let invoke_result_id = invoke_result_value["id"] + .as_str() + .expect("invoke result id") + .to_string(); + + ws.send(Message::text( + json!({ + "type": "res", + "id": invoke_result_id, + "ok": true, + "payload": { "status": "ok" } + }) + .to_string(), + )) + .await + .expect("ack invoke result"); + + let node_event_text = ws + .next() + .await + .expect("node event message") + .expect("node event frame") + .into_text() + .expect("text"); + let node_event_value: Value = serde_json::from_str(&node_event_text).expect("node event json"); + let node_event_id = 
node_event_value["id"] + .as_str() + .expect("node event id") + .to_string(); + + ws.send(Message::text( + json!({ + "type": "res", + "id": node_event_id, + "ok": true, + "payload": { "status": "ok" } + }) + .to_string(), + )) + .await + .expect("ack node event"); + + capture_tx + .send((invoke_result_value, node_event_value)) + .expect("capture values"); + }); + + let client = GatewayClientBuilder::new(format!("ws://{}", addr)) + .client_id("openclaw-rust") + .client_mode("node") + .client_version("0.1.0") + .platform("linux") + .role("node") + .build() + .expect("build client"); + let handle = client.start().await.expect("start client"); + let node = NodeClient::new(handle.clone()); + + let invoke = node.next_invoke().await.expect("invoke request"); + assert_eq!(invoke.id, "invoke-1"); + assert_eq!(invoke.node_id, "node-1"); + assert_eq!(invoke.command, "debug.echo"); + assert_eq!(invoke.params, Some(json!({ "hello": "world" }))); + + node.send_invoke_result(&invoke, true, Some(json!({ "echoed": true })), None) + .await + .expect("send invoke result"); + + node.send_event("exec.finished", Some(json!({ "runId": "run-1" }))) + .await + .expect("send node event"); + + let (invoke_result, node_event) = capture_rx.await.expect("capture channel"); + assert_eq!(invoke_result["method"], "node.invoke.result"); + assert_eq!(invoke_result["params"]["id"], "invoke-1"); + assert_eq!(invoke_result["params"]["nodeId"], "node-1"); + assert_eq!(invoke_result["params"]["ok"], true); + assert_eq!(invoke_result["params"]["payload"], json!({ "echoed": true })); + + assert_eq!(node_event["method"], "node.event"); + assert_eq!(node_event["params"]["event"], "exec.finished"); + assert_eq!(node_event["params"]["payload"], json!({ "runId": "run-1" })); + + handle.shutdown().await.expect("shutdown"); + server.await.expect("server task"); +} diff --git a/openclaw-gateway-client/tests/protocol_roundtrip.rs b/openclaw-gateway-client/tests/protocol_roundtrip.rs new file mode 100644 index 
00000000..25eed560 --- /dev/null +++ b/openclaw-gateway-client/tests/protocol_roundtrip.rs @@ -0,0 +1,80 @@ +use openclaw_gateway_client::protocol::{EventFrame, GatewayFrame, RequestFrame, ResponseFrame}; +use pretty_assertions::assert_eq; +use serde_json::{json, Value}; + +#[test] +fn serializes_request_frame() { + let frame = GatewayFrame::Request(RequestFrame { + id: "req-1".into(), + method: "chat.send".into(), + params: Some(json!({ "text": "hello" })), + }); + + let encoded = serde_json::to_value(&frame).expect("serialize request frame"); + + assert_eq!( + encoded, + json!({ + "type": "req", + "id": "req-1", + "method": "chat.send", + "params": { "text": "hello" } + }) + ); +} + +#[test] +fn deserializes_response_frame() { + let raw = json!({ + "type": "res", + "id": "req-1", + "ok": true, + "payload": { "status": "ok" } + }); + + let frame: GatewayFrame = serde_json::from_value(raw).expect("deserialize response frame"); + + let GatewayFrame::Response(ResponseFrame { id, ok, payload, error }) = frame else { + panic!("expected response frame"); + }; + + assert_eq!(id, "req-1"); + assert!(ok); + assert_eq!(payload, Some(json!({ "status": "ok" }))); + assert_eq!(error, None); +} + +#[test] +fn deserializes_event_frame() { + let raw = json!({ + "type": "event", + "event": "tick", + "payload": { "now": 123 }, + "seq": 7 + }); + + let frame: GatewayFrame = serde_json::from_value(raw).expect("deserialize event frame"); + + let GatewayFrame::Event(EventFrame { event, payload, seq, state_version }) = frame else { + panic!("expected event frame"); + }; + + assert_eq!(event, "tick"); + assert_eq!(payload, Some(json!({ "now": 123 }))); + assert_eq!(seq, Some(7)); + assert_eq!(state_version, None); +} + +#[test] +fn omits_absent_optional_fields() { + let frame = GatewayFrame::Event(EventFrame { + event: "tick".into(), + payload: None, + seq: None, + state_version: None, + }); + + let encoded = serde_json::to_value(&frame).expect("serialize event frame"); + + 
assert_eq!(encoded, Value::from(json!({ "type": "event", "event": "tick" }))); +} diff --git a/openclaw-gateway-client/tests/tls_fingerprint.rs b/openclaw-gateway-client/tests/tls_fingerprint.rs new file mode 100644 index 00000000..ff001861 --- /dev/null +++ b/openclaw-gateway-client/tests/tls_fingerprint.rs @@ -0,0 +1,38 @@ +use openclaw_gateway_client::client::GatewayClientBuilder; +use openclaw_gateway_client::tls::normalize_fingerprint; + +#[test] +fn normalizes_sha256_fingerprint_variants() { + assert_eq!( + normalize_fingerprint("AA:bb:cc"), + Some("AA:BB:CC".into()) + ); + assert_eq!( + normalize_fingerprint("aabbcc"), + Some("AA:BB:CC".into()) + ); + assert_eq!(normalize_fingerprint(""), None); +} + +#[test] +fn rejects_non_hex_fingerprint() { + assert_eq!(normalize_fingerprint("zz:11"), None); +} + +#[test] +fn fingerprint_requires_wss_url() { + let err = GatewayClientBuilder::new("ws://127.0.0.1:18789") + .client_id("openclaw-rust") + .client_mode("node") + .client_version("0.1.0") + .platform("linux") + .role("node") + .tls_fingerprint("AA:BB") + .build() + .expect_err("non-wss should reject fingerprint"); + + assert!( + err.to_string().contains("tls fingerprint requires wss"), + "unexpected error: {err}" + ); +} diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index bff4fd99..2998a5ab 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -15,13 +15,15 @@ regex = "1.10.6" reqwest = { version = "0.12", default-features = false, features = ["blocking", "json", "rustls-tls"] } serde = { version = "1.0.214", features = ["derive"] } serde_json = "1.0.133" -tauri = { version = "2.1.0", features = [] } +tauri = { version = "2.1.0", features = ["test"] } thiserror = "1.0.63" uuid = { version = "1.11.0", features = ["v4"] } chrono = { version = "0.4.38", features = ["clock"] } base64 = "0.22" ed25519-dalek = { version = "2", features = ["pkcs8", "pem"] } -tokio = { version = "1", features = ["sync", "process", "macros"] } +getrandom = "0.2" 
+sha2 = "0.10" +tokio = { version = "1", features = ["sync", "process", "macros", "time"] } tokio-tungstenite = { version = "0.24", features = ["rustls-tls-webpki-roots"] } futures-util = "0.3" shellexpand = "3.1" diff --git a/src-tauri/src/bridge_client.rs b/src-tauri/src/bridge_client.rs index d7f5687e..22b5bbd9 100644 --- a/src-tauri/src/bridge_client.rs +++ b/src-tauri/src/bridge_client.rs @@ -8,9 +8,9 @@ use futures_util::stream::SplitSink; use futures_util::{SinkExt, StreamExt}; use indexmap::IndexMap; use serde_json::{json, Value}; -use tauri::{AppHandle, Emitter}; +use tauri::{AppHandle, Emitter, Runtime}; use tokio::net::TcpStream; -use tokio::sync::{oneshot, Mutex}; +use tokio::sync::{broadcast, oneshot, Mutex}; use tokio_tungstenite::{connect_async, tungstenite::Message, MaybeTlsStream, WebSocketStream}; use crate::models::resolve_paths; @@ -24,7 +24,7 @@ type WsSink = SplitSink>, Message>; const NODE_COMMANDS: &[&str] = &["clawpal", "openclaw"]; /// Maximum number of pending invoke requests kept in memory. -const MAX_PENDING_INVOKES: usize = 50; +const MAX_PENDING_INVOKES: usize = 10; /// Seconds before auto-rejecting an invoke with USER_PENDING. /// Must be less than the gateway's 30s invoke timeout so the agent @@ -45,9 +45,11 @@ struct BridgeClientInner { /// commands (read_file, run_command, etc.) on the local or remote machine. /// Uses the same WebSocket port as the operator connection (18789) but with /// a different role. +#[derive(Clone)] pub struct BridgeClient { inner: Arc>>, pending_invokes: Arc>>, + invoke_events: broadcast::Sender, /// Invoke IDs that were auto-rejected with USER_PENDING after the timeout. /// These invokes remain in pending_invokes so the user can still execute them, /// but the result must be sent as a chat message (gateway discards late results). 
@@ -60,6 +62,7 @@ impl BridgeClient { Self { inner: Arc::new(Mutex::new(None)), pending_invokes: Arc::new(Mutex::new(IndexMap::new())), + invoke_events: broadcast::channel(64).0, expired_invokes: Arc::new(Mutex::new(HashSet::new())), credentials: Arc::new(Mutex::new(None)), } @@ -67,10 +70,10 @@ impl BridgeClient { /// Connect to the gateway as a node via WebSocket. /// Uses the same URL as the operator connection but with `role: "node"`. - pub async fn connect( + pub async fn connect( &self, url: &str, - app: AppHandle, + app: AppHandle, creds: Option, ) -> Result<(), String> { self.disconnect().await?; @@ -104,6 +107,7 @@ impl BridgeClient { // Spawn reader task let inner_ref = Arc::clone(&self.inner); let invokes_ref = Arc::clone(&self.pending_invokes); + let invoke_events = self.invoke_events.clone(); let expired_ref = Arc::clone(&self.expired_invokes); let app_clone = app.clone(); @@ -111,16 +115,26 @@ impl BridgeClient { while let Some(msg) = rx.next().await { match msg { Ok(Message::Text(text)) => { - if let Ok(frame) = serde_json::from_str::(&text) { - Self::handle_frame( - frame, - &inner_ref, - &invokes_ref, - &expired_ref, - &app_clone, - ) - .await; - } + Self::handle_message_payload( + text.as_bytes(), + &inner_ref, + &invokes_ref, + &invoke_events, + &expired_ref, + &app_clone, + ) + .await; + } + Ok(Message::Binary(bytes)) => { + Self::handle_message_payload( + &bytes, + &inner_ref, + &invokes_ref, + &invoke_events, + &expired_ref, + &app_clone, + ) + .await; } Ok(Message::Close(_)) => { let _ = app_clone.emit( @@ -252,6 +266,10 @@ impl BridgeClient { Some((val, expired)) } + pub fn subscribe_invokes(&self) -> broadcast::Receiver { + self.invoke_events.subscribe() + } + // ── Private helpers ────────────────────────────────────────────── /// Send a request and wait for the response. @@ -332,7 +350,7 @@ impl BridgeClient { } /// Perform the connect handshake as a node. 
- async fn do_handshake(&self, _app: &AppHandle) -> Result<(), String> { + async fn do_handshake(&self, _app: &AppHandle) -> Result<(), String> { let creds = self.credentials.lock().await.clone(); let (token, device_id, signing_key, public_key_b64) = if let Some(c) = creds { @@ -442,13 +460,31 @@ impl BridgeClient { Ok(()) } + async fn handle_message_payload( + payload: &[u8], + inner_ref: &Arc>>, + invokes_ref: &Arc>>, + invoke_events: &broadcast::Sender, + expired_ref: &Arc>>, + app: &AppHandle, + ) { + let Ok(text) = std::str::from_utf8(payload) else { + return; + }; + if let Ok(frame) = serde_json::from_str::(text) { + Self::handle_frame(frame, inner_ref, invokes_ref, invoke_events, expired_ref, app) + .await; + } + } + /// Handle a single parsed JSON frame from the gateway. - async fn handle_frame( + async fn handle_frame( frame: Value, inner_ref: &Arc>>, invokes_ref: &Arc>>, + invoke_events: &broadcast::Sender, expired_ref: &Arc>>, - app: &AppHandle, + app: &AppHandle, ) { let frame_type = frame.get("type").and_then(|v| v.as_str()).unwrap_or(""); @@ -571,7 +607,8 @@ impl BridgeClient { return; } - let _ = app.emit("doctor:invoke", invoke_payload); + let _ = app.emit("doctor:invoke", invoke_payload.clone()); + let _ = invoke_events.send(invoke_payload); // Spawn auto-reject timer: after INVOKE_AUTO_REJECT_SECS, send // USER_PENDING error so the agent knows the user is still reviewing diff --git a/src-tauri/src/commands/preferences.rs b/src-tauri/src/commands/preferences.rs index 150fb15d..92462939 100644 --- a/src-tauri/src/commands/preferences.rs +++ b/src-tauri/src/commands/preferences.rs @@ -14,6 +14,10 @@ use crate::models::{resolve_paths, OpenClawPaths}; pub struct AppPreferences { #[serde(default)] pub show_ssh_transfer_speed_ui: bool, + #[serde(default)] + pub remote_doctor_gateway_url: Option, + #[serde(default)] + pub remote_doctor_gateway_auth_token: Option, } #[derive(Debug, Clone, Serialize, Deserialize, Default)] @@ -22,6 +26,10 @@ struct 
StoredAppPreferences { #[serde(default)] show_ssh_transfer_speed_ui: bool, #[serde(default)] + remote_doctor_gateway_url: Option, + #[serde(default)] + remote_doctor_gateway_auth_token: Option, + #[serde(default)] show_clawpal_logs_ui: bool, #[serde(default)] show_gateway_logs_ui: bool, @@ -38,12 +46,34 @@ fn app_preferences_path(paths: &OpenClawPaths) -> std::path::PathBuf { fn app_preferences_from_stored(stored: &StoredAppPreferences) -> AppPreferences { AppPreferences { show_ssh_transfer_speed_ui: stored.show_ssh_transfer_speed_ui, + remote_doctor_gateway_url: normalize_remote_doctor_gateway_url( + stored.remote_doctor_gateway_url.clone(), + ), + remote_doctor_gateway_auth_token: normalize_remote_doctor_gateway_auth_token( + stored.remote_doctor_gateway_auth_token.clone(), + ), } } +fn normalize_remote_doctor_gateway_url(value: Option) -> Option { + value + .map(|item| item.trim().to_string()) + .filter(|item| !item.is_empty()) +} + +fn normalize_remote_doctor_gateway_auth_token(value: Option) -> Option { + value + .map(|item| item.trim().to_string()) + .filter(|item| !item.is_empty()) +} + fn load_stored_preferences_from_paths(paths: &OpenClawPaths) -> StoredAppPreferences { let path = app_preferences_path(paths); let mut prefs = read_json::(&path).unwrap_or_default(); + prefs.remote_doctor_gateway_url = + normalize_remote_doctor_gateway_url(prefs.remote_doctor_gateway_url); + prefs.remote_doctor_gateway_auth_token = + normalize_remote_doctor_gateway_auth_token(prefs.remote_doctor_gateway_auth_token); prefs.bug_report = normalize_bug_report_settings(prefs.bug_report); prefs } @@ -67,6 +97,11 @@ fn save_app_preferences_from_paths( ) -> Result<(), String> { let mut stored = load_stored_preferences_from_paths(paths); stored.show_ssh_transfer_speed_ui = prefs.show_ssh_transfer_speed_ui; + stored.remote_doctor_gateway_url = + normalize_remote_doctor_gateway_url(prefs.remote_doctor_gateway_url.clone()); + stored.remote_doctor_gateway_auth_token = 
normalize_remote_doctor_gateway_auth_token( + prefs.remote_doctor_gateway_auth_token.clone(), + ); save_stored_preferences_from_paths(paths, &stored) } @@ -112,6 +147,29 @@ pub fn set_ssh_transfer_speed_ui_preference(show_ui: bool) -> Result, +) -> Result { + let paths = resolve_paths(); + let mut prefs = load_app_preferences_from_paths(&paths); + prefs.remote_doctor_gateway_url = normalize_remote_doctor_gateway_url(gateway_url); + save_app_preferences_from_paths(&paths, &prefs)?; + Ok(prefs) +} + +#[tauri::command] +pub fn set_remote_doctor_gateway_auth_token_preference( + auth_token: Option, +) -> Result { + let paths = resolve_paths(); + let mut prefs = load_app_preferences_from_paths(&paths); + prefs.remote_doctor_gateway_auth_token = + normalize_remote_doctor_gateway_auth_token(auth_token); + save_app_preferences_from_paths(&paths, &prefs)?; + Ok(prefs) +} + // --------------------------------------------------------------------------- // Per-session model overrides (in-memory only) // --------------------------------------------------------------------------- @@ -200,6 +258,8 @@ mod tests { &paths, &AppPreferences { show_ssh_transfer_speed_ui: false, + remote_doctor_gateway_url: None, + remote_doctor_gateway_auth_token: None, }, ) .unwrap(); @@ -223,6 +283,8 @@ mod tests { &paths, &AppPreferences { show_ssh_transfer_speed_ui: true, + remote_doctor_gateway_url: Some("ws://doctor.example.test:18789".into()), + remote_doctor_gateway_auth_token: Some("doctor-test-token".into()), }, ) .unwrap(); @@ -241,6 +303,14 @@ mod tests { let app_prefs = load_app_preferences_from_paths(&paths); assert!(app_prefs.show_ssh_transfer_speed_ui); + assert_eq!( + app_prefs.remote_doctor_gateway_url.as_deref(), + Some("ws://doctor.example.test:18789") + ); + assert_eq!( + app_prefs.remote_doctor_gateway_auth_token.as_deref(), + Some("doctor-test-token") + ); let _ = std::fs::remove_dir_all(root); } @@ -252,6 +322,27 @@ mod tests { let app_prefs = 
load_app_preferences_from_paths(&paths); assert!(app_prefs.show_ssh_transfer_speed_ui); + assert!(app_prefs.remote_doctor_gateway_url.is_none()); + assert!(app_prefs.remote_doctor_gateway_auth_token.is_none()); + let _ = std::fs::remove_dir_all(root); + } + + #[test] + fn saving_remote_doctor_gateway_url_trims_blank_values() { + let (paths, root) = test_paths(); + save_app_preferences_from_paths( + &paths, + &AppPreferences { + show_ssh_transfer_speed_ui: false, + remote_doctor_gateway_url: Some(" ".into()), + remote_doctor_gateway_auth_token: Some(" ".into()), + }, + ) + .unwrap(); + + let app_prefs = load_app_preferences_from_paths(&paths); + assert!(app_prefs.remote_doctor_gateway_url.is_none()); + assert!(app_prefs.remote_doctor_gateway_auth_token.is_none()); let _ = std::fs::remove_dir_all(root); } diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index b0491a7c..77466b6a 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -55,7 +55,9 @@ use crate::commands::{ restart_gateway, restore_from_backup, rollback, run_doctor_command, run_openclaw_upgrade, set_active_clawpal_data_dir, set_active_openclaw_home, set_agent_model, set_bug_report_settings, set_global_model, set_session_model_override, - set_ssh_transfer_speed_ui_preference, setup_agent_identity, sftp_list_dir, sftp_read_file, + set_remote_doctor_gateway_auth_token_preference, set_remote_doctor_gateway_url_preference, + set_ssh_transfer_speed_ui_preference, + setup_agent_identity, sftp_list_dir, sftp_read_file, sftp_remove_file, sftp_write_file, ssh_connect, ssh_connect_with_passphrase, ssh_disconnect, ssh_exec, ssh_status, start_watchdog, stop_watchdog, test_model_profile, trigger_cron_job, uninstall_watchdog, upsert_model_profile, upsert_ssh_host, @@ -66,6 +68,7 @@ use crate::install::commands::{ }; use crate::install::session_store::InstallSessionStore; use crate::node_client::NodeClient; +use crate::remote_doctor::start_remote_doctor_repair; use crate::ssh::SshConnectionPool; pub mod 
access_discovery; @@ -86,6 +89,7 @@ pub mod openclaw_doc_resolver; pub mod path_fix; pub mod prompt_templates; pub mod recipe; +pub mod remote_doctor; pub mod ssh; pub fn run() { @@ -181,6 +185,8 @@ pub fn run() { repair_primary_via_rescue, set_global_model, set_agent_model, + set_remote_doctor_gateway_auth_token_preference, + set_remote_doctor_gateway_url_preference, set_ssh_transfer_speed_ui_preference, list_bindings, list_ssh_hosts, @@ -213,6 +219,7 @@ pub fn run() { remote_restart_gateway, remote_diagnose_doctor_assistant, remote_repair_doctor_assistant, + start_remote_doctor_repair, remote_get_rescue_bot_status, remote_manage_rescue_bot, remote_diagnose_primary_via_rescue, diff --git a/src-tauri/src/node_client.rs b/src-tauri/src/node_client.rs index 5f81dec9..d5077127 100644 --- a/src-tauri/src/node_client.rs +++ b/src-tauri/src/node_client.rs @@ -8,7 +8,7 @@ use ed25519_dalek::{Signer, SigningKey}; use futures_util::stream::SplitSink; use futures_util::{SinkExt, StreamExt}; use serde_json::{json, Value}; -use tauri::{AppHandle, Emitter}; +use tauri::{AppHandle, Emitter, Runtime}; use tokio::net::TcpStream; use tokio::sync::{oneshot, Mutex}; use tokio_tungstenite::{connect_async, tungstenite::Message, MaybeTlsStream, WebSocketStream}; @@ -49,6 +49,7 @@ struct NodeClientInner { pub struct NodeClient { inner: Arc>>, credentials: Arc>>, + pending_chat_final: Arc>>>, } impl NodeClient { @@ -56,13 +57,14 @@ impl NodeClient { Self { inner: Arc::new(Mutex::new(None)), credentials: Arc::new(Mutex::new(None)), + pending_chat_final: Arc::new(Mutex::new(None)), } } - pub async fn connect( + pub async fn connect( &self, url: &str, - app: AppHandle, + app: AppHandle, creds: Option, ) -> Result<(), String> { // Disconnect existing connection if any @@ -92,14 +94,28 @@ impl NodeClient { // Spawn reader task let inner_ref = Arc::clone(&self.inner); let app_clone = app.clone(); + let chat_ref = Arc::clone(&self.pending_chat_final); tokio::spawn(async move { while let 
Some(msg) = rx.next().await { match msg { Ok(Message::Text(text)) => { - if let Ok(frame) = serde_json::from_str::(&text) { - Self::handle_frame(frame, &inner_ref, &app_clone).await; - } + Self::handle_message_payload( + text.as_bytes(), + &inner_ref, + &chat_ref, + &app_clone, + ) + .await; + } + Ok(Message::Binary(bytes)) => { + Self::handle_message_payload( + &bytes, + &inner_ref, + &chat_ref, + &app_clone, + ) + .await; } Ok(Message::Close(_)) => { let _ = app_clone @@ -229,7 +245,71 @@ impl NodeClient { Ok(()) } - async fn do_handshake(&self, _app: &AppHandle) -> Result<(), String> { + pub async fn run_agent_request( + &self, + agent_id: &str, + session_key: &str, + message: &str, + ) -> Result { + let rx = self + .start_agent_request(agent_id, session_key, message) + .await?; + self.await_agent_final(rx).await + } + + pub async fn start_agent_request( + &self, + agent_id: &str, + session_key: &str, + message: &str, + ) -> Result, String> { + let (tx, rx) = oneshot::channel::(); + { + let mut guard = self.pending_chat_final.lock().await; + if guard.is_some() { + return Err("Another agent request is already waiting for a final response".into()); + } + *guard = Some(tx); + } + + let send_result = self + .send_request_fire( + "agent", + json!({ + "message": message, + "idempotencyKey": uuid::Uuid::new_v4().to_string(), + "agentId": agent_id, + "sessionKey": session_key, + }), + ) + .await; + + if let Err(error) = send_result { + *self.pending_chat_final.lock().await = None; + return Err(error); + } + + Ok(rx) + } + + pub async fn await_agent_final( + &self, + rx: oneshot::Receiver, + ) -> Result { + match tokio::time::timeout(std::time::Duration::from_secs(180), rx).await { + Ok(Ok(text)) => Ok(text), + Ok(Err(_)) => { + *self.pending_chat_final.lock().await = None; + Err("Agent request ended before a final chat response was received".into()) + } + Err(_) => { + *self.pending_chat_final.lock().await = None; + Err("Timed out waiting for final agent 
response".into()) + } + } + } + + async fn do_handshake(&self, _app: &AppHandle) -> Result<(), String> { let creds = self.credentials.lock().await.clone(); let (token, device_id, signing_key, public_key_b64) = if let Some(c) = creds { @@ -335,10 +415,25 @@ impl NodeClient { Ok(()) } - async fn handle_frame( + async fn handle_message_payload( + payload: &[u8], + inner_ref: &Arc>>, + chat_ref: &Arc>>>, + app: &AppHandle, + ) { + let Ok(text) = std::str::from_utf8(payload) else { + return; + }; + if let Ok(frame) = serde_json::from_str::(text) { + Self::handle_frame(frame, inner_ref, chat_ref, app).await; + } + } + + async fn handle_frame( frame: Value, inner_ref: &Arc>>, - app: &AppHandle, + chat_ref: &Arc>>>, + app: &AppHandle, ) { let frame_type = frame.get("type").and_then(|v| v.as_str()).unwrap_or(""); @@ -378,6 +473,9 @@ impl NodeClient { .and_then(|t| t.as_str()) .unwrap_or(""); if is_final { + if let Some(waiter) = chat_ref.lock().await.take() { + let _ = waiter.send(text.to_string()); + } let _ = app.emit("doctor:chat-final", json!({"text": text})); } else { let _ = app.emit("doctor:chat-delta", json!({"text": text})); diff --git a/src-tauri/src/remote_doctor.rs b/src-tauri/src/remote_doctor.rs new file mode 100644 index 00000000..d813559c --- /dev/null +++ b/src-tauri/src/remote_doctor.rs @@ -0,0 +1,4340 @@ +use std::fs::{create_dir_all, OpenOptions}; +use std::io::Write; +use std::path::PathBuf; +use std::process::Command; +use std::time::Instant; + +use base64::Engine; +use ed25519_dalek::pkcs8::EncodePrivateKey; +use ed25519_dalek::SigningKey; +use serde::{Deserialize, Serialize}; +use serde_json::{json, Value}; +use sha2::{Digest, Sha256}; +use tauri::{AppHandle, Emitter, Manager, Runtime, State}; +use uuid::Uuid; + +use crate::bridge_client::BridgeClient; +use crate::cli_runner::{get_active_openclaw_home_override, run_openclaw, run_openclaw_remote}; +use crate::commands::logs::log_dev; +use crate::commands::preferences::load_app_preferences_from_paths; 
+use crate::commands::{agent::create_agent, agent::setup_agent_identity}; +use crate::commands::{ + diagnose_primary_via_rescue, manage_rescue_bot, read_raw_config, remote_diagnose_primary_via_rescue, + remote_manage_rescue_bot, remote_read_raw_config, remote_restart_gateway, remote_write_raw_config, + restart_gateway, RescuePrimaryDiagnosisResult, +}; +use crate::config_io::read_openclaw_config; +use crate::models::resolve_paths; +use crate::node_client::{GatewayCredentials, NodeClient}; +use crate::ssh::SshConnectionPool; + +const DEFAULT_GATEWAY_HOST: &str = "127.0.0.1"; +const DEFAULT_GATEWAY_PORT: u16 = 18789; +const DEFAULT_DETECT_METHOD: &str = "doctor.get_detection_plan"; +const DEFAULT_REPAIR_METHOD: &str = "doctor.get_repair_plan"; +const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; +const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; +const REMOTE_DOCTOR_AGENT_ID: &str = "clawpal-remote-doctor"; + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +enum TargetLocation { + LocalOpenclaw, + RemoteOpenclaw, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +enum PlanKind { + Detect, + Investigate, + Repair, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct PlanCommand { + argv: Vec, + timeout_sec: Option, + purpose: Option, + continue_on_failure: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct PlanResponse { + plan_id: String, + plan_kind: PlanKind, + summary: String, + #[serde(default)] + commands: Vec, + #[serde(default)] + healthy: bool, + #[serde(default)] + done: bool, + #[serde(default)] + success: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct CommandResult { + argv: Vec, + exit_code: Option, + stdout: String, + stderr: String, + duration_ms: u64, + timed_out: bool, +} + +#[derive(Debug, 
Clone, Copy, PartialEq, Eq)] +enum RemoteDoctorProtocol { + AgentPlanner, + LegacyDoctor, + ClawpalServer, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct ClawpalServerPlanResponse { + request_id: String, + plan_id: String, + summary: String, + #[serde(default)] + steps: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct ClawpalServerPlanStep { + #[serde(rename = "type")] + step_type: String, + path: Option, + value: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteDoctorRepairResult { + mode: String, + status: String, + round: usize, + phase: String, + last_plan_kind: String, + latest_diagnosis_healthy: bool, + last_command: Option>, + session_id: String, + message: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct RemoteDoctorProgressEvent { + session_id: String, + mode: String, + round: usize, + phase: String, + line: String, + plan_kind: Option, + command: Option>, +} + +#[derive(Debug, Clone)] +struct RemoteDoctorGatewayConfig { + url: String, + auth_token_override: Option, +} + +#[derive(Debug, Clone)] +struct ConfigExcerptContext { + config_excerpt: Value, + config_excerpt_raw: Option, + config_parse_error: Option, +} + +#[derive(Debug, Clone)] +struct RepairRoundObservation { + round: usize, + step_types: Vec, + diagnosis_signature: String, + issue_summaries: Vec, +} + +impl RepairRoundObservation { + fn new(round: usize, step_types: &[String], diagnosis: &RescuePrimaryDiagnosisResult) -> Self { + let issue_summaries = diagnosis_issue_summaries(diagnosis); + let diagnosis_signature = + serde_json::to_string(&issue_summaries).unwrap_or_else(|_| "[]".to_string()); + Self { + round, + step_types: step_types.to_vec(), + diagnosis_signature, + issue_summaries, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all 
= "camelCase")] +struct StoredRemoteDoctorIdentity { + version: u8, + created_at_ms: u64, + device_id: String, + private_key_pem: String, +} + +fn parse_target_location(raw: &str) -> Result { + match raw { + "local_openclaw" => Ok(TargetLocation::LocalOpenclaw), + "remote_openclaw" => Ok(TargetLocation::RemoteOpenclaw), + other => Err(format!("Unsupported target location: {other}")), + } +} + +fn remote_doctor_log_dir() -> PathBuf { + resolve_paths().clawpal_dir.join("doctor").join("remote") +} + +fn append_remote_doctor_log(session_id: &str, payload: Value) { + let dir = remote_doctor_log_dir(); + if create_dir_all(&dir).is_err() { + return; + } + let path = dir.join(format!("{session_id}.jsonl")); + let Ok(mut file) = OpenOptions::new().create(true).append(true).open(path) else { + return; + }; + let _ = writeln!(file, "{}", payload); +} + +fn emit_progress( + app: Option<&AppHandle>, + session_id: &str, + round: usize, + phase: &str, + line: impl Into, + plan_kind: Option, + command: Option>, +) { + let payload = RemoteDoctorProgressEvent { + session_id: session_id.to_string(), + mode: "remoteDoctor".into(), + round, + phase: phase.to_string(), + line: line.into(), + plan_kind: plan_kind.map(|kind| match kind { + PlanKind::Detect => "detect".into(), + PlanKind::Investigate => "investigate".into(), + PlanKind::Repair => "repair".into(), + }), + command, + }; + if let Some(app) = app { + let _ = app.emit("doctor:remote-repair-progress", payload); + } +} + +fn remote_doctor_gateway_config() -> Result { + let paths = resolve_paths(); + let app_preferences = load_app_preferences_from_paths(&paths); + if let Some(url) = app_preferences.remote_doctor_gateway_url { + return Ok(RemoteDoctorGatewayConfig { + url, + auth_token_override: app_preferences.remote_doctor_gateway_auth_token, + }); + } + let configured_port = std::fs::read_to_string(&paths.config_path) + .ok() + .and_then(|text| serde_json::from_str::(&text).ok()) + .and_then(|config| { + config + .get("gateway") 
+ .and_then(|gateway| gateway.get("port")) + .and_then(|value| value.as_u64()) + }) + .map(|value| value as u16) + .unwrap_or(DEFAULT_GATEWAY_PORT); + Ok(RemoteDoctorGatewayConfig { + url: format!("ws://{DEFAULT_GATEWAY_HOST}:{configured_port}"), + auth_token_override: app_preferences.remote_doctor_gateway_auth_token, + }) +} + +fn remote_doctor_gateway_credentials( + auth_token_override: Option<&str>, +) -> Result, String> { + let Some(token) = auth_token_override.filter(|value| !value.trim().is_empty()) else { + return Ok(None); + }; + let identity = load_or_create_remote_doctor_identity()?; + Ok(Some(GatewayCredentials { + token: token.to_string(), + device_id: identity.device_id, + private_key_pem: identity.private_key_pem, + })) +} + +fn remote_doctor_identity_path() -> PathBuf { + resolve_paths() + .clawpal_dir + .join("remote-doctor") + .join("device-identity.json") +} + +fn load_or_create_remote_doctor_identity() -> Result { + let path = remote_doctor_identity_path(); + if let Ok(text) = std::fs::read_to_string(&path) { + if let Ok(identity) = serde_json::from_str::(&text) { + if identity.version == 1 + && !identity.device_id.trim().is_empty() + && !identity.private_key_pem.trim().is_empty() + { + return Ok(identity); + } + } + } + + let parent = path + .parent() + .ok_or("Failed to resolve remote doctor identity directory")?; + create_dir_all(parent) + .map_err(|e| format!("Failed to create remote doctor identity dir: {e}"))?; + + let mut secret = [0u8; 32]; + getrandom::getrandom(&mut secret) + .map_err(|e| format!("Failed to generate remote doctor device secret: {e}"))?; + let signing_key = SigningKey::from_bytes(&secret); + let raw_public = signing_key.verifying_key().to_bytes(); + let device_id = Sha256::digest(raw_public) + .iter() + .map(|b| format!("{b:02x}")) + .collect::(); + let private_key_pem = signing_key + .to_pkcs8_pem(Default::default()) + .map_err(|e| format!("Failed to encode remote doctor private key: {e}"))? 
+ .to_string(); + let created_at_ms = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map_err(|e| format!("Failed to get system time: {e}"))? + .as_millis() as u64; + let identity = StoredRemoteDoctorIdentity { + version: 1, + created_at_ms, + device_id, + private_key_pem, + }; + let text = serde_json::to_string_pretty(&identity) + .map_err(|e| format!("Failed to serialize remote doctor identity: {e}"))?; + std::fs::write(&path, format!("{text}\n")) + .map_err(|e| format!("Failed to persist remote doctor identity: {e}"))?; + Ok(identity) +} + +fn detect_method_name() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_DETECT_METHOD") + .unwrap_or_else(|_| DEFAULT_DETECT_METHOD.to_string()) +} + +fn repair_method_name() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_REPAIR_METHOD") + .unwrap_or_else(|_| DEFAULT_REPAIR_METHOD.to_string()) +} + +fn configured_remote_doctor_protocol() -> Option { + match std::env::var("CLAWPAL_REMOTE_DOCTOR_PROTOCOL") + .ok() + .as_deref() + .map(str::trim) + { + Some("agent") => Some(RemoteDoctorProtocol::AgentPlanner), + Some("legacy") | Some("legacy_doctor") => Some(RemoteDoctorProtocol::LegacyDoctor), + Some("clawpal_server") => Some(RemoteDoctorProtocol::ClawpalServer), + _ => None, + } +} + +fn default_remote_doctor_protocol() -> RemoteDoctorProtocol { + RemoteDoctorProtocol::AgentPlanner +} + +fn protocol_requires_bridge(protocol: RemoteDoctorProtocol) -> bool { + matches!(protocol, RemoteDoctorProtocol::AgentPlanner) +} + +fn protocol_runs_rescue_preflight(protocol: RemoteDoctorProtocol) -> bool { + matches!(protocol, RemoteDoctorProtocol::LegacyDoctor) +} + +fn next_agent_plan_kind(diagnosis: &RescuePrimaryDiagnosisResult) -> PlanKind { + next_agent_plan_kind_for_round(diagnosis, &[]) +} + +fn next_agent_plan_kind_for_round( + diagnosis: &RescuePrimaryDiagnosisResult, + previous_results: &[CommandResult], +) -> PlanKind { + if diagnosis + .issues + .iter() + .any(|issue| issue.code == 
"primary.config.unreadable") + { + if !previous_results.is_empty() { + return PlanKind::Repair; + } + PlanKind::Investigate + } else { + PlanKind::Repair + } +} + +fn remote_doctor_agent_id() -> &'static str { + REMOTE_DOCTOR_AGENT_ID +} + +fn remote_doctor_agent_session_key(session_id: &str) -> String { + format!("agent:{}:{session_id}", remote_doctor_agent_id()) +} + +fn remote_doctor_agent_workspace_files() -> [(&'static str, &'static str); 4] { + [ + ( + "AGENTS.md", + "# Remote Doctor\nUse this workspace only for ClawPal remote doctor planning sessions.\nReturn structured, operational answers.\n", + ), + ( + "BOOTSTRAP.md", + "Bootstrap is already complete for this workspace.\nDo not ask who you are or who the user is.\nUse IDENTITY.md and USER.md as the canonical identity context.\n", + ), + ( + "USER.md", + "- Name: ClawPal Desktop\n- Role: desktop repair orchestrator\n- Preferences: concise, operational, no bootstrap chatter\n", + ), + ( + "HEARTBEAT.md", + "Status: active remote-doctor planning workspace.\n", + ), + ] +} + +fn gateway_url_is_local(url: &str) -> bool { + let rest = url + .split_once("://") + .map(|(_, remainder)| remainder) + .unwrap_or(url); + let host_port = rest.split('/').next().unwrap_or(rest); + let host = host_port + .strip_prefix('[') + .and_then(|value| value.split_once(']').map(|(host, _)| host)) + .unwrap_or_else(|| host_port.split(':').next().unwrap_or(host_port)); + matches!(host, "127.0.0.1" | "localhost") +} + +fn ensure_local_remote_doctor_agent_ready() -> Result<(), String> { + let agent_id = remote_doctor_agent_id().to_string(); + if let Err(error) = create_agent(agent_id.clone(), None, Some(true)) { + if !error.contains("already exists") { + return Err(format!("Failed to create remote doctor agent: {error}")); + } + } + + setup_agent_identity( + agent_id.clone(), + "ClawPal Remote Doctor".to_string(), + None, + )?; + + let paths = resolve_paths(); + let cfg = read_openclaw_config(&paths)?; + let workspace = 
clawpal_core::doctor::resolve_agent_workspace_from_config(&cfg, &agent_id, None) + .map(|path| shellexpand::tilde(&path).to_string())?; + create_dir_all(&workspace) + .map_err(|error| format!("Failed to create remote doctor workspace: {error}"))?; + + for (file_name, content) in remote_doctor_agent_workspace_files() { + std::fs::write(PathBuf::from(&workspace).join(file_name), content) + .map_err(|error| format!("Failed to write remote doctor {file_name}: {error}"))?; + } + + Ok(()) +} + +async fn ensure_agent_bridge_connected( + app: &AppHandle, + bridge: &BridgeClient, + gateway_url: &str, + auth_token_override: Option<&str>, + session_id: &str, +) { + if bridge.is_connected().await { + return; + } + + let connect_result = bridge + .connect( + gateway_url, + app.clone(), + remote_doctor_gateway_credentials(auth_token_override) + .ok() + .flatten(), + ) + .await; + if let Err(error) = connect_result { + append_remote_doctor_log( + session_id, + json!({ + "event": "bridge_connect_failed", + "reason": error, + }), + ); + } +} + +async fn ensure_remote_target_connected( + pool: &SshConnectionPool, + instance_id: &str, +) -> Result<(), String> { + let candidate_ids = remote_target_host_id_candidates(instance_id); + if candidate_ids.is_empty() { + return Ok(()); + } + for candidate in &candidate_ids { + if pool.is_connected(candidate).await { + return Ok(()); + } + } + + let hosts = crate::commands::ssh::read_hosts_from_registry()?; + let host = hosts + .into_iter() + .find(|candidate| candidate_ids.iter().any(|id| id == &candidate.id)) + .ok_or_else(|| format!("No SSH host config with id: {}", candidate_ids[0]))?; + if let Some(passphrase) = host.passphrase.as_deref().filter(|value| !value.is_empty()) { + pool.connect_with_passphrase(&host, Some(passphrase)).await + } else { + pool.connect(&host).await + } +} + +fn remote_target_host_id_candidates(instance_id: &str) -> Vec { + let mut candidates = Vec::new(); + let trimmed = instance_id.trim(); + if 
!trimmed.is_empty() { + candidates.push(trimmed.to_string()); + } + if let Some(stripped) = trimmed.strip_prefix("ssh:").map(str::trim) { + if !stripped.is_empty() && !candidates.iter().any(|value| value == stripped) { + candidates.push(stripped.to_string()); + } + } + candidates +} + +fn primary_remote_target_host_id(instance_id: &str) -> Result { + remote_target_host_id_candidates(instance_id) + .into_iter() + .next() + .ok_or_else(|| "Remote Doctor repair requires an ssh instance id".to_string()) +} + +fn is_unknown_method_error(error: &str) -> bool { + error.contains("unknown method") + || error.contains("\"code\":\"INVALID_REQUEST\"") + || error.contains("\"code\": \"INVALID_REQUEST\"") +} + +fn result_for_completion( + session_id: &str, + round: usize, + last_plan_kind: PlanKind, + last_command: Option>, + message: &str, +) -> RemoteDoctorRepairResult { + RemoteDoctorRepairResult { + mode: "remoteDoctor".into(), + status: "completed".into(), + round, + phase: "completed".into(), + last_plan_kind: match last_plan_kind { + PlanKind::Detect => "detect".into(), + PlanKind::Investigate => "investigate".into(), + PlanKind::Repair => "repair".into(), + }, + latest_diagnosis_healthy: true, + last_command, + session_id: session_id.to_string(), + message: message.into(), + } +} + +fn result_for_completion_with_warnings( + session_id: &str, + round: usize, + last_plan_kind: PlanKind, + last_command: Option>, + message: &str, +) -> RemoteDoctorRepairResult { + RemoteDoctorRepairResult { + mode: "remoteDoctor".into(), + status: "completed_with_warnings".into(), + round, + phase: "completed".into(), + last_plan_kind: match last_plan_kind { + PlanKind::Detect => "detect".into(), + PlanKind::Investigate => "investigate".into(), + PlanKind::Repair => "repair".into(), + }, + latest_diagnosis_healthy: false, + last_command, + session_id: session_id.to_string(), + message: message.into(), + } +} + +fn diagnosis_has_only_non_auto_fixable_issues(diagnosis: 
&RescuePrimaryDiagnosisResult) -> bool { + !diagnosis.issues.is_empty() && diagnosis.issues.iter().all(|issue| !issue.auto_fixable) +} + +async fn run_rescue_diagnosis( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + match target_location { + TargetLocation::LocalOpenclaw => diagnose_primary_via_rescue(None, None).await, + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_diagnose_primary_via_rescue(app.state::(), host_id, None, None) + .await + } + } +} + +async fn read_target_config( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + let raw = match target_location { + TargetLocation::LocalOpenclaw => read_raw_config()?, + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_read_raw_config(app.state::(), host_id).await? + } + }; + serde_json::from_str::(&raw).map_err(|error| format!("Failed to parse target config: {error}")) +} + +async fn read_target_config_raw( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + match target_location { + TargetLocation::LocalOpenclaw => read_raw_config(), + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_read_raw_config(app.state::(), host_id).await + } + } +} + +fn build_config_excerpt_context(raw: &str) -> ConfigExcerptContext { + match serde_json::from_str::(raw) { + Ok(config_excerpt) => ConfigExcerptContext { + config_excerpt, + config_excerpt_raw: None, + config_parse_error: None, + }, + Err(error) => ConfigExcerptContext { + config_excerpt: Value::Null, + config_excerpt_raw: Some(raw.to_string()), + config_parse_error: Some(format!("Failed to parse target config: {error}")), + }, + } +} + +fn config_excerpt_log_summary(context: &ConfigExcerptContext) -> Value { + json!({ + "configExcerptPresent": !context.config_excerpt.is_null(), + 
"configExcerptBytes": serde_json::to_string(&context.config_excerpt).ok().map(|text| text.len()).unwrap_or(0), + "configExcerptRawPresent": context.config_excerpt_raw.as_ref().map(|text| !text.trim().is_empty()).unwrap_or(false), + "configExcerptRawBytes": context.config_excerpt_raw.as_ref().map(|text| text.len()).unwrap_or(0), + "configParseError": context.config_parse_error, + }) +} + +fn empty_config_excerpt_context() -> ConfigExcerptContext { + ConfigExcerptContext { + config_excerpt: Value::Null, + config_excerpt_raw: None, + config_parse_error: None, + } +} + +fn empty_diagnosis() -> RescuePrimaryDiagnosisResult { + serde_json::from_value(json!({ + "status": "healthy", + "checkedAt": "2026-03-18T00:00:00Z", + "targetProfile": "primary", + "rescueProfile": "rescue", + "summary": { + "status": "healthy", + "headline": "Healthy", + "recommendedAction": null, + "fixableIssueCount": 0, + "selectedFixIssueIds": [] + }, + "issues": [], + "sections": [] + })) + .expect("empty diagnosis should deserialize") +} + +async fn write_target_config( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, + config: &Value, +) -> Result<(), String> { + let text = serde_json::to_string_pretty(config).map_err(|error| error.to_string())?; + let validated = clawpal_core::config::validate_config_json(&text) + .map_err(|error| format!("Invalid config after remote doctor patch: {error}"))?; + let validated_text = + serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; + match target_location { + TargetLocation::LocalOpenclaw => { + let paths = resolve_paths(); + crate::config_io::write_text(&paths.config_path, &validated_text)?; + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_write_raw_config(app.state::(), host_id, validated_text) + .await?; + } + } + Ok(()) +} + +async fn write_target_config_raw( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, + 
text: &str, +) -> Result<(), String> { + let validated = clawpal_core::config::validate_config_json(text) + .map_err(|error| format!("Invalid raw config payload: {error}"))?; + let validated_text = + serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; + match target_location { + TargetLocation::LocalOpenclaw => { + let paths = resolve_paths(); + crate::config_io::write_text(&paths.config_path, &validated_text)?; + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_write_raw_config(app.state::(), host_id, validated_text) + .await?; + } + } + Ok(()) +} + +async fn restart_target_gateway( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result<(), String> { + match target_location { + TargetLocation::LocalOpenclaw => { + restart_gateway().await?; + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_restart_gateway(app.state::(), host_id).await?; + } + } + Ok(()) +} + +fn diagnosis_is_healthy(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { + diagnosis.status == "healthy" && diagnosis.summary.status == "healthy" && diagnosis.issues.is_empty() +} + +fn diagnosis_context(diagnosis: &RescuePrimaryDiagnosisResult) -> Value { + json!({ + "status": diagnosis.status, + "summary": { + "status": diagnosis.summary.status, + "headline": diagnosis.summary.headline, + "recommendedAction": diagnosis.summary.recommended_action, + "fixableIssueCount": diagnosis.summary.fixable_issue_count, + "selectedFixIssueIds": diagnosis.summary.selected_fix_issue_ids, + }, + "issues": diagnosis.issues, + "sections": diagnosis.sections, + }) +} + +fn diagnosis_issue_summaries(diagnosis: &RescuePrimaryDiagnosisResult) -> Vec { + diagnosis + .issues + .iter() + .map(|issue| { + json!({ + "id": issue.id, + "code": issue.code, + "severity": issue.severity, + "title": issue.message, + "target": issue.source, + "autoFixable": 
issue.auto_fixable, + "fixHint": issue.fix_hint, + }) + }) + .collect() +} + +fn diagnosis_missing_rescue_profile(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { + diagnosis + .issues + .iter() + .any(|issue| issue.code == "rescue.profile.missing") +} + +fn diagnosis_unhealthy_rescue_gateway(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { + diagnosis + .issues + .iter() + .any(|issue| issue.code == "rescue.gateway.unhealthy") +} + +fn rescue_setup_command_result( + action: &str, + profile: &str, + configured: bool, + active: bool, + runtime_state: &str, +) -> CommandResult { + CommandResult { + argv: vec!["manage_rescue_bot".into(), action.into(), profile.into()], + exit_code: Some(0), + stdout: format!( + "configured={} active={} runtimeState={}", + configured, active, runtime_state + ), + stderr: String::new(), + duration_ms: 0, + timed_out: false, + } +} + +fn rescue_bot_manage_command_result(result: &crate::commands::RescueBotManageResult) -> CommandResult { + CommandResult { + argv: vec![ + "manage_rescue_bot".into(), + result.action.clone(), + result.profile.clone(), + ], + exit_code: Some(if result.active || result.configured { 0 } else { 1 }), + stdout: format!( + "configured={} active={} runtimeState={} rescuePort={} mainPort={} commands={}", + result.configured, + result.active, + result.runtime_state, + result.rescue_port, + result.main_port, + result.commands.len() + ), + stderr: String::new(), + duration_ms: 0, + timed_out: false, + } +} + +fn rescue_activation_diagnostic_commands(profile: &str) -> Vec> { + vec![ + vec!["manage_rescue_bot".into(), "status".into(), profile.into()], + vec![ + "openclaw".into(), + "--profile".into(), + profile.into(), + "gateway".into(), + "status".into(), + ], + vec![ + "openclaw".into(), + "--profile".into(), + profile.into(), + "config".into(), + "get".into(), + "gateway.port".into(), + "--json".into(), + ], + ] +} + +fn rescue_activation_error_message( + profile: &str, + configured: bool, + runtime_state: 
&str, + suggested_checks: &[String], +) -> String { + let suffix = if suggested_checks.is_empty() { + String::new() + } else { + format!(" Suggested checks: {}.", suggested_checks.join("; ")) + }; + format!( + "Rescue profile \"{}\" was {} but did not become active (runtime state: {}).", + profile, + if configured { "configured" } else { "not configured" }, + runtime_state + ) + &suffix +} + +async fn execute_rescue_activation_diagnostic_command( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> CommandResult { + let started = Instant::now(); + if argv.first().map(String::as_str) == Some("manage_rescue_bot") + && argv.get(1).map(String::as_str) == Some("status") + { + let profile = argv + .get(2) + .map(String::as_str) + .filter(|value| !value.trim().is_empty()) + .unwrap_or("rescue"); + let result = match target_location { + TargetLocation::LocalOpenclaw => { + manage_rescue_bot("status".into(), Some(profile.to_string()), None).await + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id); + match host_id { + Ok(host_id) => { + remote_manage_rescue_bot( + app.state::(), + host_id, + "status".into(), + Some(profile.to_string()), + None, + ) + .await + } + Err(error) => Err(error), + } + } + }; + return match result { + Ok(result) => { + let mut command_result = rescue_bot_manage_command_result(&result); + command_result.duration_ms = started.elapsed().as_millis() as u64; + command_result + } + Err(error) => CommandResult { + argv: argv.to_vec(), + exit_code: Some(1), + stdout: String::new(), + stderr: error, + duration_ms: started.elapsed().as_millis() as u64, + timed_out: false, + }, + }; + } + + match execute_command(&app.state::(), target_location, instance_id, argv).await { + Ok(result) => result, + Err(error) => CommandResult { + argv: argv.to_vec(), + exit_code: Some(1), + stdout: String::new(), + stderr: error, + duration_ms: started.elapsed().as_millis() as u64, + 
timed_out: false, + }, + } +} + +async fn collect_rescue_activation_failure_diagnostics( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, + profile: &str, +) -> Vec { + let mut results = Vec::new(); + for argv in rescue_activation_diagnostic_commands(profile) { + results.push(execute_rescue_activation_diagnostic_command(app, target_location, instance_id, &argv).await); + } + results +} + +struct RescueActivationFailure { + message: String, + activation_result: CommandResult, + diagnostics: Vec, +} + +async fn ensure_rescue_profile_ready( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + let started = Instant::now(); + let result = match target_location { + TargetLocation::LocalOpenclaw => manage_rescue_bot("activate".into(), Some("rescue".into()), None) + .await + .map_err(|error| RescueActivationFailure { + message: error, + activation_result: rescue_setup_command_result("activate", "rescue", false, false, "activation_failed"), + diagnostics: Vec::new(), + })?, + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id).map_err(|error| RescueActivationFailure { + message: error, + activation_result: rescue_setup_command_result("activate", "rescue", false, false, "activation_failed"), + diagnostics: Vec::new(), + })?; + remote_manage_rescue_bot( + app.state::(), + host_id, + "activate".into(), + Some("rescue".into()), + None, + ) + .await + .map_err(|error| RescueActivationFailure { + message: error, + activation_result: rescue_setup_command_result("activate", "rescue", false, false, "activation_failed"), + diagnostics: Vec::new(), + })? 
+ } + }; + let mut command_result = rescue_setup_command_result( + &result.action, + &result.profile, + result.configured, + result.active, + &result.runtime_state, + ); + command_result.duration_ms = started.elapsed().as_millis() as u64; + if !result.active { + let diagnostics = + collect_rescue_activation_failure_diagnostics(app, target_location, instance_id, &result.profile).await; + let suggested_checks = diagnostics + .iter() + .map(|result| result.argv.join(" ")) + .collect::>(); + return Err(RescueActivationFailure { + message: rescue_activation_error_message( + &result.profile, + result.configured, + &result.runtime_state, + &suggested_checks, + ), + activation_result: command_result, + diagnostics, + }); + } + Ok(command_result) +} + +async fn repair_rescue_gateway_if_needed( + app: &AppHandle, + session_id: &str, + round: usize, + target_location: TargetLocation, + instance_id: &str, + diagnosis: &mut RescuePrimaryDiagnosisResult, +) -> Result<(), String> { + if !(diagnosis_missing_rescue_profile(diagnosis) || diagnosis_unhealthy_rescue_gateway(diagnosis)) { + return Ok(()); + } + + emit_progress( + Some(app), + session_id, + round, + "preparing_rescue", + "Activating rescue profile before requesting remote repair plan", + Some(PlanKind::Repair), + None, + ); + let setup_result = match ensure_rescue_profile_ready(app, target_location, instance_id).await { + Ok(setup_result) => setup_result, + Err(failure) => { + append_remote_doctor_log( + session_id, + json!({ + "event": "rescue_profile_activation", + "round": round, + "result": failure.activation_result, + "status": "failed", + }), + ); + append_remote_doctor_log( + session_id, + json!({ + "event": "rescue_activation_diagnosis", + "round": round, + "checks": failure.diagnostics, + }), + ); + return Err(failure.message); + } + }; + append_remote_doctor_log( + session_id, + json!({ + "event": "rescue_profile_activation", + "round": round, + "result": setup_result, + }), + ); + *diagnosis = 
run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "after_rescue_activation", round, diagnosis); + Ok(()) +} + +fn append_diagnosis_log(session_id: &str, stage: &str, round: usize, diagnosis: &RescuePrimaryDiagnosisResult) { + append_remote_doctor_log( + session_id, + json!({ + "event": "diagnosis_result", + "stage": stage, + "round": round, + "status": diagnosis.status, + "summaryStatus": diagnosis.summary.status, + "headline": diagnosis.summary.headline, + "recommendedAction": diagnosis.summary.recommended_action, + "issueCount": diagnosis.issues.len(), + "issues": diagnosis_issue_summaries(diagnosis), + }), + ); +} + +fn clawpal_server_step_type_summary(steps: &[ClawpalServerPlanStep]) -> Value { + let mut counts = serde_json::Map::new(); + for step in steps { + let entry = counts + .entry(step.step_type.clone()) + .or_insert_with(|| Value::from(0_u64)); + let next = entry.as_u64().unwrap_or(0) + 1; + *entry = Value::from(next); + } + Value::Object(counts) +} + +fn repair_plan_stalled(observations: &[RepairRoundObservation], threshold: usize) -> bool { + if observations.len() < threshold { + return false; + } + let recent = &observations[observations.len() - threshold..]; + let Some(first) = recent.first() else { + return false; + }; + !first.issue_summaries.is_empty() + && recent.iter().all(|entry| { + entry.step_types.len() == 1 + && entry.step_types[0] == "doctorRediagnose" + && entry.diagnosis_signature == first.diagnosis_signature + }) +} + +fn round_limit_error_message( + diagnosis: &RescuePrimaryDiagnosisResult, + last_step_types: &[String], +) -> String { + let issue_summary = serde_json::to_string(&diagnosis_issue_summaries(diagnosis)) + .unwrap_or_else(|_| "[]".to_string()); + let step_summary = if last_step_types.is_empty() { + "[]".to_string() + } else { + serde_json::to_string(last_step_types).unwrap_or_else(|_| "[]".to_string()) + }; + format!( + "Remote Doctor repair exceeded 
{MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean rescue diagnosis result. Last issues: {issue_summary}. Last repair step types: {step_summary}." + ) +} + +fn stalled_plan_error_message(observation: &RepairRoundObservation) -> String { + let issue_summary = + serde_json::to_string(&observation.issue_summaries).unwrap_or_else(|_| "[]".to_string()); + let step_summary = + serde_json::to_string(&observation.step_types).unwrap_or_else(|_| "[]".to_string()); + format!( + "Remote Doctor did not return actionable repair steps by round {} after {} repeated rounds. Last issues: {}. Last repair step types: {}.", + observation.round, + REPAIR_PLAN_STALL_THRESHOLD, + issue_summary, + step_summary + ) +} + +fn ensure_object(value: &mut Value) -> Result<&mut serde_json::Map, String> { + if !value.is_object() { + *value = json!({}); + } + value + .as_object_mut() + .ok_or_else(|| "Expected object while applying remote doctor config step".to_string()) +} + +fn apply_config_set(root: &mut Value, path: &str, value: Value) -> Result<(), String> { + let segments = path.split('.').filter(|segment| !segment.trim().is_empty()).collect::>(); + if segments.is_empty() { + return Err("Config set path cannot be empty".into()); + } + let mut cursor = root; + for segment in &segments[..segments.len() - 1] { + let object = ensure_object(cursor)?; + cursor = object.entry((*segment).to_string()).or_insert_with(|| json!({})); + } + let object = ensure_object(cursor)?; + object.insert(segments[segments.len() - 1].to_string(), value); + Ok(()) +} + +fn apply_config_unset(root: &mut Value, path: &str) -> Result<(), String> { + let segments = path.split('.').filter(|segment| !segment.trim().is_empty()).collect::>(); + if segments.is_empty() { + return Err("Config unset path cannot be empty".into()); + } + let mut cursor = root; + for segment in &segments[..segments.len() - 1] { + let Some(next) = cursor.as_object_mut().and_then(|object| object.get_mut(*segment)) else { + return Ok(()); + }; + cursor = 
next; + } + if let Some(object) = cursor.as_object_mut() { + object.remove(segments[segments.len() - 1]); + } + Ok(()) +} + +fn extract_json_block(text: &str) -> Option<&str> { + clawpal_core::doctor::extract_json_from_output(text) +} + +fn build_agent_plan_prompt( + kind: PlanKind, + session_id: &str, + round: usize, + target_location: TargetLocation, + instance_id: &str, + diagnosis: &RescuePrimaryDiagnosisResult, + config_context: &ConfigExcerptContext, + previous_results: &[CommandResult], +) -> String { + let kind_label = match kind { + PlanKind::Detect => "detection", + PlanKind::Investigate => "investigation", + PlanKind::Repair => "repair", + }; + let target_label = match target_location { + TargetLocation::LocalOpenclaw => "local_openclaw", + TargetLocation::RemoteOpenclaw => "remote_openclaw", + }; + let diagnosis_json = + serde_json::to_string_pretty(&diagnosis_context(diagnosis)).unwrap_or_else(|_| "{}".into()); + let config_context_json = serde_json::to_string_pretty(&json!({ + "configExcerpt": config_context.config_excerpt, + "configExcerptRaw": config_context.config_excerpt_raw, + "configParseError": config_context.config_parse_error, + })) + .unwrap_or_else(|_| "{}".into()); + let previous_results_json = + serde_json::to_string_pretty(previous_results).unwrap_or_else(|_| "[]".into()); + let phase_rules = match kind { + PlanKind::Detect => "For detection plans, gather only the commands needed to confirm current state. Set healthy=true and done=true only when no issue remains.", + PlanKind::Investigate => "For investigation plans, return read-only diagnosis steps only. Do not modify files, delete files, overwrite config, or restart services. Prefer commands that inspect, validate, backup, or print evidence for why the config is unreadable. Do not run follow/tail commands, streaming log readers, or any unbounded command; every investigation command must be bounded and return promptly. 
Do not use heredocs, multiline scripts, or commands that wait on stdin. Prefer single-line commands over shell scripting.", + PlanKind::Repair => "For repair plans, return the minimal safe repair commands. Reference prior investigation evidence when config is unreadable. Back up the file before changing it and include validation/rediagnosis steps as needed. Do not invent OpenClaw subcommands. Use only the verified OpenClaw commands listed below or the `clawpal doctor ...` tools. Do not use `openclaw auth ...` commands. Do not use `openclaw doctor --json`; use `clawpal doctor probe-openclaw` or `clawpal doctor exec --tool doctor` instead. Do not use heredocs, multiline scripts, or commands that wait on stdin.", + }; + format!( + "Identity bootstrap for this session:\n\ +- Your name: ClawPal Remote Doctor\n\ +- Your creature: maintenance daemon\n\ +- Your vibe: direct, terse, operational\n\ +- Your emoji: none\n\ +- The user is: ClawPal desktop app\n\ +- The user timezone is: Asia/Shanghai\n\ +- Do not ask identity/bootstrap questions.\n\ +- Do not ask who you are or who the user is.\n\ +- Do not modify IDENTITY.md, USER.md, or workspace bootstrap files.\n\ +\n\ +You are ClawPal Remote Doctor planner.\n\ +Return ONLY one JSON object and no markdown.\n\ +Task: produce the next {kind_label} plan for OpenClaw.\n\ +Session: {session_id}\n\ +Round: {round}\n\ +Target location: {target_label}\n\ +Instance id: {instance_id}\n\ +Diagnosis JSON:\n{diagnosis_json}\n\n\ +Config context JSON:\n{config_context_json}\n\n\ +Previous command results JSON:\n{previous_results_json}\n\n\ +Available gateway tools:\n\ +- `clawpal doctor probe-openclaw`\n\ +- `clawpal doctor config-read [path]`\n\ +- `clawpal doctor config-read-raw`\n\ +- `clawpal doctor config-upsert `\n\ +- `clawpal doctor config-delete `\n\ +- `clawpal doctor config-write-raw-base64 `\n\ +- `clawpal doctor exec --tool [--args ]`\n\ +- Verified direct OpenClaw commands only:\n\ + - `openclaw --version`\n\ + - `openclaw 
gateway status`\n\ +You may invoke these tools before answering when you need fresh diagnostics or config state.\n\ +If you already have enough information, return the JSON plan directly.\n\n\ +Return this exact JSON schema:\n\ +{{\n \"planId\": \"string\",\n \"planKind\": \"{kind}\",\n \"summary\": \"string\",\n \"commands\": [{{\"argv\": [\"cmd\"], \"timeoutSec\": 60, \"purpose\": \"why\", \"continueOnFailure\": false}}],\n \"healthy\": false,\n \"done\": false,\n \"success\": false\n}}\n\ +Rules:\n\ +- {phase_rules}\n\ +- For repair plans, return shell/openclaw commands in commands.\n\ +- Keep commands empty when no command is needed.\n\ +- Output valid JSON only.", + kind = match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + } + ) +} + +fn parse_agent_plan_response(kind: PlanKind, text: &str) -> Result { + let json_block = extract_json_block(text) + .ok_or_else(|| format!("Remote doctor agent did not return JSON: {text}"))?; + let value: Value = serde_json::from_str(json_block) + .map_err(|error| format!("Failed to parse remote doctor agent JSON: {error}"))?; + parse_plan_response(kind, value) +} + +fn parse_invoke_argv(command: &str, args: &Value) -> Result, String> { + if let Some(argv) = args.get("argv").and_then(Value::as_array) { + let parsed = argv + .iter() + .map(|value| { + value + .as_str() + .map(str::to_string) + .ok_or_else(|| "invoke argv entries must be strings".to_string()) + }) + .collect::, _>>()?; + if parsed.is_empty() { + return Err("invoke argv cannot be empty".into()); + } + return Ok(parsed); + } + + let arg_string = args + .get("args") + .and_then(Value::as_str) + .or_else(|| args.get("command").and_then(Value::as_str)) + .unwrap_or(""); + let mut parsed = if arg_string.trim().is_empty() { + Vec::new() + } else { + shell_words::split(arg_string) + .map_err(|error| format!("Failed to parse invoke args: {error}"))? 
+ }; + if parsed.first().map(String::as_str) != Some(command) { + parsed.insert(0, command.to_string()); + } + Ok(parsed) +} + +async fn execute_clawpal_command( + app: &AppHandle, + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> Result { + match argv.get(1).map(String::as_str) { + Some("doctor") => execute_clawpal_doctor_command(app, pool, target_location, instance_id, argv).await, + other => Err(format!("Unsupported clawpal command in remote doctor agent session: {:?}", other)), + } +} + +async fn execute_clawpal_doctor_command( + app: &AppHandle, + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> Result { + match argv.get(2).map(String::as_str) { + Some("probe-openclaw") => { + let version_result = execute_command( + pool, + target_location, + instance_id, + &["openclaw".into(), "--version".into()], + ) + .await?; + let which_result = match target_location { + TargetLocation::LocalOpenclaw => execute_command( + pool, + target_location, + instance_id, + &["sh".into(), "-lc".into(), "command -v openclaw || true".into()], + ) + .await?, + TargetLocation::RemoteOpenclaw => execute_command( + pool, + target_location, + instance_id, + &["sh".into(), "-lc".into(), "command -v openclaw || true".into()], + ) + .await?, + }; + Ok(json!({ + "ok": version_result.exit_code == Some(0), + "version": version_result.stdout.trim(), + "openclawPath": which_result.stdout.trim(), + })) + } + Some("config-read") => { + let maybe_path = argv.get(3).map(String::as_str).filter(|value| !value.starts_with("--")); + let raw = read_target_config_raw(app, target_location, instance_id).await?; + config_read_response(&raw, maybe_path) + } + Some("config-read-raw") => { + let raw = read_target_config_raw(app, target_location, instance_id).await?; + Ok(json!({ + "raw": raw, + })) + } + Some("config-delete") => { + let path = argv.get(3).ok_or("clawpal doctor config-delete 
requires a path")?; + let mut config = read_target_config(app, target_location, instance_id).await?; + apply_config_unset(&mut config, path)?; + write_target_config(app, target_location, instance_id, &config).await?; + restart_target_gateway(app, target_location, instance_id).await?; + Ok(json!({ "deleted": true, "path": path })) + } + Some("config-write-raw-base64") => { + let encoded = argv + .get(3) + .ok_or("clawpal doctor config-write-raw-base64 requires a base64 payload")?; + let decoded = decode_base64_config_payload(encoded)?; + write_target_config_raw(app, target_location, instance_id, &decoded).await?; + restart_target_gateway(app, target_location, instance_id).await?; + Ok(json!({ + "written": true, + "bytes": decoded.len(), + })) + } + Some("config-upsert") => { + let path = argv.get(3).ok_or("clawpal doctor config-upsert requires a path")?; + let value_raw = argv.get(4).ok_or("clawpal doctor config-upsert requires a value")?; + let value: Value = serde_json::from_str(value_raw) + .map_err(|error| format!("Invalid JSON value for config-upsert: {error}"))?; + let mut config = read_target_config(app, target_location, instance_id).await?; + apply_config_set(&mut config, path, value)?; + write_target_config(app, target_location, instance_id, &config).await?; + restart_target_gateway(app, target_location, instance_id).await?; + Ok(json!({ "upserted": true, "path": path })) + } + Some("exec") => { + let tool_idx = argv + .iter() + .position(|part| part == "--tool") + .ok_or("clawpal doctor exec requires --tool")?; + let tool = argv.get(tool_idx + 1).ok_or("clawpal doctor exec missing tool name")?; + let args_idx = argv.iter().position(|part| part == "--args"); + let mut exec_argv = vec![tool.clone()]; + if let Some(index) = args_idx { + if let Some(arg_string) = argv.get(index + 1) { + exec_argv.extend( + shell_words::split(arg_string) + .map_err(|error| format!("Failed to parse clawpal doctor exec args: {error}"))?, + ); + } + } + let result = 
execute_command(pool, target_location, instance_id, &exec_argv).await?; + Ok(json!({ + "argv": result.argv, + "exitCode": result.exit_code, + "stdout": result.stdout, + "stderr": result.stderr, + })) + } + other => Err(format!( + "Unsupported clawpal doctor subcommand in remote doctor agent session: {:?}", + other + )), + } +} + +fn config_read_response(raw: &str, path: Option<&str>) -> Result { + let context = build_config_excerpt_context(raw); + if let Some(parse_error) = context.config_parse_error { + return Ok(json!({ + "value": Value::Null, + "path": path, + "raw": context.config_excerpt_raw.unwrap_or_else(|| raw.to_string()), + "parseError": parse_error, + })); + } + + let value = if let Some(path) = path { + clawpal_core::doctor::select_json_value_from_str( + &serde_json::to_string_pretty(&context.config_excerpt).unwrap_or_else(|_| "{}".into()), + Some(path), + "remote doctor config", + )? + } else { + context.config_excerpt + }; + + Ok(json!({ + "value": value, + "path": path, + })) +} + +fn decode_base64_config_payload(encoded: &str) -> Result { + let bytes = base64::engine::general_purpose::STANDARD + .decode(encoded.trim()) + .map_err(|error| format!("Failed to decode base64 config payload: {error}"))?; + String::from_utf8(bytes) + .map_err(|error| format!("Base64 config payload is not valid UTF-8: {error}")) +} + +async fn execute_invoke_payload( + app: &AppHandle, + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + payload: &Value, +) -> Result { + let command = payload + .get("command") + .and_then(Value::as_str) + .ok_or("invoke payload missing command")?; + let args = payload.get("args").cloned().unwrap_or(Value::Null); + let argv = parse_invoke_argv(command, &args)?; + match command { + "openclaw" => { + let result = execute_command(pool, target_location, instance_id, &argv).await?; + Ok(json!({ + "argv": result.argv, + "exitCode": result.exit_code, + "stdout": result.stdout, + "stderr": result.stderr, + })) + } + 
"clawpal" => execute_clawpal_command(app, pool, target_location, instance_id, &argv).await, + other => Err(format!("Unsupported invoke command in remote doctor agent session: {other}")), + } +} + +async fn run_agent_request_with_bridge( + app: &AppHandle, + client: &NodeClient, + bridge: &BridgeClient, + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + agent_id: &str, + session_key: &str, + message: &str, +) -> Result { + let final_rx = client + .start_agent_request(agent_id, session_key, message) + .await?; + let mut invokes = bridge.subscribe_invokes(); + let final_future = async move { + final_rx + .await + .map_err(|_| "Agent request ended before a final chat response was received".to_string()) + }; + tokio::pin!(final_future); + + loop { + tokio::select! { + result = &mut final_future => { + return result; + } + event = invokes.recv() => { + let payload = match event { + Ok(payload) => payload, + Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => { + continue; + } + Err(tokio::sync::broadcast::error::RecvError::Closed) => { + return Err("Bridge invoke stream closed during agent request".into()); + } + }; + let invoke_id = payload.get("id").and_then(Value::as_str).unwrap_or("").to_string(); + let node_id = payload.get("nodeId").and_then(Value::as_str).unwrap_or("").to_string(); + let result = execute_invoke_payload(app, pool, target_location, instance_id, &payload).await; + match result { + Ok(value) => { + bridge.send_invoke_result(&invoke_id, &node_id, value).await?; + } + Err(error) => { + bridge.send_invoke_error(&invoke_id, &node_id, "EXEC_ERROR", &error).await?; + } + } + let _ = bridge.take_invoke(&invoke_id).await; + } + } + } +} + +fn shell_escape(value: &str) -> String { + format!("'{}'", value.replace('\'', "'\\''")) +} + +fn build_shell_command(argv: &[String]) -> String { + argv.iter() + .map(|part| shell_escape(part)) + .collect::>() + .join(" ") +} + +async fn execute_command( + pool: 
&SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> Result { + let started = Instant::now(); + if argv.is_empty() { + return Err("Plan command argv cannot be empty".into()); + } + let result = match target_location { + TargetLocation::LocalOpenclaw => { + if argv[0] == "openclaw" { + let arg_refs = argv.iter().skip(1).map(String::as_str).collect::>(); + let output = run_openclaw(&arg_refs)?; + CommandResult { + argv: argv.to_vec(), + exit_code: Some(output.exit_code), + stdout: output.stdout, + stderr: output.stderr, + duration_ms: started.elapsed().as_millis() as u64, + timed_out: false, + } + } else { + let mut command = std::process::Command::new(&argv[0]); + command.args(argv.iter().skip(1)); + if let Some(openclaw_home) = get_active_openclaw_home_override() { + command.env("OPENCLAW_HOME", openclaw_home); + } + let output = command + .output() + .map_err(|error| format!("Failed to execute local command {:?}: {error}", argv))?; + CommandResult { + argv: argv.to_vec(), + exit_code: output.status.code(), + stdout: String::from_utf8_lossy(&output.stdout).to_string(), + stderr: String::from_utf8_lossy(&output.stderr).to_string(), + duration_ms: started.elapsed().as_millis() as u64, + timed_out: false, + } + } + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + if argv[0] == "openclaw" { + let arg_refs = argv.iter().skip(1).map(String::as_str).collect::>(); + let output = run_openclaw_remote(pool, &host_id, &arg_refs).await?; + CommandResult { + argv: argv.to_vec(), + exit_code: Some(output.exit_code), + stdout: output.stdout, + stderr: output.stderr, + duration_ms: started.elapsed().as_millis() as u64, + timed_out: false, + } + } else { + let output = pool.exec_login(&host_id, &build_shell_command(argv)).await?; + CommandResult { + argv: argv.to_vec(), + exit_code: Some(output.exit_code as i32), + stdout: output.stdout, + stderr: output.stderr, + duration_ms: 
started.elapsed().as_millis() as u64, + timed_out: false, + } + } + } + }; + Ok(result) +} + +fn plan_command_uses_internal_clawpal_tool(argv: &[String]) -> bool { + argv.first().map(String::as_str) == Some("clawpal") +} + +fn validate_clawpal_exec_args(argv: &[String]) -> Result<(), String> { + if argv.get(0).map(String::as_str) != Some("clawpal") + || argv.get(1).map(String::as_str) != Some("doctor") + || argv.get(2).map(String::as_str) != Some("exec") + { + return Ok(()); + } + + let args_idx = argv.iter().position(|part| part == "--args"); + let Some(index) = args_idx else { + return Ok(()); + }; + let Some(arg_string) = argv.get(index + 1) else { + return Ok(()); + }; + if arg_string.contains('\n') || arg_string.contains("<<") { + return Err(format!( + "Unsupported clawpal doctor exec args: {}. Use bounded single-line commands without heredocs or stdin-driven scripts.", + argv.join(" ") + )); + } + Ok(()) +} + +fn validate_plan_command_argv(argv: &[String]) -> Result<(), String> { + if argv.is_empty() { + return Err("Plan command argv cannot be empty".into()); + } + validate_clawpal_exec_args(argv)?; + if argv[0] != "openclaw" { + return Ok(()); + } + + let supported = argv == ["openclaw", "--version"] || argv == ["openclaw", "gateway", "status"]; + if supported { + Ok(()) + } else { + Err(format!( + "Unsupported openclaw plan command: {}", + argv.join(" ") + )) + } +} + +fn plan_command_failure_message( + kind: PlanKind, + round: usize, + argv: &[String], + error: &str, +) -> String { + let kind_label = match kind { + PlanKind::Detect => "Detect", + PlanKind::Investigate => "Investigate", + PlanKind::Repair => "Repair", + }; + format!( + "{kind_label} command failed in round {round}: {}: {error}", + argv.join(" ") + ) +} + +fn command_result_stdout(value: &Value) -> String { + value.get("stdout") + .and_then(Value::as_str) + .map(str::to_string) + .unwrap_or_else(|| serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string())) +} + +async fn 
execute_plan_command( + app: &AppHandle, + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> Result { + let started = Instant::now(); + validate_plan_command_argv(argv)?; + if plan_command_uses_internal_clawpal_tool(argv) { + let value = execute_clawpal_command(app, pool, target_location, instance_id, argv).await?; + let exit_code = value + .get("exitCode") + .and_then(Value::as_i64) + .map(|code| code as i32) + .unwrap_or(0); + let stderr = value + .get("stderr") + .and_then(Value::as_str) + .unwrap_or("") + .to_string(); + return Ok(CommandResult { + argv: argv.to_vec(), + exit_code: Some(exit_code), + stdout: command_result_stdout(&value), + stderr, + duration_ms: started.elapsed().as_millis() as u64, + timed_out: false, + }); + } + + execute_command(pool, target_location, instance_id, argv).await +} + +fn parse_plan_response(kind: PlanKind, value: Value) -> Result { + let mut response: PlanResponse = serde_json::from_value(value) + .map_err(|error| format!("Failed to parse remote doctor plan response: {error}"))?; + response.plan_kind = kind; + if response.plan_id.trim().is_empty() { + response.plan_id = format!("plan-{}", Uuid::new_v4()); + } + Ok(response) +} + +async fn request_plan( + client: &NodeClient, + method: &str, + kind: PlanKind, + session_id: &str, + round: usize, + target_location: TargetLocation, + instance_id: &str, + previous_results: &[CommandResult], +) -> Result { + let response = client + .send_request( + method, + json!({ + "sessionId": session_id, + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "targetLocation": match target_location { + TargetLocation::LocalOpenclaw => "local_openclaw", + TargetLocation::RemoteOpenclaw => "remote_openclaw", + }, + "instanceId": instance_id, + "hostId": instance_id.strip_prefix("ssh:"), + "previousResults": previous_results, + }), + ) + 
.await?; + parse_plan_response(kind, response) +} + +async fn request_agent_plan( + app: &AppHandle, + client: &NodeClient, + bridge_client: &BridgeClient, + pool: &SshConnectionPool, + session_id: &str, + round: usize, + kind: PlanKind, + target_location: TargetLocation, + instance_id: &str, + diagnosis: &RescuePrimaryDiagnosisResult, + config_context: &ConfigExcerptContext, + previous_results: &[CommandResult], +) -> Result { + let agent_session_key = remote_doctor_agent_session_key(session_id); + let prompt = build_agent_plan_prompt( + kind, + session_id, + round, + target_location, + instance_id, + diagnosis, + config_context, + previous_results, + ); + let text = if bridge_client.is_connected().await { + run_agent_request_with_bridge( + app, + client, + bridge_client, + pool, + target_location, + instance_id, + remote_doctor_agent_id(), + &agent_session_key, + &prompt, + ) + .await? + } else { + client + .run_agent_request(remote_doctor_agent_id(), &agent_session_key, &prompt) + .await? 
+ }; + parse_agent_plan_response(kind, &text) +} + +fn agent_plan_step_types(plan: &PlanResponse) -> Vec { + if plan.commands.is_empty() { + return vec![format!( + "plan:{}", + match plan.plan_kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + } + )]; + } + plan.commands + .iter() + .map(|command| { + command + .argv + .first() + .cloned() + .unwrap_or_else(|| "empty-command".to_string()) + }) + .collect() +} + +async fn request_clawpal_server_plan( + client: &NodeClient, + session_id: &str, + round: usize, + instance_id: &str, + target_location: TargetLocation, + diagnosis: &RescuePrimaryDiagnosisResult, + config_context: &ConfigExcerptContext, +) -> Result { + let response = client + .send_request( + "remote_repair_plan.request", + json!({ + "requestId": format!("{session_id}-round-{round}"), + "targetId": instance_id, + "targetLocation": match target_location { + TargetLocation::LocalOpenclaw => "local_openclaw", + TargetLocation::RemoteOpenclaw => "remote_openclaw", + }, + "context": { + "configExcerpt": config_context.config_excerpt, + "configExcerptRaw": config_context.config_excerpt_raw, + "configParseError": config_context.config_parse_error, + "diagnosis": diagnosis_context(diagnosis), + } + }), + ) + .await?; + serde_json::from_value::(response) + .map_err(|error| format!("Failed to parse clawpal-server plan response: {error}")) +} + +async fn report_clawpal_server_step_result( + client: &NodeClient, + plan_id: &str, + step_index: usize, + step: &ClawpalServerPlanStep, + result: &CommandResult, +) { + let _ = client + .send_request( + "remote_repair_plan.step_result", + json!({ + "planId": plan_id, + "stepIndex": step_index, + "step": step, + "result": result, + }), + ) + .await; +} + +async fn report_clawpal_server_final_result( + client: &NodeClient, + plan_id: &str, + healthy: bool, + diagnosis: &RescuePrimaryDiagnosisResult, +) { + let _ = client + .send_request( + 
"remote_repair_plan.final_result", + json!({ + "planId": plan_id, + "healthy": healthy, + "diagnosis": diagnosis_context(diagnosis), + }), + ) + .await; +} + +async fn run_remote_doctor_repair_loop( + app: Option<&AppHandle>, + pool: &SshConnectionPool, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, + mut request_plan_fn: F, +) -> Result +where + F: FnMut(PlanKind, usize, Vec) -> Fut, + Fut: std::future::Future>, +{ + let mut previous_results: Vec = Vec::new(); + let mut last_command: Option> = None; + let mut last_plan_kind = PlanKind::Detect; + + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + emit_progress( + app, + session_id, + round, + "planning_detect", + format!("Requesting detection plan for round {round}"), + Some(PlanKind::Detect), + None, + ); + let detect_plan = + request_plan_fn(PlanKind::Detect, round, previous_results.clone()).await?; + append_remote_doctor_log( + session_id, + json!({ + "event": "plan_received", + "round": round, + "planKind": "detect", + "planId": detect_plan.plan_id, + "summary": detect_plan.summary, + "commandCount": detect_plan.commands.len(), + "healthy": detect_plan.healthy, + "done": detect_plan.done, + }), + ); + if detect_plan.healthy || (detect_plan.done && detect_plan.commands.is_empty()) { + return Ok(RemoteDoctorRepairResult { + mode: "remoteDoctor".into(), + status: "completed".into(), + round, + phase: "completed".into(), + last_plan_kind: match last_plan_kind { + PlanKind::Detect => "detect".into(), + PlanKind::Investigate => "investigate".into(), + PlanKind::Repair => "repair".into(), + }, + latest_diagnosis_healthy: true, + last_command, + session_id: session_id.to_string(), + message: "Remote Doctor repair completed with a healthy detection result.".into(), + }); + } + previous_results.clear(); + for command in &detect_plan.commands { + last_command = Some(command.argv.clone()); + emit_progress( + app, + session_id, + round, + "executing_detect", + format!("Running detect command: {}", 
command.argv.join(" ")), + Some(PlanKind::Detect), + Some(command.argv.clone()), + ); + let command_result = + execute_command(pool, target_location, instance_id, &command.argv).await?; + append_remote_doctor_log( + session_id, + json!({ + "event": "command_result", + "round": round, + "planKind": "detect", + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + previous_results.push(command_result); + return Err(format!( + "Detect command failed in round {round}: {}", + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + + emit_progress( + app, + session_id, + round, + "planning_repair", + format!("Requesting repair plan for round {round}"), + Some(PlanKind::Repair), + None, + ); + let repair_plan = + request_plan_fn(PlanKind::Repair, round, previous_results.clone()).await?; + last_plan_kind = PlanKind::Repair; + append_remote_doctor_log( + session_id, + json!({ + "event": "plan_received", + "round": round, + "planKind": "repair", + "planId": repair_plan.plan_id, + "summary": repair_plan.summary, + "commandCount": repair_plan.commands.len(), + "success": repair_plan.success, + "done": repair_plan.done, + }), + ); + previous_results.clear(); + for command in &repair_plan.commands { + last_command = Some(command.argv.clone()); + emit_progress( + app, + session_id, + round, + "executing_repair", + format!("Running repair command: {}", command.argv.join(" ")), + Some(PlanKind::Repair), + Some(command.argv.clone()), + ); + let command_result = + execute_command(pool, target_location, instance_id, &command.argv).await?; + append_remote_doctor_log( + session_id, + json!({ + "event": "command_result", + "round": round, + "planKind": "repair", + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + previous_results.push(command_result); + return Err(format!( + "Repair 
command failed in round {round}: {}", + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + } + + append_remote_doctor_log( + session_id, + json!({ + "event": "session_complete", + "status": "failed", + "reason": "round_limit_exceeded", + }), + ); + Err(format!( + "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean detection result" + )) +} + +async fn run_clawpal_server_repair_loop( + app: &AppHandle, + client: &NodeClient, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, +) -> Result { + let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "initial", 0, &diagnosis); + if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { + repair_rescue_gateway_if_needed(app, session_id, 0, target_location, instance_id, &mut diagnosis) + .await?; + } + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + 0, + PlanKind::Detect, + None, + "Remote Doctor repair skipped because diagnosis is already healthy.", + )); + } + + let mut last_command = None; + let mut round_observations: Vec = Vec::new(); + let mut last_step_types: Vec = Vec::new(); + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + emit_progress( + Some(app), + session_id, + round, + "planning_repair", + format!("Requesting remote repair plan for round {round}"), + Some(PlanKind::Repair), + None, + ); + let config_context = + build_config_excerpt_context(&read_target_config_raw(app, target_location, instance_id).await?); + append_remote_doctor_log( + session_id, + json!({ + "event": "plan_request_context", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "instanceId": instance_id, + "targetLocation": target_location, + "configContext": config_excerpt_log_summary(&config_context), + "diagnosisIssueCount": diagnosis.issues.len(), + "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), + }), + ); 
+ if config_context.config_parse_error.is_some() { + append_remote_doctor_log( + session_id, + json!({ + "event": "config_recovery_context", + "round": round, + "context": config_excerpt_log_summary(&config_context), + }), + ); + } + let plan = request_clawpal_server_plan( + client, + session_id, + round, + instance_id, + target_location, + &diagnosis, + &config_context, + ) + .await?; + append_remote_doctor_log( + session_id, + json!({ + "event": "plan_received", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "planId": plan.plan_id, + "summary": plan.summary, + "stepCount": plan.steps.len(), + "stepTypeCounts": clawpal_server_step_type_summary(&plan.steps), + }), + ); + + let mut current_config = config_context.config_excerpt.clone(); + let mut rediagnosed = false; + let mut round_step_types = Vec::new(); + for (step_index, step) in plan.steps.iter().enumerate() { + round_step_types.push(step.step_type.clone()); + let mut result = CommandResult { + argv: Vec::new(), + exit_code: Some(0), + stdout: String::new(), + stderr: String::new(), + duration_ms: 0, + timed_out: false, + }; + let started = Instant::now(); + match step.step_type.as_str() { + "configSet" => { + let path = step.path.as_deref().ok_or("configSet step missing path")?; + let value = step.value.clone().ok_or("configSet step missing value")?; + emit_progress( + Some(app), + session_id, + round, + "executing_repair", + format!("Applying config set: {path}"), + Some(PlanKind::Repair), + None, + ); + apply_config_set(&mut current_config, path, value)?; + write_target_config(app, target_location, instance_id, ¤t_config).await?; + restart_target_gateway(app, target_location, instance_id).await?; + result.argv = vec!["configSet".into(), path.into()]; + result.stdout = format!("Updated {path}"); + } + "configUnset" => { + let path = step.path.as_deref().ok_or("configUnset step missing path")?; + emit_progress( + Some(app), + session_id, + round, + "executing_repair", + 
format!("Applying config unset: {path}"), + Some(PlanKind::Repair), + None, + ); + apply_config_unset(&mut current_config, path)?; + write_target_config(app, target_location, instance_id, ¤t_config).await?; + restart_target_gateway(app, target_location, instance_id).await?; + result.argv = vec!["configUnset".into(), path.into()]; + result.stdout = format!("Removed {path}"); + } + "doctorRediagnose" => { + emit_progress( + Some(app), + session_id, + round, + "planning_detect", + format!("Running rescue diagnosis after repair plan round {round}"), + Some(PlanKind::Detect), + None, + ); + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "post_step_rediagnose", round, &diagnosis); + rediagnosed = true; + result.argv = vec!["doctorRediagnose".into()]; + result.stdout = format!( + "Diagnosis status={} issues={}", + diagnosis.status, + diagnosis.issues.len() + ); + } + other => { + result.exit_code = Some(1); + result.stderr = format!("Unsupported clawpal-server step type: {other}"); + } + } + result.duration_ms = started.elapsed().as_millis() as u64; + last_command = Some(result.argv.clone()); + append_remote_doctor_log( + session_id, + json!({ + "event": "command_result", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "stepIndex": step_index, + "step": step, + "result": result, + }), + ); + report_clawpal_server_step_result(client, &plan.plan_id, step_index, step, &result).await; + if result.exit_code.unwrap_or(1) != 0 { + return Err(result.stderr); + } + } + + if !rediagnosed { + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "post_round", round, &diagnosis); + } + if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { + repair_rescue_gateway_if_needed( + app, + session_id, + round, + target_location, + instance_id, + &mut diagnosis, + ) + .await?; + } + last_step_types = round_step_types.clone(); + 
round_observations.push(RepairRoundObservation::new(round, &round_step_types, &diagnosis)); + if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { + let observation = round_observations + .last() + .expect("stalled observations should contain current round"); + append_remote_doctor_log( + session_id, + json!({ + "event": "repair_plan_stalled", + "protocol": "clawpal_server", + "round": round, + "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, + "latestStepTypes": observation.step_types, + "issues": observation.issue_summaries, + }), + ); + return Err(stalled_plan_error_message(observation)); + } + let healthy = diagnosis_is_healthy(&diagnosis); + report_clawpal_server_final_result(client, &plan.plan_id, healthy, &diagnosis).await; + if healthy { + return Ok(result_for_completion( + session_id, + round, + PlanKind::Repair, + last_command, + "Remote Doctor repair completed with a healthy rescue diagnosis.", + )); + } + } + + Err(round_limit_error_message(&diagnosis, &last_step_types)) +} + +async fn run_agent_planner_repair_loop( + app: &AppHandle, + client: &NodeClient, + bridge_client: &BridgeClient, + pool: &SshConnectionPool, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, +) -> Result { + let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "initial", 0, &diagnosis); + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + 0, + PlanKind::Detect, + None, + "Remote Doctor repair skipped because diagnosis is already healthy.", + )); + } + + let mut previous_results: Vec = Vec::new(); + let mut last_command = None; + let mut last_step_types: Vec = Vec::new(); + let mut round_observations: Vec = Vec::new(); + + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + let kind = next_agent_plan_kind_for_round(&diagnosis, &previous_results); + let config_context = + build_config_excerpt_context(&read_target_config_raw(app, 
        // NOTE(review): this first line continues the
        // `build_config_excerpt_context(&read_target_config_raw(app, ...))` call
        // that begins immediately above this span.
        target_location, instance_id).await?);
        // Map the chosen plan kind onto the machine-readable progress phase.
        let phase = match kind {
            PlanKind::Detect => "planning_detect",
            PlanKind::Investigate => "planning_investigate",
            PlanKind::Repair => "planning_repair",
        };
        // Human-readable progress line for the same round/kind.
        let line = match kind {
            PlanKind::Detect => format!("Requesting detection plan for round {round}"),
            PlanKind::Investigate => format!("Requesting investigation plan for round {round}"),
            PlanKind::Repair => format!("Requesting repair plan for round {round}"),
        };
        emit_progress(Some(app), session_id, round, phase, line, Some(kind), None);
        // Audit log: record exactly what context is sent to the planner agent —
        // a summarized config excerpt and issue summaries, not the full config.
        append_remote_doctor_log(
            session_id,
            json!({
                "event": "plan_request_context",
                "protocol": "agent",
                "round": round,
                "planKind": match kind {
                    PlanKind::Detect => "detect",
                    PlanKind::Investigate => "investigate",
                    PlanKind::Repair => "repair",
                },
                "instanceId": instance_id,
                "targetLocation": target_location,
                "configContext": config_excerpt_log_summary(&config_context),
                "diagnosisIssueCount": diagnosis.issues.len(),
                "diagnosisIssues": diagnosis_issue_summaries(&diagnosis),
            }),
        );
        // Ask the agent for the next plan of this kind; transport or parse
        // failures abort the whole repair session via `?`.
        let plan = request_agent_plan(
            app,
            client,
            bridge_client,
            pool,
            session_id,
            round,
            kind,
            target_location,
            instance_id,
            &diagnosis,
            &config_context,
            &previous_results,
        )
        .await?;
        // Audit log: plan metadata only (id, summary, counts, agent's own claims).
        append_remote_doctor_log(
            session_id,
            json!({
                "event": "plan_received",
                "protocol": "agent",
                "round": round,
                "planKind": match plan.plan_kind {
                    PlanKind::Detect => "detect",
                    PlanKind::Investigate => "investigate",
                    PlanKind::Repair => "repair",
                },
                "planId": plan.plan_id,
                "summary": plan.summary,
                "commandCount": plan.commands.len(),
                "healthy": plan.healthy,
                "done": plan.done,
                "success": plan.success,
            }),
        );
        // Results are per-round: only this round's command outputs are fed back
        // to the planner on the next iteration.
        previous_results.clear();
        last_step_types = agent_plan_step_types(&plan);
        for command in &plan.commands {
            last_command = Some(command.argv.clone());
            emit_progress(
                Some(app),
                session_id,
                round,
                match kind {
                    PlanKind::Detect => "executing_detect",
                    PlanKind::Investigate => "executing_investigate",
                    PlanKind::Repair => "executing_repair",
                },
                format!("Running {} command: {}", match kind {
                    PlanKind::Detect => "detect",
                    PlanKind::Investigate => "investigate",
                    PlanKind::Repair => "repair",
                }, command.argv.join(" ")),
                Some(kind),
                Some(command.argv.clone()),
            );
            append_remote_doctor_log(
                session_id,
                json!({
                    "event": "command_start",
                    "round": round,
                    "planKind": match kind {
                        PlanKind::Detect => "detect",
                        PlanKind::Investigate => "investigate",
                        PlanKind::Repair => "repair",
                    },
                    "argv": command.argv,
                    "timeoutSec": command.timeout_sec,
                    "purpose": command.purpose,
                }),
            );
            // Transport-level failure (e.g. SSH exec error/timeout) aborts the
            // session with a message naming the failed command.
            let command_result = match execute_plan_command(
                app,
                pool,
                target_location,
                instance_id,
                &command.argv,
            )
            .await
            {
                Ok(result) => result,
                Err(error) => {
                    return Err(plan_command_failure_message(kind, round, &command.argv, &error));
                }
            };
            append_remote_doctor_log(
                session_id,
                json!({
                    "event": "command_result",
                    "round": round,
                    "planKind": match kind {
                        PlanKind::Detect => "detect",
                        PlanKind::Investigate => "investigate",
                        PlanKind::Repair => "repair",
                    },
                    "result": command_result,
                }),
            );
            // A missing exit code counts as failure (`unwrap_or(1)`); a plan can
            // opt individual commands out of fail-fast via `continue_on_failure`.
            if command_result.exit_code.unwrap_or(1) != 0
                && !command.continue_on_failure.unwrap_or(false)
            {
                return Err(format!(
                    "{} command failed in round {round}: {}",
                    match kind {
                        PlanKind::Detect => "Detect",
                        PlanKind::Investigate => "Investigate",
                        PlanKind::Repair => "Repair",
                    },
                    command.argv.join(" ")
                ));
            }
            previous_results.push(command_result);
        }

        // The rescue diagnosis re-run after each round is the authoritative
        // success signal; the agent's own healthy/done flags are advisory only.
        diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?;
        append_diagnosis_log(session_id, "post_round", round, &diagnosis);
        if diagnosis_is_healthy(&diagnosis) {
            return Ok(result_for_completion(
                session_id,
                round,
                kind,
                last_command,
                "Remote Doctor repair completed with a healthy rescue diagnosis.",
            ));
        }
        // Terminal-with-warnings case (condition continues just below this span):
        // an empty, "done" repair plan with only non-auto-fixable issues left.
        if matches!(kind, PlanKind::Repair)
            && plan.done
            &&
plan.commands.is_empty() + && diagnosis_has_only_non_auto_fixable_issues(&diagnosis) + { + return Ok(result_for_completion_with_warnings( + session_id, + round, + kind, + last_command, + "Remote Doctor completed all safe automatic repairs. Remaining issues are non-auto-fixable warnings.", + )); + } + + round_observations.push(RepairRoundObservation::new(round, &last_step_types, &diagnosis)); + if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { + let observation = round_observations + .last() + .expect("stalled observations should contain current round"); + append_remote_doctor_log( + session_id, + json!({ + "event": "repair_plan_stalled", + "protocol": "agent", + "round": round, + "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, + "latestStepTypes": observation.step_types, + "issues": observation.issue_summaries, + }), + ); + return Err(stalled_plan_error_message(observation)); + } + } + + Err(round_limit_error_message(&diagnosis, &last_step_types)) +} + +async fn start_remote_doctor_repair_impl( + app: AppHandle, + pool: &SshConnectionPool, + instance_id: String, + target_location: String, +) -> Result { + let target_location = parse_target_location(&target_location)?; + if matches!(target_location, TargetLocation::RemoteOpenclaw) { + ensure_remote_target_connected(pool, &instance_id).await?; + } + let session_id = Uuid::new_v4().to_string(); + let gateway = remote_doctor_gateway_config()?; + let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref())?; + log_dev(format!( + "[remote_doctor] start session={} instance_id={} target_location={:?} gateway_url={} auth_token_override={}", + session_id, + instance_id, + target_location, + gateway.url, + gateway.auth_token_override.is_some() + )); + append_remote_doctor_log( + &session_id, + json!({ + "event": "session_start", + "instanceId": instance_id, + "targetLocation": target_location, + "gatewayUrl": gateway.url, + "gatewayAuthTokenOverride": 
            // (continuation) closes the "session_start" audit record begun
            // immediately above this span.
            gateway.auth_token_override.is_some(),
        }),
    );

    // Connect to the repair gateway up front; this NodeClient is shared by
    // every protocol branch below and disconnected after the loop finishes.
    let client = NodeClient::new();
    client.connect(&gateway.url, app.clone(), creds).await?;
    let bridge = BridgeClient::new();

    // A forced protocol (when configured) disables automatic fallback chains;
    // otherwise the default protocol runs and unknown-method errors fall back.
    let forced_protocol = configured_remote_doctor_protocol();
    let active_protocol = forced_protocol.unwrap_or(default_remote_doctor_protocol());
    let pool_ref: &SshConnectionPool = pool;
    let app_handle = app.clone();
    let bridge_client = bridge.clone();
    let gateway_url = gateway.url.clone();
    let gateway_auth_override = gateway.auth_token_override.clone();
    // For a local gateway under the agent-planner protocol, make sure the
    // dedicated local planner agent exists before starting the loop.
    if matches!(active_protocol, RemoteDoctorProtocol::AgentPlanner)
        && gateway_url_is_local(&gateway_url)
    {
        ensure_local_remote_doctor_agent_ready()?;
    }
    // Only protocols that stream agent requests need the bridge connection.
    if protocol_requires_bridge(active_protocol) {
        ensure_agent_bridge_connected(
            &app,
            &bridge,
            &gateway_url,
            gateway_auth_override.as_deref(),
            &session_id,
        )
        .await;
    }
    let result = match active_protocol {
        RemoteDoctorProtocol::AgentPlanner => {
            let agent = run_agent_planner_repair_loop(
                &app,
                &client,
                &bridge_client,
                pool_ref,
                &session_id,
                &instance_id,
                target_location,
            )
            .await;

            // Fallback: if the gateway does not implement the agent-planner
            // methods (and no protocol was forced), retry with the legacy
            // doctor request/response protocol. The fallback is audit-logged.
            if forced_protocol.is_none()
                && matches!(&agent, Err(error) if is_unknown_method_error(error))
            {
                append_remote_doctor_log(
                    &session_id,
                    json!({
                        "event": "protocol_fallback",
                        "from": "agent",
                        "to": "legacy_doctor",
                        "reason": agent.as_ref().err(),
                    }),
                );
                run_remote_doctor_repair_loop(
                    Some(&app),
                    pool_ref,
                    &session_id,
                    &instance_id,
                    target_location,
                    |kind, round, previous_results| {
                        // NOTE(review): Investigate intentionally maps onto the
                        // repair method — the legacy protocol appears to have no
                        // separate investigate endpoint; confirm this is desired.
                        let method = match kind {
                            PlanKind::Detect => detect_method_name(),
                            PlanKind::Investigate => repair_method_name(),
                            PlanKind::Repair => repair_method_name(),
                        };
                        // Re-borrow captured values so the async block moves
                        // only references, not the owned outer bindings.
                        let client = &client;
                        let session_id = &session_id;
                        let instance_id = &instance_id;
                        async move {
                            request_plan(
                                client,
                                &method,
                                kind,
                                session_id,
                                round,
                                target_location,
                                instance_id,
                                &previous_results,
                            )
                            .await
                        }
                    },
                )
                .await
            }
else { + agent + } + } + RemoteDoctorProtocol::LegacyDoctor => { + let legacy = run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let method = match kind { + PlanKind::Detect => detect_method_name(), + PlanKind::Investigate => repair_method_name(), + PlanKind::Repair => repair_method_name(), + }; + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + async move { + request_plan( + client, + &method, + kind, + session_id, + round, + target_location, + instance_id, + &previous_results, + ) + .await + } + }, + ) + .await; + + if forced_protocol.is_none() + && matches!(&legacy, Err(error) if is_unknown_method_error(error)) + { + append_remote_doctor_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "legacy_doctor", + "to": "clawpal_server", + "reason": legacy.as_ref().err(), + }), + ); + log_dev(format!( + "[remote_doctor] session={} protocol fallback legacy_doctor -> clawpal_server", + session_id + )); + run_clawpal_server_repair_loop(&app, &client, &session_id, &instance_id, target_location) + .await + } else { + legacy + } + } + RemoteDoctorProtocol::ClawpalServer => { + let clawpal_server = + run_clawpal_server_repair_loop(&app, &client, &session_id, &instance_id, target_location) + .await; + if forced_protocol.is_none() + && matches!(&clawpal_server, Err(error) if is_unknown_method_error(error)) + { + append_remote_doctor_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "clawpal_server", + "to": "agent", + "reason": clawpal_server.as_ref().err(), + }), + ); + let agent = run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + let app_handle = app_handle.clone(); + let bridge_client = bridge_client.clone(); + let 
gateway_url = gateway_url.clone(); + let gateway_auth_override = gateway_auth_override.clone(); + let empty_diagnosis = empty_diagnosis(); + let empty_config = empty_config_excerpt_context(); + async move { + ensure_agent_bridge_connected( + &app_handle, + &bridge_client, + &gateway_url, + gateway_auth_override.as_deref(), + session_id, + ) + .await; + let text = if bridge_client.is_connected().await { + run_agent_request_with_bridge( + &app_handle, + client, + &bridge_client, + pool_ref, + target_location, + instance_id, + remote_doctor_agent_id(), + &remote_doctor_agent_session_key(session_id), + &build_agent_plan_prompt( + kind, + session_id, + round, + target_location, + instance_id, + &empty_diagnosis, + &empty_config, + &previous_results, + ), + ) + .await? + } else { + client + .run_agent_request( + remote_doctor_agent_id(), + &remote_doctor_agent_session_key(session_id), + &build_agent_plan_prompt( + kind, + session_id, + round, + target_location, + instance_id, + &empty_diagnosis, + &empty_config, + &previous_results, + ), + ) + .await? 
+ }; + parse_agent_plan_response(kind, &text) + } + }, + ) + .await; + if matches!(&agent, Err(error) if is_unknown_method_error(error)) { + append_remote_doctor_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "agent", + "to": "legacy_doctor", + "reason": agent.as_ref().err(), + }), + ); + run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let method = match kind { + PlanKind::Detect => detect_method_name(), + PlanKind::Investigate => repair_method_name(), + PlanKind::Repair => repair_method_name(), + }; + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + async move { + request_plan( + client, + &method, + kind, + session_id, + round, + target_location, + instance_id, + &previous_results, + ) + .await + } + }, + ) + .await + } else { + agent + } + } else { + clawpal_server + } + } + }; + + let _ = client.disconnect().await; + let _ = bridge.disconnect().await; + + match result { + Ok(done) => { + append_remote_doctor_log( + &session_id, + json!({ + "event": "session_complete", + "status": "completed", + "latestDiagnosisHealthy": done.latest_diagnosis_healthy, + }), + ); + Ok(done) + } + Err(error) => { + append_remote_doctor_log( + &session_id, + json!({ + "event": "session_complete", + "status": "failed", + "reason": error, + }), + ); + Err(error) + } + } +} + +#[tauri::command] +pub async fn start_remote_doctor_repair( + app: AppHandle, + pool: State<'_, SshConnectionPool>, + instance_id: String, + target_location: String, +) -> Result { + start_remote_doctor_repair_impl(app, &pool, instance_id, target_location).await +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; + use crate::ssh::SshHostConfig; + use std::net::TcpStream; + use tauri::test::mock_app; + + #[test] + fn build_shell_command_escapes_single_quotes() { 
+ let command = build_shell_command(&["echo".into(), "a'b".into()]); + assert_eq!(command, "'echo' 'a'\\''b'"); + } + + #[test] + fn parse_target_location_rejects_unknown_values() { + let error = parse_target_location("elsewhere").unwrap_err(); + assert!(error.contains("Unsupported target location")); + } + + #[test] + fn apply_config_set_creates_missing_object_path() { + let mut value = json!({}); + apply_config_set( + &mut value, + "models.providers.openai.baseUrl", + json!("http://127.0.0.1:3000/v1"), + ) + .expect("config set"); + assert_eq!( + value.pointer("/models/providers/openai/baseUrl").and_then(Value::as_str), + Some("http://127.0.0.1:3000/v1") + ); + } + + #[test] + fn apply_config_unset_removes_existing_leaf() { + let mut value = json!({ + "models": { + "providers": { + "openai": { + "baseUrl": "http://127.0.0.1:3000/v1", + "models": [{"id": "gpt-4.1"}] + } + } + } + }); + apply_config_unset(&mut value, "models.providers.openai.baseUrl").expect("config unset"); + assert!(value.pointer("/models/providers/openai/baseUrl").is_none()); + assert!(value.pointer("/models/providers/openai/models").is_some()); + } + + #[test] + fn parse_agent_plan_response_reads_json_payload() { + let text = r#"preface +{"planId":"detect-1","planKind":"detect","summary":"ok","commands":[{"argv":["openclaw","doctor","--json"]}],"healthy":false,"done":false,"success":false} +"#; + let plan = parse_agent_plan_response(PlanKind::Detect, text).expect("parse plan"); + assert_eq!(plan.plan_id, "detect-1"); + assert_eq!(plan.commands[0].argv, vec!["openclaw", "doctor", "--json"]); + } + + #[test] + fn build_agent_plan_prompt_mentions_target_and_schema() { + let prompt = build_agent_plan_prompt( + PlanKind::Repair, + "sess-1", + 3, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &sample_diagnosis(Vec::new()), + &ConfigExcerptContext { + config_excerpt: json!({"ok": true}), + config_excerpt_raw: None, + config_parse_error: None, + }, + &[], + ); + assert!(prompt.contains("Task: produce 
the next repair plan")); + assert!(prompt.contains("Target location: remote_openclaw")); + assert!(prompt.contains("\"planKind\": \"repair\"")); + assert!(prompt.contains("\"configExcerpt\"")); + assert!(prompt.contains("clawpal doctor probe-openclaw")); + assert!(prompt.contains("openclaw gateway status")); + assert!(prompt.contains("Output valid JSON only.")); + } + + #[test] + fn default_remote_doctor_protocol_prefers_agent() { + assert_eq!( + default_remote_doctor_protocol(), + RemoteDoctorProtocol::AgentPlanner + ); + } + + #[test] + fn unreadable_config_requires_investigate_plan_kind() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "primary.config.unreadable", + "code": "primary.config.unreadable", + "severity": "error", + "message": "Primary configuration could not be read", + "autoFixable": false, + "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", + "source": "primary" + })]); + assert_eq!(next_agent_plan_kind(&diagnosis), PlanKind::Investigate); + } + + #[test] + fn unreadable_config_switches_to_repair_after_investigation_results_exist() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "primary.config.unreadable", + "code": "primary.config.unreadable", + "severity": "error", + "message": "Primary configuration could not be read", + "autoFixable": false, + "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", + "source": "primary" + })]); + let previous_results = vec![CommandResult { + argv: vec!["clawpal".into(), "doctor".into(), "config-read-raw".into()], + exit_code: Some(0), + stdout: "{\"raw\":\"{\\n ddd\\n}\"}".into(), + stderr: String::new(), + duration_ms: 1, + timed_out: false, + }]; + assert_eq!( + next_agent_plan_kind_for_round(&diagnosis, &previous_results), + PlanKind::Repair + ); + } + + #[test] + fn non_auto_fixable_warning_only_diagnosis_is_terminal() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "rescue.gateway.unhealthy", + "code": 
"rescue.gateway.unhealthy", + "severity": "warn", + "message": "Rescue gateway is not healthy", + "autoFixable": false, + "fixHint": "Inspect rescue gateway logs before using failover", + "source": "rescue" + })]); + assert!(diagnosis_has_only_non_auto_fixable_issues(&diagnosis)); + } + + #[test] + fn investigate_prompt_requires_read_only_diagnosis_steps() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "primary.config.unreadable", + "code": "primary.config.unreadable", + "severity": "error", + "message": "Primary configuration could not be read", + "autoFixable": false, + "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", + "source": "primary" + })]); + let prompt = build_agent_plan_prompt( + PlanKind::Investigate, + "sess-1", + 1, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &diagnosis, + &build_config_excerpt_context("{\n ddd\n}"), + &[], + ); + assert!(prompt.contains("read-only")); + assert!(prompt.contains("Do not modify files")); + assert!(prompt.contains("\"planKind\": \"investigate\"")); + assert!(prompt.contains("configParseError")); + } + + #[test] + fn investigate_prompt_discourages_long_running_log_commands() { + let prompt = build_agent_plan_prompt( + PlanKind::Investigate, + "sess-1", + 1, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &sample_diagnosis(Vec::new()), + &empty_config_excerpt_context(), + &[], + ); + assert!(prompt.contains("Do not run follow/tail commands")); + assert!(prompt.contains("bounded")); + assert!(prompt.contains("Do not use heredocs")); + } + + #[test] + fn repair_prompt_discourages_unverified_openclaw_subcommands() { + let prompt = build_agent_plan_prompt( + PlanKind::Repair, + "sess-1", + 2, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &sample_diagnosis(Vec::new()), + &empty_config_excerpt_context(), + &[], + ); + assert!(prompt.contains("Do not invent OpenClaw subcommands")); + assert!(prompt.contains("Do not use `openclaw auth")); + assert!(prompt.contains("Do not 
use `openclaw doctor --json`")); + assert!(!prompt.contains("- `openclaw doctor --json`")); + } + + #[test] + fn remote_doctor_agent_id_is_dedicated() { + assert_eq!(remote_doctor_agent_id(), "clawpal-remote-doctor"); + assert!(!remote_doctor_agent_session_key("sess-1").contains("main")); + assert!( + remote_doctor_agent_session_key("sess-1") + .starts_with("agent:clawpal-remote-doctor:") + ); + } + + #[test] + fn ensure_local_remote_doctor_agent_creates_workspace_bootstrap_files() { + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-agent-test-{}", + Uuid::new_v4() + )); + let home_dir = temp_root.join("home"); + let clawpal_dir = temp_root.join("clawpal"); + let openclaw_dir = home_dir.join(".openclaw"); + std::fs::create_dir_all(&openclaw_dir).expect("create openclaw dir"); + std::fs::create_dir_all(&clawpal_dir).expect("create clawpal dir"); + std::fs::write( + openclaw_dir.join("openclaw.json"), + r#"{ + "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } }, + "agents": { + "defaults": { "model": "openai/gpt-4o-mini" }, + "list": [{ "id": "main", "workspace": "~/.openclaw/workspaces/main" }] + } +} +"#, + ) + .expect("write config"); + + set_active_openclaw_home_override(Some(home_dir.to_string_lossy().to_string())) + .expect("set openclaw override"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal override"); + + let result = ensure_local_remote_doctor_agent_ready(); + + set_active_openclaw_home_override(None).expect("clear openclaw override"); + set_active_clawpal_data_override(None).expect("clear clawpal override"); + + if let Err(error) = &result { + let _ = std::fs::remove_dir_all(&temp_root); + panic!("ensure agent ready: {error}"); + } + + let cfg: Value = serde_json::from_str( + &std::fs::read_to_string(openclaw_dir.join("openclaw.json")).expect("read config"), + ) + .expect("parse config"); + let agent = cfg["agents"]["list"] + .as_array() + 
.and_then(|agents| { + agents.iter().find(|agent| { + agent.get("id").and_then(Value::as_str) == Some(remote_doctor_agent_id()) + }) + }) + .expect("dedicated agent entry"); + let workspace = agent["workspace"] + .as_str() + .expect("agent workspace") + .replace("~/", &format!("{}/", home_dir.to_string_lossy())); + for file_name in ["IDENTITY.md", "USER.md", "BOOTSTRAP.md", "AGENTS.md"] { + let content = + std::fs::read_to_string(std::path::Path::new(&workspace).join(file_name)) + .unwrap_or_else(|error| panic!("read {file_name}: {error}")); + assert!(!content.trim().is_empty(), "{file_name} should not be empty"); + } + + let _ = std::fs::remove_dir_all(&temp_root); + } + + #[test] + fn only_agent_planner_protocol_requires_bridge() { + assert!(protocol_requires_bridge(RemoteDoctorProtocol::AgentPlanner)); + assert!(!protocol_requires_bridge(RemoteDoctorProtocol::ClawpalServer)); + assert!(!protocol_requires_bridge(RemoteDoctorProtocol::LegacyDoctor)); + } + + #[test] + fn clawpal_server_protocol_skips_local_rescue_preflight() { + assert!(!protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer)); + assert!(!protocol_runs_rescue_preflight(RemoteDoctorProtocol::AgentPlanner)); + } + + #[test] + fn remote_target_host_id_candidates_include_exact_and_stripped_ids() { + assert_eq!( + remote_target_host_id_candidates("ssh:15-235-214-81"), + vec!["ssh:15-235-214-81".to_string(), "15-235-214-81".to_string()] + ); + assert_eq!( + remote_target_host_id_candidates("e2e-remote-doctor"), + vec!["e2e-remote-doctor".to_string()] + ); + } + + #[test] + fn primary_remote_target_host_id_prefers_exact_instance_id() { + assert_eq!( + primary_remote_target_host_id("ssh:15-235-214-81").unwrap(), + "ssh:15-235-214-81" + ); + } + + #[test] + fn parse_invoke_argv_supports_command_string_payloads() { + let argv = parse_invoke_argv( + "clawpal", + &json!({ + "command": "doctor config-read models.providers.openai" + }), + ) + .expect("parse invoke argv"); + assert_eq!( + argv, + 
vec![ + "clawpal", + "doctor", + "config-read", + "models.providers.openai" + ] + ); + } + + #[test] + fn plan_commands_treat_clawpal_as_internal_tool() { + assert!(plan_command_uses_internal_clawpal_tool(&[ + "clawpal".to_string(), + "doctor".to_string(), + "config-read".to_string(), + ])); + assert!(!plan_command_uses_internal_clawpal_tool(&[ + "openclaw".to_string(), + "doctor".to_string(), + ])); + } + + #[test] + fn unsupported_openclaw_subcommand_is_rejected_early() { + let error = validate_plan_command_argv(&[ + "openclaw".to_string(), + "auth".to_string(), + "list".to_string(), + ]) + .unwrap_err(); + assert!(error.contains("Unsupported openclaw plan command")); + assert!(error.contains("openclaw auth list")); + } + + #[test] + fn openclaw_doctor_json_is_rejected_early() { + let error = validate_plan_command_argv(&[ + "openclaw".to_string(), + "doctor".to_string(), + "--json".to_string(), + ]) + .unwrap_err(); + assert!(error.contains("Unsupported openclaw plan command")); + assert!(error.contains("openclaw doctor --json")); + } + + #[test] + fn multiline_clawpal_exec_is_rejected_early() { + let error = validate_plan_command_argv(&[ + "clawpal".to_string(), + "doctor".to_string(), + "exec".to_string(), + "--tool".to_string(), + "python3".to_string(), + "--args".to_string(), + "- <<'PY'\nprint('hi')\nPY".to_string(), + ]) + .unwrap_err(); + assert!(error.contains("Unsupported clawpal doctor exec args")); + assert!(error.contains("heredocs")); + } + + #[test] + fn plan_command_failure_message_mentions_command_and_error() { + let error = plan_command_failure_message( + PlanKind::Investigate, + 2, + &["openclaw".to_string(), "gateway".to_string(), "logs".to_string()], + "ssh command failed: russh exec timed out after 25s", + ); + assert!(error.contains("Investigate command failed in round 2")); + assert!(error.contains("openclaw gateway logs")); + assert!(error.contains("timed out after 25s")); + } + + fn sample_diagnosis(issues: Vec) -> 
RescuePrimaryDiagnosisResult { + serde_json::from_value(json!({ + "status": if issues.is_empty() { "healthy" } else { "broken" }, + "checkedAt": "2026-03-18T00:00:00Z", + "targetProfile": "primary", + "rescueProfile": "rescue", + "rescueConfigured": true, + "rescuePort": 18789, + "summary": { + "status": if issues.is_empty() { "healthy" } else { "broken" }, + "headline": if issues.is_empty() { "Healthy" } else { "Broken" }, + "recommendedAction": if issues.is_empty() { "No action needed" } else { "Repair issues" }, + "fixableIssueCount": issues.len(), + "selectedFixIssueIds": issues.iter().filter_map(|issue| issue.get("id").and_then(Value::as_str)).collect::>(), + "rootCauseHypotheses": [], + "fixSteps": [], + "confidence": 0.8, + "citations": [], + "versionAwareness": null + }, + "sections": [], + "checks": [], + "issues": issues + })) + .expect("sample diagnosis") + } + + #[test] + fn diagnosis_issue_summaries_capture_code_severity_and_message() { + let diagnosis = sample_diagnosis(vec![ + json!({ + "id": "gateway.unhealthy", + "code": "gateway.unhealthy", + "severity": "high", + "message": "Gateway is unhealthy", + "autoFixable": true, + "fixHint": "Restart gateway", + "source": "gateway" + }), + json!({ + "id": "providers.base_url", + "code": "invalid.base_url", + "severity": "medium", + "message": "Provider base URL is invalid", + "autoFixable": true, + "fixHint": "Reset baseUrl", + "source": "config" + }), + ]); + + let summary = diagnosis_issue_summaries(&diagnosis); + assert_eq!(summary.len(), 2); + assert_eq!(summary[0]["code"], "gateway.unhealthy"); + assert_eq!(summary[0]["severity"], "high"); + assert_eq!(summary[0]["title"], "Gateway is unhealthy"); + assert_eq!(summary[0]["target"], "gateway"); + assert_eq!(summary[1]["code"], "invalid.base_url"); + } + + #[test] + fn repeated_rediagnose_only_rounds_are_detected_as_stalled() { + let diagnosis = sample_diagnosis(vec![ + json!({ + "id": "providers.base_url", + "code": "invalid.base_url", + "severity": 
"medium", + "message": "Provider base URL is invalid", + "autoFixable": true, + "fixHint": "Reset baseUrl", + "source": "config" + }), + ]); + let step_types = vec!["doctorRediagnose".to_string()]; + + assert!(!repair_plan_stalled( + &[ + RepairRoundObservation::new(1, &step_types, &diagnosis), + RepairRoundObservation::new(2, &step_types, &diagnosis), + ], + 3, + )); + assert!(repair_plan_stalled( + &[ + RepairRoundObservation::new(1, &step_types, &diagnosis), + RepairRoundObservation::new(2, &step_types, &diagnosis), + RepairRoundObservation::new(3, &step_types, &diagnosis), + ], + 3, + )); + } + + #[test] + fn round_limit_error_message_includes_latest_issues_and_step_types() { + let diagnosis = sample_diagnosis(vec![ + json!({ + "id": "providers.base_url", + "code": "invalid.base_url", + "severity": "medium", + "message": "Provider base URL is invalid", + "autoFixable": true, + "fixHint": "Reset baseUrl", + "source": "config" + }), + ]); + let error = round_limit_error_message( + &diagnosis, + &["doctorRediagnose".to_string()], + ); + assert!(error.contains("invalid.base_url")); + assert!(error.contains("doctorRediagnose")); + assert!(error.contains("Provider base URL is invalid")); + } + + #[test] + fn unreadable_config_context_uses_raw_excerpt_and_parse_error() { + let context = build_config_excerpt_context("{\n ddd\n}"); + assert!(context.config_excerpt.is_null()); + assert!(context.config_excerpt_raw.as_deref().unwrap_or_default().contains("ddd")); + assert!( + context + .config_parse_error + .as_deref() + .unwrap_or_default() + .contains("key must be a string") + ); + } + + #[test] + fn unreadable_config_context_summary_marks_excerpt_missing() { + let context = build_config_excerpt_context("{\n ddd\n}"); + let summary = config_excerpt_log_summary(&context); + assert_eq!(summary["configExcerptPresent"], json!(false)); + assert_eq!(summary["configExcerptRawPresent"], json!(true)); + assert!( + summary["configParseError"] + .as_str() + .unwrap_or_default() + 
.contains("key must be a string") + ); + } + + #[test] + fn config_read_response_returns_raw_context_for_unreadable_json() { + let value = config_read_response("{\n ddd\n}", None).expect("config read response"); + assert!(value["value"].is_null()); + assert!(value["raw"].as_str().unwrap_or_default().contains("ddd")); + assert!( + value["parseError"] + .as_str() + .unwrap_or_default() + .contains("key must be a string") + ); + } + + #[test] + fn decode_base64_config_payload_reads_utf8_text() { + use base64::Engine as _; + let encoded = base64::engine::general_purpose::STANDARD.encode("{\"ok\":true}"); + let decoded = decode_base64_config_payload(&encoded).expect("decode payload"); + assert_eq!(decoded, "{\"ok\":true}"); + } + + #[test] + fn diagnosis_missing_rescue_profile_is_detected() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "rescue.profile.missing", + "code": "rescue.profile.missing", + "severity": "error", + "message": "Rescue profile \"rescue\" is not configured", + "autoFixable": false, + "fixHint": "Activate Rescue Bot first", + "source": "rescue" + })]); + assert!(diagnosis_missing_rescue_profile(&diagnosis)); + } + + #[test] + fn diagnosis_unhealthy_rescue_gateway_is_detected() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "rescue.gateway.unhealthy", + "code": "rescue.gateway.unhealthy", + "severity": "warn", + "message": "Rescue gateway is not healthy", + "autoFixable": false, + "fixHint": "Inspect rescue gateway logs before using failover", + "source": "rescue" + })]); + assert!(diagnosis_unhealthy_rescue_gateway(&diagnosis)); + } + + #[test] + fn rescue_setup_command_result_reports_activation() { + let result = rescue_setup_command_result("activate", "rescue", true, true, "active"); + assert_eq!(result.argv, vec!["manage_rescue_bot", "activate", "rescue"]); + assert_eq!(result.exit_code, Some(0)); + assert!(result.stdout.contains("configured=true")); + assert!(result.stdout.contains("active=true")); + } + + #[test] + fn 
rescue_setup_activation_error_mentions_runtime_state() { + let error = rescue_activation_error_message( + "rescue", + false, + "configured_inactive", + &["manage_rescue_bot status rescue".to_string(), "openclaw --profile rescue gateway status".to_string()], + ); + assert!(error.contains("rescue")); + assert!(error.contains("configured_inactive")); + assert!(error.contains("did not become active")); + assert!(error.contains("manage_rescue_bot status rescue")); + assert!(error.contains("openclaw --profile rescue gateway status")); + } + + #[test] + fn rescue_activation_diagnostic_commands_include_status_and_gateway_checks() { + let commands = rescue_activation_diagnostic_commands("rescue"); + let rendered = commands + .iter() + .map(|command| command.join(" ")) + .collect::>(); + assert!(rendered.contains(&"manage_rescue_bot status rescue".to_string())); + assert!(rendered.contains(&"openclaw --profile rescue gateway status".to_string())); + assert!(rendered.contains(&"openclaw --profile rescue config get gateway.port --json".to_string())); + } + + const E2E_CONTAINER_NAME: &str = "clawpal-e2e-remote-doctor"; + const E2E_SSH_PORT: u16 = 2399; + const E2E_ROOT_PASSWORD: &str = "clawpal-remote-doctor-pass"; + const E2E_DOCKERFILE: &str = r#" +FROM ubuntu:22.04 +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update && apt-get install -y openssh-server && rm -rf /var/lib/apt/lists/* && mkdir /var/run/sshd +RUN echo "root:ROOTPASS" | chpasswd && \ + sed -i 's/#PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config && \ + sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config && \ + echo "PasswordAuthentication yes" >> /etc/ssh/sshd_config +RUN mkdir -p /root/.openclaw +RUN cat > /root/.openclaw/openclaw.json <<'EOF' +{ + "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } }, + "auth": { + "profiles": { + "openai-default": { + "provider": "openai", + "apiKey": "sk-test" + } + } + }, + "models": { + "providers": { + 
"openai": { + "baseUrl": "http://127.0.0.1:9/v1", + "models": [{ "id": "gpt-4o-mini", "name": "gpt-4o-mini" }] + } + } + }, + "agents": { + "defaults": { "model": "openai/gpt-4o-mini" }, + "list": [ { "id": "main", "model": "anthropic/claude-sonnet-4-20250514" } ] + }, + "channels": { + "discord": { + "guilds": { + "guild-1": { + "channels": { + "general": { "model": "openai/gpt-4o-mini" } + } + } + } + } + } +} +EOF +RUN cat > /usr/local/bin/openclaw <<'EOF' && chmod +x /usr/local/bin/openclaw +#!/bin/sh +STATE_DIR="${OPENCLAW_STATE_DIR:-${OPENCLAW_HOME:-$HOME/.openclaw}}" +CONFIG_PATH="$STATE_DIR/openclaw.json" +PROFILE="primary" +if [ "$1" = "--profile" ]; then + PROFILE="$2" + shift 2 +fi +case "$1" in + --version) + echo "openclaw 2026.3.2-test" + ;; + doctor) + if grep -q '127.0.0.1:9/v1' "$CONFIG_PATH"; then + echo '{"ok":false,"score":40,"issues":[{"id":"primary.models.base_url","code":"invalid.base_url","severity":"error","message":"provider baseUrl points to test blackhole","autoFixable":true,"fixHint":"Remove the bad baseUrl override"}]}' + else + echo '{"ok":true,"score":100,"issues":[],"checks":[{"id":"test","status":"ok"}]}' + fi + ;; + agents) + if [ "$2" = "list" ] && [ "$3" = "--json" ]; then + echo '[{"id":"main"}]' + else + echo "unsupported openclaw agents command" >&2 + exit 1 + fi + ;; + models) + if [ "$2" = "list" ] && [ "$3" = "--all" ] && [ "$4" = "--json" ] && [ "$5" = "--no-color" ]; then + echo '{"models":[{"key":"openai/gpt-4o-mini","provider":"openai","id":"gpt-4o-mini","name":"gpt-4o-mini","baseUrl":"https://api.openai.com/v1"}],"providers":{"openai":{"baseUrl":"https://api.openai.com/v1"}}}' + else + echo "unsupported openclaw models command" >&2 + exit 1 + fi + ;; + config) + if [ "$2" = "get" ] && [ "$3" = "gateway.port" ] && [ "$4" = "--json" ]; then + if [ "$PROFILE" = "rescue" ]; then + echo '19789' + else + echo '18789' + fi + else + echo "unsupported openclaw config command: $*" >&2 + exit 1 + fi + ;; + gateway) + case "$2" 
in + status) + if [ "$PROFILE" = "rescue" ] && [ "${OPENCLAW_RESCUE_GATEWAY_ACTIVE:-1}" != "1" ]; then + echo '{"running":false,"healthy":false,"gateway":{"running":false},"health":{"ok":false}}' + else + echo '{"running":true,"healthy":true,"gateway":{"running":true},"health":{"ok":true}}' + fi + ;; + restart|start|stop) + echo '{"ok":true}' + ;; + *) + echo "unsupported openclaw gateway command: $*" >&2 + exit 1 + ;; + esac + ;; + *) + echo "unsupported openclaw command: $*" >&2 + exit 1 + ;; +esac +EOF +EXPOSE 22 +CMD ["/usr/sbin/sshd", "-D"] +"#; + + fn should_run_docker_e2e() -> bool { + std::env::var("CLAWPAL_RUN_REMOTE_DOCTOR_E2E").ok().as_deref() == Some("1") + } + + fn live_gateway_url() -> Option { + std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL") + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) + } + + fn live_gateway_token() -> Option { + std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN") + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) + } + + fn live_gateway_instance_id() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_INSTANCE_ID") + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) + .unwrap_or_else(|| "local".to_string()) + } + + fn live_gateway_target_location() -> TargetLocation { + match std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TARGET_LOCATION") + .ok() + .as_deref() + { + Some("remote_openclaw") => TargetLocation::RemoteOpenclaw, + _ => TargetLocation::LocalOpenclaw, + } + } + + fn live_gateway_protocol() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_PROTOCOL") + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) + .unwrap_or_else(|| "clawpal_server".to_string()) + } + + fn docker_available() -> bool { + Command::new("docker") + .args(["info"]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .map(|status| status.success()) + 
.unwrap_or(false) + } + + fn cleanup_e2e_container() { + let _ = Command::new("docker") + .args(["rm", "-f", E2E_CONTAINER_NAME]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status(); + let _ = Command::new("docker") + .args(["rmi", "-f", &format!("{E2E_CONTAINER_NAME}:latest")]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status(); + } + + fn build_e2e_image() -> Result<(), String> { + let dockerfile = E2E_DOCKERFILE.replace("ROOTPASS", E2E_ROOT_PASSWORD); + let output = Command::new("docker") + .args(["build", "-t", &format!("{E2E_CONTAINER_NAME}:latest"), "-f", "-", "."]) + .stdin(std::process::Stdio::piped()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .current_dir(std::env::temp_dir()) + .spawn() + .and_then(|mut child| { + if let Some(ref mut stdin) = child.stdin { + stdin.write_all(dockerfile.as_bytes())?; + } + child.wait_with_output() + }) + .map_err(|error| format!("docker build failed: {error}"))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } + Ok(()) + } + + fn start_e2e_container() -> Result<(), String> { + start_e2e_container_with_env(&[]) + } + + fn start_e2e_container_with_env(env: &[(&str, &str)]) -> Result<(), String> { + let mut args = vec![ + "run".to_string(), + "-d".to_string(), + "--name".to_string(), + E2E_CONTAINER_NAME.to_string(), + ]; + for (key, value) in env { + args.push("-e".to_string()); + args.push(format!("{key}={value}")); + } + args.extend([ + "-p".to_string(), + format!("{E2E_SSH_PORT}:22"), + format!("{E2E_CONTAINER_NAME}:latest"), + ]); + let output = Command::new("docker") + .args(&args) + .output() + .map_err(|error| format!("docker run failed: {error}"))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } + Ok(()) + } + + fn wait_for_ssh(timeout_secs: u64) -> Result<(), String> { + let start = 
Instant::now(); + while start.elapsed().as_secs() < timeout_secs { + if TcpStream::connect(format!("127.0.0.1:{E2E_SSH_PORT}")).is_ok() { + std::thread::sleep(std::time::Duration::from_millis(500)); + return Ok(()); + } + std::thread::sleep(std::time::Duration::from_millis(300)); + } + Err("timeout waiting for ssh".into()) + } + + fn e2e_host_config() -> SshHostConfig { + SshHostConfig { + id: "e2e-remote-doctor".into(), + label: "E2E Remote Doctor".into(), + host: "127.0.0.1".into(), + port: E2E_SSH_PORT, + username: "root".into(), + auth_method: "password".into(), + key_path: None, + password: Some(E2E_ROOT_PASSWORD.into()), + passphrase: None, + } + } + + #[tokio::test] + async fn remote_doctor_docker_e2e_loop_completes() { + if !should_run_docker_e2e() { + eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); + return; + } + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let temp_root = std::env::temp_dir().join(format!("clawpal-remote-doctor-e2e-{}", Uuid::new_v4())); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())).expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + let pool = SshConnectionPool::new(); + let cfg = e2e_host_config(); + pool.connect(&cfg).await.expect("ssh connect"); + + let session_id = Uuid::new_v4().to_string(); + let marker = "/tmp/clawpal-remote-doctor-fixed"; + let result = run_remote_doctor_repair_loop( + Option::<&AppHandle>::None, + &pool, + &session_id, + &format!("ssh:{}", cfg.id), 
+ TargetLocation::RemoteOpenclaw, + |kind, round, previous_results| async move { + match (kind, round) { + (PlanKind::Detect, 1) => Ok(PlanResponse { + plan_id: "detect-1".into(), + plan_kind: PlanKind::Detect, + summary: "Initial detect".into(), + commands: vec![PlanCommand { + argv: vec!["openclaw".into(), "--version".into()], + timeout_sec: Some(10), + purpose: Some("collect version".into()), + continue_on_failure: Some(false), + }], + healthy: false, + done: false, + success: false, + }), + (PlanKind::Repair, 1) => { + assert_eq!(previous_results.len(), 1); + Ok(PlanResponse { + plan_id: "repair-1".into(), + plan_kind: PlanKind::Repair, + summary: "Write marker".into(), + commands: vec![PlanCommand { + argv: vec![ + "sh".into(), + "-lc".into(), + format!("printf 'fixed' > {marker}"), + ], + timeout_sec: Some(10), + purpose: Some("mark repaired".into()), + continue_on_failure: Some(false), + }], + healthy: false, + done: false, + success: false, + }) + } + (PlanKind::Detect, 2) => { + assert_eq!(previous_results.len(), 1); + assert_eq!( + previous_results[0].stdout.trim(), + "", + "repair command should not print to stdout" + ); + Ok(PlanResponse { + plan_id: "detect-2".into(), + plan_kind: PlanKind::Detect, + summary: "Marker exists".into(), + commands: Vec::new(), + healthy: true, + done: true, + success: true, + }) + } + _ => Err(format!("unexpected planner request: {:?} round {}", kind, round)), + } + }, + ) + .await + .expect("remote doctor loop should complete"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + assert_eq!(result.round, 2); + + let marker_result = pool + .exec(&cfg.id, &format!("test -f {marker}")) + .await + .expect("marker check"); + assert_eq!(marker_result.exit_code, 0); + + let log_path = clawpal_dir.join("doctor").join("remote").join(format!("{session_id}.jsonl")); + let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor log"); + 
assert!(log_text.contains("\"planKind\":\"detect\"")); + assert!(log_text.contains("\"planKind\":\"repair\"")); + let _ = std::fs::remove_dir_all(temp_root); + set_active_clawpal_data_override(None).expect("clear clawpal data"); + } + + #[tokio::test] + async fn remote_doctor_docker_e2e_rescue_activation_fails_when_gateway_stays_inactive() { + if !should_run_docker_e2e() { + eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); + return; + } + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container_with_env(&[("OPENCLAW_RESCUE_GATEWAY_ACTIVE", "0")]).expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let pool = app_handle.state::(); + let cfg = e2e_host_config(); + pool.connect(&cfg).await.expect("ssh connect"); + + let error = ensure_rescue_profile_ready( + &app_handle, + TargetLocation::RemoteOpenclaw, + &format!("ssh:{}", cfg.id), + ) + .await + .expect_err("rescue activation should fail when gateway remains inactive"); + + assert!(error.message.contains("did not become active")); + assert!(error.message.contains("configured_inactive")); + assert!( + error + .diagnostics + .iter() + .any(|result| result.argv.join(" ") == "manage_rescue_bot status rescue") + ); + } + + #[tokio::test] + async fn remote_doctor_live_gateway_uses_configured_url_and_token() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + + let app = mock_app(); + let app_handle = 
app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = + std::env::temp_dir().join(format!("clawpal-remote-doctor-live-{}", Uuid::new_v4())); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let gateway = remote_doctor_gateway_config().expect("gateway config"); + assert_eq!(gateway.url, url); + assert_eq!(gateway.auth_token_override.as_deref(), Some(token.as_str())); + + let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) + .expect("gateway credentials"); + assert!(creds.is_some(), "expected token override credentials"); + + let client = NodeClient::new(); + client + .connect(&gateway.url, app.handle().clone(), creds) + .await + .expect("connect live remote doctor gateway"); + assert!(client.is_connected().await); + match live_gateway_protocol().as_str() { + "clawpal_server" => { + let response = client + .send_request( + "remote_repair_plan.request", + json!({ + "requestId": format!("live-e2e-{}", Uuid::new_v4()), + "targetId": live_gateway_instance_id(), + "context": { + "configExcerpt": { + "models": { + "providers": { + "openai-codex": { + "baseUrl": "http://127.0.0.1:9/v1" + } + } + } + } + } + }), + ) + .await + .expect("request clawpal-server remote repair plan"); + let plan_id = response + .get("planId") + .and_then(|value| value.as_str()) + .unwrap_or_default(); + assert!( + !plan_id.trim().is_empty(), + "clawpal-server response should include a plan id" + ); + let steps = response + .get("steps") + .and_then(|value| value.as_array()) + .cloned() + .unwrap_or_default(); + 
assert!( + !steps.is_empty(), + "clawpal-server response should include repair steps" + ); + } + _ => { + let detect_plan = request_plan( + &client, + &detect_method_name(), + PlanKind::Detect, + &format!("live-e2e-{}", Uuid::new_v4()), + 1, + live_gateway_target_location(), + &live_gateway_instance_id(), + &[], + ) + .await + .expect("request live detection plan"); + assert!( + !detect_plan.plan_id.trim().is_empty(), + "live detection plan should include a plan id" + ); + } + } + client.disconnect().await.expect("disconnect"); + + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); + } + + #[tokio::test] + async fn remote_doctor_live_gateway_full_repair_loop_completes() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir() + .join(format!("clawpal-remote-doctor-live-loop-{}", Uuid::new_v4())); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + 
serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let cfg = e2e_host_config(); + let pool = app_handle.state::(); + pool.connect(&cfg).await.expect("ssh connect"); + + let gateway = remote_doctor_gateway_config().expect("gateway config"); + let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) + .expect("gateway credentials"); + let client = NodeClient::new(); + client + .connect(&gateway.url, app_handle.clone(), creds) + .await + .expect("connect live remote doctor gateway"); + + let session_id = Uuid::new_v4().to_string(); + let result = run_clawpal_server_repair_loop( + &app_handle, + &client, + &session_id, + &format!("ssh:{}", cfg.id), + TargetLocation::RemoteOpenclaw, + ) + .await + .expect("full live remote doctor repair loop should complete"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + + client.disconnect().await.expect("disconnect"); + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); + } + + #[tokio::test] + async fn remote_doctor_live_start_command_remote_target_completes_without_bridge_pairing() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let 
app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir() + .join(format!("clawpal-remote-doctor-live-start-{}", Uuid::new_v4())); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); + let pool = app_handle.state::(); + + let result = start_remote_doctor_repair_impl( + app_handle.clone(), + &pool, + format!("ssh:{}", cfg.id), + "remote_openclaw".to_string(), + ) + .await + .expect("start command should complete remote repair"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + + let log_path = clawpal_dir + .join("doctor") + .join("remote") + .join(format!("{}.jsonl", result.session_id)); + let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor session log"); + assert!( + !log_text.contains("\"event\":\"bridge_connect_failed\""), + "clawpal_server path should not attempt bridge pairing: {log_text}" + ); + + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); + } + + #[tokio::test] + async fn remote_doctor_live_gateway_repairs_unreadable_remote_config() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to 
enable"); + return; + }; + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir() + .join(format!("clawpal-remote-doctor-live-raw-config-{}", Uuid::new_v4())); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); + let pool = app_handle.state::(); + pool.connect(&cfg).await.expect("ssh connect"); + pool.exec_login( + &cfg.id, + "cat > ~/.openclaw/openclaw.json <<'EOF'\n{\n ddd\n}\nEOF", + ) + .await + .expect("corrupt remote config"); + + let result = start_remote_doctor_repair_impl( + app_handle.clone(), + &pool, + cfg.id.clone(), + "remote_openclaw".to_string(), + ) + .await + .expect("start command should repair unreadable config"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + + let repaired = pool + .exec_login(&cfg.id, "python3 - <<'PY'\nimport json, pathlib\njson.load(open(pathlib.Path.home()/'.openclaw'/'openclaw.json'))\nprint('ok')\nPY") + .await + 
.expect("read repaired config"); + assert_eq!(repaired.exit_code, 0, "repaired config should be valid JSON: {}", repaired.stderr); + assert_eq!(repaired.stdout.trim(), "ok"); + + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); + } +} diff --git a/src/components/DoctorRecoveryOverview.tsx b/src/components/DoctorRecoveryOverview.tsx index c746ba33..6447ec05 100644 --- a/src/components/DoctorRecoveryOverview.tsx +++ b/src/components/DoctorRecoveryOverview.tsx @@ -24,6 +24,7 @@ interface DoctorRecoveryOverviewProps { repairResult: RescuePrimaryRepairResult | null; repairError: string | null; onRepairAll: () => void; + onRemoteDoctorRepair?: () => void; onRepairIssue: (issueId: string) => void; showRepairActions?: boolean; } @@ -94,6 +95,7 @@ export function DoctorRecoveryOverview({ repairResult, repairError, onRepairAll, + onRemoteDoctorRepair, onRepairIssue, showRepairActions = true, }: DoctorRecoveryOverviewProps) { @@ -185,14 +187,28 @@ export function DoctorRecoveryOverview({
{showRepairActions ? ( - + <> + + {onRemoteDoctorRepair ? ( + + ) : null} + {fixText} + ) : null} {!showRepairActions && affectedSections.length > 0 ? (
diff --git a/src/components/SettingsAlphaFeaturesCard.tsx b/src/components/SettingsAlphaFeaturesCard.tsx index 66a1a452..c7a2fd53 100644 --- a/src/components/SettingsAlphaFeaturesCard.tsx +++ b/src/components/SettingsAlphaFeaturesCard.tsx @@ -1,17 +1,30 @@ import { useTranslation } from "react-i18next"; +import { Input } from "@/components/ui/input"; import { Checkbox } from "@/components/ui/checkbox"; import { Label } from "@/components/ui/label"; import { DisclosureCard } from "@/components/DisclosureCard"; interface SettingsAlphaFeaturesCardProps { showSshTransferSpeedUi: boolean; + remoteDoctorGatewayUrl: string; + remoteDoctorGatewayAuthToken: string; onSshTransferSpeedUiToggle: (checked: boolean) => void; + onRemoteDoctorGatewayUrlChange: (value: string) => void; + onRemoteDoctorGatewayUrlSave: () => void; + onRemoteDoctorGatewayAuthTokenChange: (value: string) => void; + onRemoteDoctorGatewayAuthTokenSave: () => void; } export function SettingsAlphaFeaturesCard({ showSshTransferSpeedUi, + remoteDoctorGatewayUrl, + remoteDoctorGatewayAuthToken, onSshTransferSpeedUiToggle, + onRemoteDoctorGatewayUrlChange, + onRemoteDoctorGatewayUrlSave, + onRemoteDoctorGatewayAuthTokenChange, + onRemoteDoctorGatewayAuthTokenSave, }: SettingsAlphaFeaturesCardProps) { const { t } = useTranslation(); @@ -32,6 +45,53 @@ export function SettingsAlphaFeaturesCard({

{t("settings.alphaEnableSshTransferSpeedUiHint")}

+
+ +
+ onRemoteDoctorGatewayUrlChange(event.target.value)} + placeholder={t("settings.remoteDoctorGatewayUrlPlaceholder")} + /> + +
+

+ {t("settings.remoteDoctorGatewayUrlHint")} +

+
+
+ +
+ onRemoteDoctorGatewayAuthTokenChange(event.target.value)} + placeholder={t("settings.remoteDoctorGatewayAuthTokenPlaceholder")} + /> + +
+

+ {t("settings.remoteDoctorGatewayAuthTokenHint")} +

+
); } diff --git a/src/components/__tests__/DoctorRecoveryOverview.test.tsx b/src/components/__tests__/DoctorRecoveryOverview.test.tsx index 44a3ed72..ef15f7a1 100644 --- a/src/components/__tests__/DoctorRecoveryOverview.test.tsx +++ b/src/components/__tests__/DoctorRecoveryOverview.test.tsx @@ -113,6 +113,7 @@ describe("DoctorRecoveryOverview", () => { repairResult: null, repairError: null, onRepairAll: () => {}, + onRemoteDoctorRepair: () => {}, onRepairIssue: () => {}, }), }), @@ -193,6 +194,7 @@ describe("DoctorRecoveryOverview", () => { repairResult: null, repairError: null, onRepairAll: () => {}, + onRemoteDoctorRepair: () => {}, onRepairIssue: () => {}, }), }), @@ -203,6 +205,76 @@ describe("DoctorRecoveryOverview", () => { expect(html).not.toContain("Fix 2 issues"); }); + test("shows local and remote doctor repair actions", async () => { + await i18n.changeLanguage("en"); + const diagnosis: RescuePrimaryDiagnosisResult = { + status: "broken", + checkedAt: "2026-03-07T00:00:00Z", + targetProfile: "primary", + rescueProfile: "rescue", + rescueConfigured: true, + rescuePort: 19789, + summary: { + status: "broken", + headline: "Gateway needs attention first", + recommendedAction: "Apply 1 fix and re-run recovery", + fixableIssueCount: 1, + selectedFixIssueIds: ["field.agents"], + rootCauseHypotheses: [], + fixSteps: [], + confidence: undefined, + citations: [], + versionAwareness: undefined, + }, + sections: [ + { + key: "agents", + title: "Agents", + status: "degraded", + summary: "Agents has 1 recommended change", + docsUrl: "https://docs.openclaw.ai/agents", + rootCauseHypotheses: [], + fixSteps: [], + confidence: undefined, + citations: [], + versionAwareness: undefined, + items: [ + { + id: "field.agents", + label: "Missing agent defaults", + status: "warn", + detail: "Initialize agents.defaults.model", + autoFixable: true, + issueId: "field.agents", + }, + ], + }, + ], + checks: [], + issues: [], + }; + + const html = renderToStaticMarkup( + 
React.createElement(I18nextProvider, { + i18n, + children: React.createElement(DoctorRecoveryOverview, { + diagnosis, + checkLoading: false, + repairing: false, + progressLine: null, + repairResult: null, + repairError: null, + onRepairAll: () => {}, + onRemoteDoctorRepair: () => {}, + onRepairIssue: () => {}, + }), + }), + ); + + expect(html).toContain("Local Repair"); + expect(html).toContain("Remote Doctor Repair"); + }); + test("shows the broken badge once when summary, section, and item describe the same blocker", async () => { await i18n.changeLanguage("en"); const diagnosis: RescuePrimaryDiagnosisResult = { diff --git a/src/components/__tests__/SettingsAlphaFeaturesCard.test.tsx b/src/components/__tests__/SettingsAlphaFeaturesCard.test.tsx index e845fe24..85bff0ee 100644 --- a/src/components/__tests__/SettingsAlphaFeaturesCard.test.tsx +++ b/src/components/__tests__/SettingsAlphaFeaturesCard.test.tsx @@ -15,12 +15,20 @@ describe("SettingsAlphaFeaturesCard", () => { i18n, children: React.createElement(SettingsAlphaFeaturesCard, { showSshTransferSpeedUi: false, + remoteDoctorGatewayUrl: "", + remoteDoctorGatewayAuthToken: "", onSshTransferSpeedUiToggle: () => {}, + onRemoteDoctorGatewayUrlChange: () => {}, + onRemoteDoctorGatewayUrlSave: () => {}, + onRemoteDoctorGatewayAuthTokenChange: () => {}, + onRemoteDoctorGatewayAuthTokenSave: () => {}, }), }), ); expect(html).toContain("SSH transfer speed"); + expect(html).toContain("Remote Doctor Gateway URL"); + expect(html).toContain("Remote Doctor Gateway Auth Token"); expect(html).not.toContain("ClawPal Logs"); expect(html).not.toContain("OpenClaw Gateway Logs"); expect(html).not.toContain("OpenClaw Context"); diff --git a/src/lib/__tests__/doctor-page-features.test.ts b/src/lib/__tests__/doctor-page-features.test.ts index 990a4b19..88c65233 100644 --- a/src/lib/__tests__/doctor-page-features.test.ts +++ b/src/lib/__tests__/doctor-page-features.test.ts @@ -1,5 +1,6 @@ import { describe, expect, test } from 
"bun:test"; +import type { RemoteDoctorRepairResult } from "../types"; import { resolveDoctorPageFeatureVisibility } from "../doctor-page-features"; describe("resolveDoctorPageFeatureVisibility", () => { @@ -10,4 +11,21 @@ describe("resolveDoctorPageFeatureVisibility", () => { showRescueBot: true, }); }); + + test("accepts remote doctor repair result shape", () => { + const result: RemoteDoctorRepairResult = { + mode: "remoteDoctor", + status: "completed", + round: 3, + phase: "reporting_detect", + lastPlanKind: "detect", + latestDiagnosisHealthy: true, + lastCommand: ["openclaw", "doctor", "--json"], + sessionId: "session-1", + message: "Remote Doctor repair completed.", + }; + + expect(result.mode).toBe("remoteDoctor"); + expect(result.latestDiagnosisHealthy).toBe(true); + }); }); diff --git a/src/lib/__tests__/use-api-extra.test.ts b/src/lib/__tests__/use-api-extra.test.ts index d5ef6093..131bbb11 100644 --- a/src/lib/__tests__/use-api-extra.test.ts +++ b/src/lib/__tests__/use-api-extra.test.ts @@ -9,6 +9,7 @@ import { invalidateGlobalReadCache, shouldLogRemoteInvokeMetric, } from "../use-api"; +import { api } from "../api"; describe("hasGuidanceEmitted", () => { test("returns true when _guidanceEmitted is true", () => { @@ -166,3 +167,17 @@ describe("shouldLogRemoteInvokeMetric", () => { expect(typeof result).toBe("boolean"); }); }); + +describe("remote doctor api bindings", () => { + test("exposes startRemoteDoctorRepair binding", () => { + expect(typeof api.startRemoteDoctorRepair).toBe("function"); + }); + + test("exposes remote doctor gateway url preference binding", () => { + expect(typeof api.setRemoteDoctorGatewayUrlPreference).toBe("function"); + }); + + test("exposes remote doctor gateway auth token preference binding", () => { + expect(typeof api.setRemoteDoctorGatewayAuthTokenPreference).toBe("function"); + }); +}); diff --git a/src/lib/api.ts b/src/lib/api.ts index e596b015..06038365 100644 --- a/src/lib/api.ts +++ b/src/lib/api.ts @@ -1,5 +1,5 @@ 
import { invoke } from "@tauri-apps/api/core"; -import type { AgentOverview, AgentSessionAnalysis, AppPreferences, ApplyQueueResult, ApplyResult, BackupInfo, Binding, BugReportSettings, BugReportStats, ChannelNode, ChannelsConfigSnapshot, ChannelsRuntimeSnapshot, CronConfigSnapshot, CronJob, CronRun, CronRuntimeSnapshot, DiscordGuildChannel, DiscoveredInstance, DockerInstance, EnsureAccessResult, GuidanceAction, HistoryItem, InstallMethodCapability, InstallOrchestratorDecision, InstallSession, InstallStepResult, InstallTargetDecision, InstanceConfigSnapshot, InstanceRuntimeSnapshot, InstanceStatus, StatusExtra, ModelCatalogProvider, ModelProfile, PendingCommand, PrecheckIssue, PreviewQueueResult, PreviewResult, ProfilePushResult, ProviderAuthSuggestion, Recipe, RecordInstallExperienceResult, RegisteredInstance, RelatedSecretPushResult, RemoteAuthSyncResult, RescueBotAction, RescueBotManageResult, RescuePrimaryDiagnosisResult, RescuePrimaryRepairResult, ResolvedApiKey, SshConfigHostSuggestion, SshConnectionProfile, SshDiagnosticReport, SshHost, SshIntent, SshTransferStats, SystemStatus, DoctorReport, SessionFile, WatchdogStatus } from "./types"; +import type { AgentOverview, AgentSessionAnalysis, AppPreferences, ApplyQueueResult, ApplyResult, BackupInfo, Binding, BugReportSettings, BugReportStats, ChannelNode, ChannelsConfigSnapshot, ChannelsRuntimeSnapshot, CronConfigSnapshot, CronJob, CronRun, CronRuntimeSnapshot, DiscordGuildChannel, DiscoveredInstance, DockerInstance, EnsureAccessResult, GuidanceAction, HistoryItem, InstallMethodCapability, InstallOrchestratorDecision, InstallSession, InstallStepResult, InstallTargetDecision, InstanceConfigSnapshot, InstanceRuntimeSnapshot, InstanceStatus, StatusExtra, ModelCatalogProvider, ModelProfile, PendingCommand, PrecheckIssue, PreviewQueueResult, PreviewResult, ProfilePushResult, ProviderAuthSuggestion, Recipe, RecordInstallExperienceResult, RegisteredInstance, RelatedSecretPushResult, RemoteAuthSyncResult, 
RemoteDoctorRepairResult, RescueBotAction, RescueBotManageResult, RescuePrimaryDiagnosisResult, RescuePrimaryRepairResult, ResolvedApiKey, SshConfigHostSuggestion, SshConnectionProfile, SshDiagnosticReport, SshHost, SshIntent, SshTransferStats, SystemStatus, DoctorReport, SessionFile, WatchdogStatus } from "./types"; export const api = { setActiveOpenclawHome: (path: string | null): Promise => @@ -20,6 +20,10 @@ export const api = { invoke("capture_frontend_error", { message, stack, level }), setSshTransferSpeedUiPreference: (showUi: boolean): Promise => invoke("set_ssh_transfer_speed_ui_preference", { showUi }), + setRemoteDoctorGatewayUrlPreference: (gatewayUrl: string | null): Promise => + invoke("set_remote_doctor_gateway_url_preference", { gatewayUrl }), + setRemoteDoctorGatewayAuthTokenPreference: (authToken: string | null): Promise => + invoke("set_remote_doctor_gateway_auth_token_preference", { authToken }), explainOperationError: ( instanceId: string, operation: string, @@ -188,6 +192,14 @@ export const api = { tempProviderProfileId: tempProviderProfileId ?? null, currentDiagnosis: currentDiagnosis ?? null, }), + startRemoteDoctorRepair: ( + instanceId: string, + targetLocation: "local_openclaw" | "remote_openclaw", + ): Promise => + invoke("start_remote_doctor_repair", { + instanceId, + targetLocation, + }), manageRescueBot: (action: RescueBotAction, profile?: string, rescuePort?: number): Promise => invoke("manage_rescue_bot", { action, profile: profile ?? null, rescuePort: rescuePort ?? 
null }), getRescueBotStatus: (profile?: string, rescuePort?: number): Promise => diff --git a/src/lib/types.ts b/src/lib/types.ts index ff2b3f42..6ca8133b 100644 --- a/src/lib/types.ts +++ b/src/lib/types.ts @@ -216,6 +216,8 @@ export interface RelatedSecretPushResult { export interface AppPreferences { showSshTransferSpeedUi: boolean; + remoteDoctorGatewayUrl: string | null; + remoteDoctorGatewayAuthToken: string | null; } export interface SshTransferStats { @@ -659,6 +661,50 @@ export interface RescuePrimaryRepairResult { after: RescuePrimaryDiagnosisResult; } +export type DoctorRepairMode = "localRepair" | "remoteDoctor"; + +export type RemoteDoctorPlanKind = "detect" | "investigate" | "repair"; + +export type RemoteDoctorSessionStatus = "running" | "completed" | "completed_with_warnings" | "failed"; + +export interface RemoteDoctorCommandPlan { + argv: string[]; + timeoutSec?: number; + purpose?: string; + continueOnFailure?: boolean; +} + +export interface RemoteDoctorCommandResult { + argv: string[]; + exitCode: number | null; + stdout: string; + stderr: string; + durationMs: number; + timedOut: boolean; +} + +export interface RemoteDoctorRepairResult { + mode: "remoteDoctor"; + status: RemoteDoctorSessionStatus; + round: number; + phase: string; + lastPlanKind: RemoteDoctorPlanKind; + latestDiagnosisHealthy: boolean; + lastCommand?: string[] | null; + sessionId: string; + message: string; +} + +export interface RemoteDoctorProgressEvent { + sessionId: string; + mode: "remoteDoctor"; + round: number; + phase: string; + line: string; + planKind?: RemoteDoctorPlanKind | null; + command?: string[] | null; +} + // Cron export type WatchdogJobStatus = "ok" | "pending" | "triggered" | "retrying" | "escalated"; diff --git a/src/lib/use-api.ts b/src/lib/use-api.ts index 88bc41bf..f27121f7 100644 --- a/src/lib/use-api.ts +++ b/src/lib/use-api.ts @@ -759,6 +759,10 @@ export function useApi() { api.remoteRepairDoctorAssistant, "repairDoctorAssistant", ), + 
startRemoteDoctorRepair: () => api.startRemoteDoctorRepair( + instanceId, + isRemote ? "remote_openclaw" : "local_openclaw", + ), getRescueBotStatus: dispatchCached( "getRescueBotStatus", isRemote ? 8_000 : 5_000, @@ -979,6 +983,14 @@ export function useApi() { api.setSshTransferSpeedUiPreference, ["getAppPreferences"], ), + setRemoteDoctorGatewayUrlPreference: withGlobalInvalidation( + api.setRemoteDoctorGatewayUrlPreference, + ["getAppPreferences"], + ), + setRemoteDoctorGatewayAuthTokenPreference: withGlobalInvalidation( + api.setRemoteDoctorGatewayAuthTokenPreference, + ["getAppPreferences"], + ), ensureAccessProfile: api.ensureAccessProfile, recordInstallExperience: api.recordInstallExperience, openUrl: api.openUrl, diff --git a/src/locales/en.json b/src/locales/en.json index 1592d994..e419cdc2 100644 --- a/src/locales/en.json +++ b/src/locales/en.json @@ -316,6 +316,16 @@ "settings.alphaEnableGatewayLogsUiHint": "When enabled, show a gateway logs icon in Doctor Claw.", "settings.alphaEnableOpenclawContextUi": "Show OpenClaw Context (Alpha)", "settings.alphaEnableOpenclawContextUiHint": "When enabled, add a Context page to the sidebar with collapsible Sessions and Backups.", + "settings.remoteDoctorGatewayUrl": "Remote Doctor Gateway URL", + "settings.remoteDoctorGatewayUrlPlaceholder": "ws://127.0.0.1:3000/ws", + "settings.remoteDoctorGatewayUrlHint": "Used by Remote Doctor Repair. For clawpal-server, use a websocket endpoint like ws://127.0.0.1:3000/ws. Leave empty to fall back to the local gateway port from openclaw.json.", + "settings.remoteDoctorGatewayUrlSaved": "Remote Doctor gateway URL saved", + "settings.remoteDoctorGatewayUrlSaveFailed": "Failed to save Remote Doctor gateway URL: {{error}}", + "settings.remoteDoctorGatewayAuthToken": "Remote Doctor Gateway Auth Token", + "settings.remoteDoctorGatewayAuthTokenPlaceholder": "gateway auth token", + "settings.remoteDoctorGatewayAuthTokenHint": "Optional override token for Remote Doctor Repair. 
Leave empty to reuse the local gateway auth token.", + "settings.remoteDoctorGatewayAuthTokenSaved": "Remote Doctor gateway auth token saved", + "settings.remoteDoctorGatewayAuthTokenSaveFailed": "Failed to save Remote Doctor gateway auth token: {{error}}", "settings.zeroclawModelAuto": "Use Auto", "settings.zeroclawModelPlaceholder": "e.g. anthropic/claude-sonnet-4-5", "settings.zeroclawModelSaved": "Doctor Claw model preference saved", @@ -589,12 +599,15 @@ "doctor.primaryRepairComingSoon": "Auto repair via Rescue Bot is coming next.", "doctor.fixSafeIssues": "Fix {{count}} issue", "doctor.fixSafeIssues_plural": "Fix {{count}} issues", + "doctor.localRepair": "Local Repair", + "doctor.remoteDoctorRepair": "Remote Doctor Repair", "doctor.optimizeIssues": "Optimize {{count}} issue", "doctor.optimizeIssues_plural": "Optimize {{count}} issues", "doctor.optimizeOneIssue": "Optimize 1 issue", "doctor.optimizeManyIssues": "Optimize {{count}} issues", "doctor.optimize": "Optimize", "doctor.repairing": "Repairing...", + "doctor.remoteDoctorRepairing": "Running remote Doctor repair...", "doctor.configureTempProvider": "Configure temp gateway provider", "doctor.editTempProvider": "Edit temp gateway provider", "doctor.tempProviderHint": "This profile is used only to give the temporary repair gateway inference. 
Prefer a provider with a static API key.", diff --git a/src/locales/zh.json b/src/locales/zh.json index c5c9b248..49d85c70 100644 --- a/src/locales/zh.json +++ b/src/locales/zh.json @@ -315,6 +315,16 @@ "settings.alphaEnableGatewayLogsUiHint": "开启后,在 Doctor Claw 中显示 Gateway 日志图标。", "settings.alphaEnableOpenclawContextUi": "显示 OpenClaw Context(实验)", "settings.alphaEnableOpenclawContextUiHint": "开启后,在侧边栏显示 Context 页面,里面包含可展开的 Sessions 和 Backups。", + "settings.remoteDoctorGatewayUrl": "远程 Doctor Gateway 地址", + "settings.remoteDoctorGatewayUrlPlaceholder": "ws://127.0.0.1:3000/ws", + "settings.remoteDoctorGatewayUrlHint": "用于“远程 Doctor 修复”。如果接 clawpal-server,请填写类似 ws://127.0.0.1:3000/ws 的 websocket 地址。留空时回退到 openclaw.json 中的本地 gateway 端口。", + "settings.remoteDoctorGatewayUrlSaved": "远程 Doctor Gateway 地址已保存", + "settings.remoteDoctorGatewayUrlSaveFailed": "保存远程 Doctor Gateway 地址失败:{{error}}", + "settings.remoteDoctorGatewayAuthToken": "远程 Doctor Gateway Auth Token", + "settings.remoteDoctorGatewayAuthTokenPlaceholder": "gateway auth token", + "settings.remoteDoctorGatewayAuthTokenHint": "用于“远程 Doctor 修复”的可选 token 覆盖。留空时复用本地 gateway auth token。", + "settings.remoteDoctorGatewayAuthTokenSaved": "远程 Doctor Gateway Auth Token 已保存", + "settings.remoteDoctorGatewayAuthTokenSaveFailed": "保存远程 Doctor Gateway Auth Token 失败:{{error}}", "settings.zeroclawModelAuto": "使用自动选择", "settings.zeroclawModelPlaceholder": "例如 anthropic/claude-sonnet-4-5", "settings.zeroclawModelSaved": "小龙虾模型偏好已保存", @@ -587,11 +597,14 @@ "doctor.primaryRepairFailedCount": "失败:{{count}}", "doctor.primaryRepairComingSoon": "Rescue Bot 自动修复将在下一步提供。", "doctor.fixSafeIssues": "修复 {{count}} 个问题", + "doctor.localRepair": "本地修复", + "doctor.remoteDoctorRepair": "远程 Doctor 修复", "doctor.optimizeIssues": "优化 {{count}} 个问题", "doctor.optimizeOneIssue": "优化 1 个问题", "doctor.optimizeManyIssues": "优化 {{count}} 个问题", "doctor.optimize": "优化", "doctor.repairing": "修复中...", + "doctor.remoteDoctorRepairing": "远程 Doctor 修复中...", 
"doctor.configureTempProvider": "配置临时网关推理源", "doctor.editTempProvider": "编辑临时网关推理源", "doctor.tempProviderHint": "此配置仅用于为临时修复网关提供推理能力,建议使用带有静态 API Key 的 Provider。", diff --git a/src/pages/Doctor.tsx b/src/pages/Doctor.tsx index 97136a26..37459adf 100644 --- a/src/pages/Doctor.tsx +++ b/src/pages/Doctor.tsx @@ -22,6 +22,7 @@ import { } from "@/lib/data-load-log"; import type { ModelProfile, + RemoteDoctorProgressEvent, RescueBotRuntimeState, RescuePrimaryDiagnosisResult, RescuePrimaryRepairResult, @@ -78,6 +79,7 @@ export function Doctor(_: DoctorProps) { const [statusProgress, setStatusProgress] = useState(0.16); const [tempProviderDialogOpen, setTempProviderDialogOpen] = useState(false); const [tempProviderProfileId, setTempProviderProfileId] = useState(null); + const [activeRepairMode, setActiveRepairMode] = useState<"localRepair" | "remoteDoctor" | null>(null); const busy = diagnosisLoading || repairing; const liveReadsReady = ua.instanceToken !== 0; @@ -134,6 +136,26 @@ export function Doctor(_: DoctorProps) { }; }, []); + useEffect(() => { + let disposed = false; + let unlisten: (() => void) | null = null; + void listen("doctor:remote-repair-progress", (event) => { + if (disposed) return; + const payload = event.payload; + setStatusLine(payload.line?.trim() || null); + }).then((fn) => { + if (disposed) { + fn(); + return; + } + unlisten = fn; + }); + return () => { + disposed = true; + if (unlisten) unlisten(); + }; + }, []); + useEffect(() => { if (!busy && diagnosis && !needsRepair && !error && !pendingTempProviderSetup) { setStatusLine(null); @@ -145,8 +167,8 @@ export function Doctor(_: DoctorProps) { setLogsOpen(true); }, []); - const runDiagnosis = useCallback(async () => { - if (!liveReadsReady || busy) return; + const runDiagnosis = useCallback(async (options?: { force?: boolean }) => { + if (!liveReadsReady || (busy && !options?.force)) return; if (isRemote && !isConnected) { setError(t("doctor.rescueBotConnectRequired", { defaultValue: "Connect to SSH 
first." })); return; @@ -218,6 +240,7 @@ export function Doctor(_: DoctorProps) { return; } setRepairing(true); + setActiveRepairMode("localRepair"); setError(null); setStatusLine( t("doctor.fixSafeIssues", { @@ -295,15 +318,46 @@ export function Doctor(_: DoctorProps) { }); } finally { setRepairing(false); + setActiveRepairMode(null); } }, [busy, diagnosis, isConnected, isRemote, liveReadsReady, repairableCount, t, tempProviderProfileId, ua]); + const runRemoteDoctorRepair = useCallback(async () => { + if (!liveReadsReady || busy || !diagnosis) return; + if (isRemote && !isConnected) { + setError(t("doctor.rescueBotConnectRequired", { defaultValue: "Connect to SSH first." })); + return; + } + setRepairing(true); + setActiveRepairMode("remoteDoctor"); + setError(null); + setStatusLine(t("doctor.remoteDoctorRepairing", { defaultValue: "Running remote Doctor repair..." })); + setStatusProgress(0.18); + try { + const result = await ua.startRemoteDoctorRepair(); + setStatusProgress(1); + setStatusLine(result.message); + if (result.status === "completed" || result.status === "completed_with_warnings") { + await runDiagnosis({ force: true }); + } + } catch (cause) { + const text = cause instanceof Error ? cause.message : String(cause); + setError(text); + setStatusLine(text); + } finally { + setRepairing(false); + setActiveRepairMode(null); + } + }, [busy, diagnosis, isConnected, isRemote, liveReadsReady, runDiagnosis, t, ua]); + const buttonLabel = useMemo(() => { if (diagnosisLoading) { return t("doctor.analyzing", { defaultValue: "Diagnosing..." }); } if (repairing) { - return t("doctor.repairing", { defaultValue: "Repairing..." }); + return activeRepairMode === "remoteDoctor" + ? t("doctor.remoteDoctorRepairing", { defaultValue: "Running remote Doctor repair..." }) + : t("doctor.repairing", { defaultValue: "Repairing..." 
}); } if (pendingTempProviderSetup) { return t( @@ -322,7 +376,7 @@ export function Doctor(_: DoctorProps) { }); } return t("doctor.diagnose", { defaultValue: "Diagnose" }); - }, [diagnosisLoading, needsRepair, pendingTempProviderSetup, repairableCount, repairing, t, tempProviderProfileId]); + }, [activeRepairMode, diagnosisLoading, needsRepair, pendingTempProviderSetup, repairableCount, repairing, t, tempProviderProfileId]); const buttonIcon = diagnosisLoading || repairing ? @@ -455,8 +509,9 @@ export function Doctor(_: DoctorProps) { repairResult={repairResult} repairError={null} onRepairAll={() => void runRepair()} + onRemoteDoctorRepair={() => void runRemoteDoctorRepair()} onRepairIssue={(_issueId) => void runRepair()} - showRepairActions={false} + showRepairActions /> ) : null} diff --git a/src/pages/Settings.tsx b/src/pages/Settings.tsx index 7029bbf2..62fce311 100644 --- a/src/pages/Settings.tsx +++ b/src/pages/Settings.tsx @@ -256,6 +256,8 @@ export function Settings({ const [authSuggestion, setAuthSuggestion] = useState(null); const [testingProfileId, setTestingProfileId] = useState(null); const [showSshTransferSpeedUi, setShowSshTransferSpeedUi] = useState(false); + const [remoteDoctorGatewayUrl, setRemoteDoctorGatewayUrl] = useState(""); + const [remoteDoctorGatewayAuthToken, setRemoteDoctorGatewayAuthToken] = useState(""); const [catalogRefreshed, setCatalogRefreshed] = useState(false); @@ -359,6 +361,8 @@ export function Settings({ ua.getAppPreferences() .then((prefs) => { setShowSshTransferSpeedUi(Boolean(prefs.showSshTransferSpeedUi)); + setRemoteDoctorGatewayUrl(prefs.remoteDoctorGatewayUrl ?? ""); + setRemoteDoctorGatewayAuthToken(prefs.remoteDoctorGatewayAuthToken ?? 
""); }) .catch((e) => console.error("Failed to load app preferences:", e)); }, [ua]); @@ -652,6 +656,32 @@ export function Settings({ }); }, [t, ua]); + const handleRemoteDoctorGatewayUrlSave = useCallback(() => { + const nextValue = remoteDoctorGatewayUrl.trim(); + ua.setRemoteDoctorGatewayUrlPreference(nextValue || null) + .then((prefs) => { + setRemoteDoctorGatewayUrl(prefs.remoteDoctorGatewayUrl ?? ""); + toast.success(t("settings.remoteDoctorGatewayUrlSaved")); + }) + .catch((e) => { + const errorText = e instanceof Error ? e.message : String(e); + toast.error(t("settings.remoteDoctorGatewayUrlSaveFailed", { error: errorText })); + }); + }, [remoteDoctorGatewayUrl, t, ua]); + + const handleRemoteDoctorGatewayAuthTokenSave = useCallback(() => { + const nextValue = remoteDoctorGatewayAuthToken.trim(); + ua.setRemoteDoctorGatewayAuthTokenPreference(nextValue || null) + .then((prefs) => { + setRemoteDoctorGatewayAuthToken(prefs.remoteDoctorGatewayAuthToken ?? ""); + toast.success(t("settings.remoteDoctorGatewayAuthTokenSaved")); + }) + .catch((e) => { + const errorText = e instanceof Error ? e.message : String(e); + toast.error(t("settings.remoteDoctorGatewayAuthTokenSaveFailed", { error: errorText })); + }); + }, [remoteDoctorGatewayAuthToken, t, ua]); + return (

{t('settings.title')}

@@ -882,7 +912,13 @@ export function Settings({ {showPreferences && ( )} diff --git a/src/pages/__tests__/Doctor.test.tsx b/src/pages/__tests__/Doctor.test.tsx index 010ea9ef..62072375 100644 --- a/src/pages/__tests__/Doctor.test.tsx +++ b/src/pages/__tests__/Doctor.test.tsx @@ -55,7 +55,8 @@ describe("Doctor page rescue header", () => { expect(html).toContain("flex flex-col items-center"); expect(html).toContain("role=\"img\""); expect(html).toContain("alt=\"Diagnose\""); - expect(html).toContain("src=\"/Users/ChenYu/Documents/Github/clawpal/src/assets/doctor.png\""); + expect(html).toContain("src=\""); + expect(html).toContain("/src/assets/doctor.png"); expect(html).toContain("aria-label=\"Open logs\""); expect(html).toContain(">Diagnose<"); expect(html).toContain("Run a structured check before attempting repairs on the primary profile."); From ad32460c490618a4220bf25c0e4a6edd441c094b Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 14:11:41 +0800 Subject: [PATCH 03/20] feat: init remote doctor setting --- src/App.tsx | 20 ++++++++++ src/components/SettingsAlphaFeaturesCard.tsx | 4 ++ src/lib/remote-doctor-navigation.ts | 18 +++++++++ src/locales/en.json | 3 ++ src/locales/zh.json | 3 ++ src/pages/Doctor.tsx | 42 ++++++++++++++++++++ src/pages/Settings.tsx | 36 +++++++++++++++++ 7 files changed, 126 insertions(+) create mode 100644 src/lib/remote-doctor-navigation.ts diff --git a/src/App.tsx b/src/App.tsx index de55dd39..d0cce858 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -48,6 +48,7 @@ import { buildSshPassphraseConnectErrorMessage, } from "@/lib/sshConnectErrors"; import { buildFriendlySshError, extractErrorText } from "@/lib/sshDiagnostic"; +import { OPEN_REMOTE_DOCTOR_SETTINGS_EVENT } from "@/lib/remote-doctor-navigation"; const Home = lazy(() => import("./pages/Home").then((m) => ({ default: m.Home }))); const Recipes = lazy(() => import("./pages/Recipes").then((m) => ({ default: m.Recipes }))); @@ -1258,6 +1259,25 @@ export function 
App() { }, 1400); }, [navigateRoute]); + useEffect(() => { + if (typeof window === "undefined") return; + const handleOpenRemoteDoctorSettings = () => { + setInStart(true); + setStartSection("settings"); + navigateRoute("home"); + }; + window.addEventListener( + OPEN_REMOTE_DOCTOR_SETTINGS_EVENT, + handleOpenRemoteDoctorSettings as EventListener, + ); + return () => { + window.removeEventListener( + OPEN_REMOTE_DOCTOR_SETTINGS_EVENT, + handleOpenRemoteDoctorSettings as EventListener, + ); + }; + }, [navigateRoute]); + const showSidebar = true; // Derive openTabs array for InstanceTabBar diff --git a/src/components/SettingsAlphaFeaturesCard.tsx b/src/components/SettingsAlphaFeaturesCard.tsx index c7a2fd53..380a62d1 100644 --- a/src/components/SettingsAlphaFeaturesCard.tsx +++ b/src/components/SettingsAlphaFeaturesCard.tsx @@ -1,3 +1,4 @@ +import type { Ref } from "react"; import { useTranslation } from "react-i18next"; import { Input } from "@/components/ui/input"; @@ -9,6 +10,7 @@ interface SettingsAlphaFeaturesCardProps { showSshTransferSpeedUi: boolean; remoteDoctorGatewayUrl: string; remoteDoctorGatewayAuthToken: string; + remoteDoctorGatewayUrlInputRef?: Ref; onSshTransferSpeedUiToggle: (checked: boolean) => void; onRemoteDoctorGatewayUrlChange: (value: string) => void; onRemoteDoctorGatewayUrlSave: () => void; @@ -20,6 +22,7 @@ export function SettingsAlphaFeaturesCard({ showSshTransferSpeedUi, remoteDoctorGatewayUrl, remoteDoctorGatewayAuthToken, + remoteDoctorGatewayUrlInputRef, onSshTransferSpeedUiToggle, onRemoteDoctorGatewayUrlChange, onRemoteDoctorGatewayUrlSave, @@ -52,6 +55,7 @@ export function SettingsAlphaFeaturesCard({
onRemoteDoctorGatewayUrlChange(event.target.value)} placeholder={t("settings.remoteDoctorGatewayUrlPlaceholder")} diff --git a/src/lib/remote-doctor-navigation.ts b/src/lib/remote-doctor-navigation.ts new file mode 100644 index 00000000..adc5e99a --- /dev/null +++ b/src/lib/remote-doctor-navigation.ts @@ -0,0 +1,18 @@ +export const OPEN_REMOTE_DOCTOR_SETTINGS_EVENT = "clawpal:open-remote-doctor-settings"; + +const PENDING_REMOTE_DOCTOR_SETTINGS_FOCUS_KEY = "clawpal:pending-remote-doctor-settings-focus"; + +export function requestRemoteDoctorSettingsFocus() { + if (typeof window === "undefined") return; + window.sessionStorage.setItem(PENDING_REMOTE_DOCTOR_SETTINGS_FOCUS_KEY, "1"); + window.dispatchEvent(new CustomEvent(OPEN_REMOTE_DOCTOR_SETTINGS_EVENT)); +} + +export function consumePendingRemoteDoctorSettingsFocus(): boolean { + if (typeof window === "undefined") return false; + const pending = window.sessionStorage.getItem(PENDING_REMOTE_DOCTOR_SETTINGS_FOCUS_KEY) === "1"; + if (pending) { + window.sessionStorage.removeItem(PENDING_REMOTE_DOCTOR_SETTINGS_FOCUS_KEY); + } + return pending; +} diff --git a/src/locales/en.json b/src/locales/en.json index e419cdc2..ef9edd16 100644 --- a/src/locales/en.json +++ b/src/locales/en.json @@ -608,6 +608,9 @@ "doctor.optimize": "Optimize", "doctor.repairing": "Repairing...", "doctor.remoteDoctorRepairing": "Running remote Doctor repair...", + "doctor.remoteDoctorGatewayRequiredTitle": "Configure Remote Doctor first", + "doctor.remoteDoctorGatewayRequiredDescription": "Remote Doctor Repair requires a configured gateway URL. Open Settings now?", + "doctor.openRemoteDoctorSettings": "Open Settings", "doctor.configureTempProvider": "Configure temp gateway provider", "doctor.editTempProvider": "Edit temp gateway provider", "doctor.tempProviderHint": "This profile is used only to give the temporary repair gateway inference. 
Prefer a provider with a static API key.", diff --git a/src/locales/zh.json b/src/locales/zh.json index 49d85c70..cfb76f00 100644 --- a/src/locales/zh.json +++ b/src/locales/zh.json @@ -605,6 +605,9 @@ "doctor.optimize": "优化", "doctor.repairing": "修复中...", "doctor.remoteDoctorRepairing": "远程 Doctor 修复中...", + "doctor.remoteDoctorGatewayRequiredTitle": "请先配置远程 Doctor", + "doctor.remoteDoctorGatewayRequiredDescription": "远程 Doctor 修复需要先配置 gateway URL。现在打开设置页吗?", + "doctor.openRemoteDoctorSettings": "打开设置", "doctor.configureTempProvider": "配置临时网关推理源", "doctor.editTempProvider": "编辑临时网关推理源", "doctor.tempProviderHint": "此配置仅用于为临时修复网关提供推理能力,建议使用带有静态 API Key 的 Provider。", diff --git a/src/pages/Doctor.tsx b/src/pages/Doctor.tsx index 37459adf..c8aca054 100644 --- a/src/pages/Doctor.tsx +++ b/src/pages/Doctor.tsx @@ -12,10 +12,21 @@ import { DoctorLogsDialog } from "@/components/DoctorLogsDialog"; import { DoctorRecoveryOverview } from "@/components/DoctorRecoveryOverview"; import { DoctorTempProviderDialog } from "@/components/DoctorTempProviderDialog"; import { RescueAsciiHeader } from "@/components/RescueAsciiHeader"; +import { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogTitle, +} from "@/components/ui/alert-dialog"; import { Button } from "@/components/ui/button"; import { Card, CardContent, CardHeader } from "@/components/ui/card"; import { useInstance } from "@/lib/instance-context"; import { localizeDoctorReportText } from "@/lib/doctor-report-i18n"; +import { requestRemoteDoctorSettingsFocus } from "@/lib/remote-doctor-navigation"; import { createDataLoadRequestId, emitDataLoadMetric, @@ -80,6 +91,7 @@ export function Doctor(_: DoctorProps) { const [tempProviderDialogOpen, setTempProviderDialogOpen] = useState(false); const [tempProviderProfileId, setTempProviderProfileId] = useState(null); const [activeRepairMode, setActiveRepairMode] = 
useState<"localRepair" | "remoteDoctor" | null>(null); + const [remoteDoctorConfigPromptOpen, setRemoteDoctorConfigPromptOpen] = useState(false); const busy = diagnosisLoading || repairing; const liveReadsReady = ua.instanceToken !== 0; @@ -328,6 +340,11 @@ export function Doctor(_: DoctorProps) { setError(t("doctor.rescueBotConnectRequired", { defaultValue: "Connect to SSH first." })); return; } + const prefs = await ua.getAppPreferences(); + if (!prefs.remoteDoctorGatewayUrl?.trim()) { + setRemoteDoctorConfigPromptOpen(true); + return; + } setRepairing(true); setActiveRepairMode("remoteDoctor"); setError(null); @@ -350,6 +367,11 @@ export function Doctor(_: DoctorProps) { } }, [busy, diagnosis, isConnected, isRemote, liveReadsReady, runDiagnosis, t, ua]); + const handleOpenRemoteDoctorSettings = useCallback(() => { + setRemoteDoctorConfigPromptOpen(false); + requestRemoteDoctorSettingsFocus(); + }, []); + const buttonLabel = useMemo(() => { if (diagnosisLoading) { return t("doctor.analyzing", { defaultValue: "Diagnosing..." }); @@ -529,6 +551,26 @@ export function Doctor(_: DoctorProps) { initialProfileId={tempProviderProfileId} onSaved={handleTempProviderSaved} /> + + + + + {t("doctor.remoteDoctorGatewayRequiredTitle")} + + + {t("doctor.remoteDoctorGatewayRequiredDescription")} + + + + + {t("doctor.cancel")} + + + {t("doctor.openRemoteDoctorSettings")} + + + +
); } diff --git a/src/pages/Settings.tsx b/src/pages/Settings.tsx index 62fce311..bad07eeb 100644 --- a/src/pages/Settings.tsx +++ b/src/pages/Settings.tsx @@ -25,6 +25,10 @@ import { Checkbox } from "@/components/ui/checkbox"; import { Badge } from "@/components/ui/badge"; import { Button } from "@/components/ui/button"; import { SettingsAlphaFeaturesCard } from "@/components/SettingsAlphaFeaturesCard"; +import { + consumePendingRemoteDoctorSettingsFocus, + OPEN_REMOTE_DOCTOR_SETTINGS_EVENT, +} from "@/lib/remote-doctor-navigation"; import { getSettingsProfileUiState } from "./settings-profile-ui"; import { Select, @@ -258,6 +262,7 @@ export function Settings({ const [showSshTransferSpeedUi, setShowSshTransferSpeedUi] = useState(false); const [remoteDoctorGatewayUrl, setRemoteDoctorGatewayUrl] = useState(""); const [remoteDoctorGatewayAuthToken, setRemoteDoctorGatewayAuthToken] = useState(""); + const remoteDoctorGatewayUrlInputRef = useRef(null); const [catalogRefreshed, setCatalogRefreshed] = useState(false); @@ -272,6 +277,36 @@ export function Settings({ getVersion().then(setAppVersion).catch(() => {}); }, []); + const focusRemoteDoctorGatewayUrlInput = useCallback(() => { + const input = remoteDoctorGatewayUrlInputRef.current; + if (!input) return; + input.scrollIntoView({ behavior: "smooth", block: "center" }); + window.setTimeout(() => { + input.focus(); + input.select(); + }, 60); + }, []); + + useEffect(() => { + if (section !== "all" && section !== "preferences") return; + if (consumePendingRemoteDoctorSettingsFocus()) { + focusRemoteDoctorGatewayUrlInput(); + } + const handleOpenRemoteDoctorSettings = () => { + focusRemoteDoctorGatewayUrlInput(); + }; + window.addEventListener( + OPEN_REMOTE_DOCTOR_SETTINGS_EVENT, + handleOpenRemoteDoctorSettings as EventListener, + ); + return () => { + window.removeEventListener( + OPEN_REMOTE_DOCTOR_SETTINGS_EVENT, + handleOpenRemoteDoctorSettings as EventListener, + ); + }; + }, [focusRemoteDoctorGatewayUrlInput, 
section]); + const handleCheckForUpdates = useCallback(async () => { setAppUpdateChecking(true); setAppUpdate(null); @@ -914,6 +949,7 @@ export function Settings({ showSshTransferSpeedUi={showSshTransferSpeedUi} remoteDoctorGatewayUrl={remoteDoctorGatewayUrl} remoteDoctorGatewayAuthToken={remoteDoctorGatewayAuthToken} + remoteDoctorGatewayUrlInputRef={remoteDoctorGatewayUrlInputRef} onSshTransferSpeedUiToggle={handleSshTransferSpeedUiToggle} onRemoteDoctorGatewayUrlChange={setRemoteDoctorGatewayUrl} onRemoteDoctorGatewayUrlSave={handleRemoteDoctorGatewayUrlSave} From a340e6340e7138ca6d213c79501dc7c301f42bdb Mon Sep 17 00:00:00 2001 From: OpenClaw Bot Date: Thu, 19 Mar 2026 06:43:17 +0000 Subject: [PATCH 04/20] style: cargo fmt + remove subcrate Cargo.lock + restore agents.md redirect - Format openclaw-gateway-client test files (protocol_roundtrip, tls_fingerprint) - Remove openclaw-gateway-client/Cargo.lock (workspace members share root lockfile) - Restore agents.md redirect stub (content lives in AGENTS.md) --- agents.md | 117 +- openclaw-gateway-client/Cargo.lock | 1422 ----------------- .../tests/protocol_roundtrip.rs | 21 +- .../tests/tls_fingerprint.rs | 10 +- 4 files changed, 22 insertions(+), 1548 deletions(-) delete mode 100644 openclaw-gateway-client/Cargo.lock diff --git a/agents.md b/agents.md index 822c690a..f061a817 100644 --- a/agents.md +++ b/agents.md @@ -1,115 +1,2 @@ -# AGENTS.md - -ClawPal 是基于 Tauri 的 OpenClaw 桌面伴侣应用,覆盖安装、配置、Doctor 诊断、版本回滚、远程 SSH 管理和多平台打包发布。 - -技术栈:Tauri v2 + Rust + React + TypeScript + Bun - -## 目录说明 - -``` -src/ # 前端(React/TypeScript) -src/lib/api.ts # 前端对 Tauri command 的统一封装 -src-tauri/src/commands/ # Tauri command 层(参数校验、权限检查、错误映射) -src-tauri/src/commands/mod.rs # Command 路由与公共逻辑 -clawpal-core/ # 核心业务逻辑(与 Tauri 解耦) -clawpal-cli/ # CLI 接口 -docs/architecture/ # 模块边界、分层原则、核心数据流 -docs/decisions/ # 关键设计决策(ADR) -docs/plans/ # 任务计划与实施方案 -docs/runbooks/ # 启动、调试、发布、回滚、故障处理 -docs/testing/ # 测试矩阵与验证策略 -harness/fixtures/ # 最小稳定测试数据 
-harness/artifacts/ # 日志、截图、trace、失败产物收集 -Makefile # 统一命令入口 -``` - -## 启动命令 - -本项目使用 `Makefile` 作为统一命令入口(无需额外安装,macOS/Linux 自带 `make`): - -```bash -make install # 安装前端依赖 -make dev # 启动开发模式(前端 + Tauri) -make dev-frontend # 仅启动前端 -make test-unit # 运行所有单元测试(前端 + Rust) -make lint # 运行所有 lint(TypeScript + Rust fmt + clippy) -make fmt # 自动修复 Rust 格式 -make build # 构建 Tauri 应用(debug) -make ci # 本地运行完整 CI 检查 -make doctor # 检查开发环境依赖 -``` - -完整命令列表:`make help` - -底层命令(不使用 make 时): - -```bash -bun install # 安装前端依赖 -bun run dev:tauri # 启动开发模式(前端 + Tauri) -bun run dev # 仅启动前端 -cargo test --workspace # Rust 单元测试 -bun test # 前端单元测试 -bun run typecheck # TypeScript 类型检查 -cargo fmt --check # Rust 格式检查 -cargo clippy # Rust lint -``` - -## 代码分层约束 - -### UI 层 (`src/`) -- 不直接在组件中使用 `invoke("xxx")`,通过 `src/lib/api.ts` 封装调用 -- 不直接访问原生能力 -- 不拼接 command 名称和错误字符串 - -### Command 层 (`src-tauri/src/commands/`) -- 保持薄层:参数校验、权限检查、错误映射、事件分发 -- 不堆积业务编排逻辑 -- 不直接写文件系统或数据库 - -### Domain 层 (`clawpal-core/`) -- 核心业务规则和用例编排 -- 尽量不依赖 `tauri::*` -- 输入输出保持普通 Rust 类型 - -### Adapter 层 -- 所有原生副作用(文件系统、shell、通知、剪贴板、updater)从 adapter 层进入 -- 须提供测试替身(mock/fake) - -## 提交与 PR 要求 - -- Conventional Commits: `feat:` / `fix:` / `docs:` / `refactor:` / `chore:` -- 分支命名: `feat/*` / `fix/*` / `chore/*` -- PR 变更建议 ≤ 500 行(不含自动生成文件) -- PR 必须通过所有 CI gate -- 涉及 UI 改动须附截图 -- 涉及权限/安全改动须附 capability 变更说明 - -## 新增 Command 检查清单 - -- [ ] Command 定义在 `src-tauri/src/commands/` 对应模块 -- [ ] 参数校验和错误映射完整 -- [ ] 已在 `lib.rs` 的 `invoke_handler!` 中注册 -- [ ] 前端 API 封装已更新 -- [ ] 相关文档已更新 - -## 安全约束 - -- 禁止提交明文密钥或配置路径泄露 -- Command 白名单制,新增原生能力必须补文档和验证 -- 对 `~/.openclaw` 的读写需包含异常回退和用户可见提示 -- 默认最小权限原则 - -## 常见排查路径 - -- **Command 调用失败** → 见 `docs/runbooks/command-debugging.md` -- **本地开发启动** → 见 `docs/runbooks/local-development.md` -- **版本发布** → 见 `docs/runbooks/release-process.md` -- **打包后行为与 dev 不一致** → 检查资源路径、权限配置、签名、窗口事件 -- **跨平台差异** → 检查 adapter 层平台分支和 CI 构建日志 - -## 参考文档 - -- [Harness Engineering 标准](https://github.com/lay2dev/clawpal/issues/123) 
-- [落地计划](docs/plans/2026-03-16-harness-engineering-standard.md) -- [架构设计](docs/architecture/design.md) -- [测试矩阵](docs/testing/business-flow-test-matrix.md) + +Moved to [`AGENTS.md`](AGENTS.md). diff --git a/openclaw-gateway-client/Cargo.lock b/openclaw-gateway-client/Cargo.lock deleted file mode 100644 index 6b8d1068..00000000 --- a/openclaw-gateway-client/Cargo.lock +++ /dev/null @@ -1,1422 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 4 - -[[package]] -name = "anyhow" -version = "1.0.102" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "base64ct" -version = "1.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" - -[[package]] -name = "bitflags" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "bumpalo" -version = "3.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" - -[[package]] -name = "bytes" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" - -[[package]] -name = "cfg-if" -version = "1.0.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" - -[[package]] -name = "const-oid" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" - -[[package]] -name = "cpufeatures" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" -dependencies = [ - "libc", -] - -[[package]] -name = "crypto-common" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "curve25519-dalek" -version = "4.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" -dependencies = [ - "cfg-if", - "cpufeatures", - "curve25519-dalek-derive", - "digest", - "fiat-crypto", - "rustc_version", - "subtle", - "zeroize", -] - -[[package]] -name = "curve25519-dalek-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "data-encoding" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" - -[[package]] -name = "der" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "diff" -version = "0.1.13" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "crypto-common", -] - -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "ed25519" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" -dependencies = [ - "pkcs8", - "signature", -] - -[[package]] -name = "ed25519-dalek" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" -dependencies = [ - "curve25519-dalek", - "ed25519", - "rand_core 0.6.4", - "serde", - "sha2", - "subtle", - "zeroize", -] - -[[package]] -name = "equivalent" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" - -[[package]] -name = "errno" -version = "0.3.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" -dependencies = [ - "libc", - "windows-sys", -] - -[[package]] -name = "fastrand" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" - -[[package]] -name = "fiat-crypto" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" - -[[package]] -name = "foldhash" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" - -[[package]] -name = "form_urlencoded" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "futures" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" - -[[package]] -name = "futures-executor" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" - -[[package]] -name = "futures-macro" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "futures-sink" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" - -[[package]] -name = "futures-task" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" - -[[package]] -name = "futures-util" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "slab", -] - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "getrandom" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" -dependencies = [ - "cfg-if", - "libc", - "r-efi 5.3.0", - "wasip2", -] - -[[package]] -name = "getrandom" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" -dependencies = [ - "cfg-if", - "libc", - "r-efi 6.0.0", - "wasip2", - "wasip3", -] - -[[package]] -name = 
"hashbrown" -version = "0.15.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" -dependencies = [ - "foldhash", -] - -[[package]] -name = "hashbrown" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "http" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" -dependencies = [ - "bytes", - "itoa", -] - -[[package]] -name = "httparse" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" - -[[package]] -name = "icu_collections" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" -dependencies = [ - "displaydoc", - "potential_utf", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_locale_core" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_normalizer" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" -dependencies = [ - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" - -[[package]] -name = "icu_properties" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" -dependencies = [ - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "zerotrie", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" - -[[package]] -name = "icu_provider" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" -dependencies = [ - "displaydoc", - "icu_locale_core", - "writeable", - "yoke", - "zerofrom", - "zerotrie", - "zerovec", -] - -[[package]] -name = "id-arena" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" - -[[package]] -name = "idna" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" -dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", 
-] - -[[package]] -name = "indexmap" -version = "2.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" -dependencies = [ - "equivalent", - "hashbrown 0.16.1", - "serde", - "serde_core", -] - -[[package]] -name = "itoa" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" - -[[package]] -name = "js-sys" -version = "0.3.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "leb128fmt" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" - -[[package]] -name = "libc" -version = "0.2.183" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" - -[[package]] -name = "linux-raw-sys" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" - -[[package]] -name = "litemap" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" - -[[package]] -name = "log" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" - -[[package]] -name = "memchr" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" - -[[package]] -name = "mio" -version = "1.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" -dependencies = [ - "libc", - "wasi", - "windows-sys", -] - -[[package]] -name = "once_cell" -version = "1.21.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" - -[[package]] -name = "openclaw-gateway-client" -version = "0.1.0" -dependencies = [ - "base64", - "ed25519-dalek", - "futures", - "hex", - "pretty_assertions", - "rand_core 0.6.4", - "serde", - "serde_json", - "sha2", - "tempfile", - "thiserror", - "tokio", - "tokio-tungstenite", - "tracing", - "url", - "uuid", -] - -[[package]] -name = "percent-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - -[[package]] -name = "pin-project-lite" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" - -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "potential_utf" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" -dependencies = [ - "zerovec", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "pretty_assertions" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" -dependencies = [ - "diff", - "yansi", -] - -[[package]] -name = "prettyplease" -version = "0.2.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" -dependencies = [ - "proc-macro2", - "syn", -] - -[[package]] -name = "proc-macro2" -version = "1.0.106" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - -[[package]] -name = "r-efi" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" - -[[package]] -name = "rand" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" -dependencies = [ - "rand_chacha", - "rand_core 0.9.5", -] - -[[package]] -name = "rand_chacha" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" -dependencies = [ - "ppv-lite86", - "rand_core 0.9.5", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.17", -] - -[[package]] 
-name = "rand_core" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" -dependencies = [ - "getrandom 0.3.4", -] - -[[package]] -name = "rustc_version" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver", -] - -[[package]] -name = "rustix" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys", -] - -[[package]] -name = "rustversion" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" - -[[package]] -name = "semver" -version = "1.0.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" - -[[package]] -name = "serde" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" -version = "1.0.149" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" -dependencies = [ - "itoa", - "memchr", - "serde", - "serde_core", - "zmij", -] - -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sha2" -version = "0.10.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "rand_core 0.6.4", -] - -[[package]] -name = "slab" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" - -[[package]] -name = "smallvec" -version = "1.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" - -[[package]] -name = "socket2" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" -dependencies = [ - "libc", - "windows-sys", -] - -[[package]] -name = "spki" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "syn" -version = "2.0.117" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "synstructure" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tempfile" -version = "3.27.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" -dependencies = [ - "fastrand", - "getrandom 0.4.2", - "once_cell", - "rustix", - "windows-sys", -] - -[[package]] -name = "thiserror" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tinystr" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" -dependencies = [ - "displaydoc", - "zerovec", -] - -[[package]] -name = "tokio" -version = "1.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" -dependencies = [ - "bytes", - "libc", - "mio", - "pin-project-lite", - "socket2", - "tokio-macros", - "windows-sys", -] - -[[package]] -name = "tokio-macros" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tokio-tungstenite" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" -dependencies = [ - "futures-util", - "log", - "tokio", - "tungstenite", -] - -[[package]] -name = "tracing" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" -dependencies = [ - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tracing-core" -version = "0.1.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" -dependencies = [ - "once_cell", -] - -[[package]] -name = "tungstenite" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" -dependencies = [ - "bytes", - "data-encoding", - "http", - "httparse", - "log", - "rand", - "sha1", - "thiserror", - "utf-8", -] - -[[package]] -name = "typenum" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" - -[[package]] -name = "unicode-ident" -version = "1.0.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" - -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - -[[package]] -name = "url" -version = "2.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", -] - -[[package]] -name = "utf-8" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - -[[package]] -name = "uuid" -version = "1.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" -dependencies = [ - "getrandom 0.4.2", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "wasi" -version = "0.11.1+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" - -[[package]] -name = "wasip2" -version = "1.0.2+wasi-0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wasip3" -version = "0.4.0+wasi-0.3.0-rc-2026-01-06" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wasm-bindgen" -version = "0.2.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" -dependencies = [ - "cfg-if", - "once_cell", - "rustversion", - "wasm-bindgen-macro", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" -dependencies = [ - "bumpalo", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "wasm-encoder" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" -dependencies = [ - "leb128fmt", - "wasmparser", -] - -[[package]] -name = "wasm-metadata" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" -dependencies = [ 
- "anyhow", - "indexmap", - "wasm-encoder", - "wasmparser", -] - -[[package]] -name = "wasmparser" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" -dependencies = [ - "bitflags", - "hashbrown 0.15.5", - "indexmap", - "semver", -] - -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - -[[package]] -name = "windows-sys" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" -dependencies = [ - "windows-link", -] - -[[package]] -name = "wit-bindgen" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" -dependencies = [ - "wit-bindgen-rust-macro", -] - -[[package]] -name = "wit-bindgen-core" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" -dependencies = [ - "anyhow", - "heck", - "wit-parser", -] - -[[package]] -name = "wit-bindgen-rust" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" -dependencies = [ - "anyhow", - "heck", - "indexmap", - "prettyplease", - "syn", - "wasm-metadata", - "wit-bindgen-core", - "wit-component", -] - -[[package]] -name = "wit-bindgen-rust-macro" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" -dependencies = [ - "anyhow", - "prettyplease", - "proc-macro2", - "quote", - "syn", - "wit-bindgen-core", - 
"wit-bindgen-rust", -] - -[[package]] -name = "wit-component" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" -dependencies = [ - "anyhow", - "bitflags", - "indexmap", - "log", - "serde", - "serde_derive", - "serde_json", - "wasm-encoder", - "wasm-metadata", - "wasmparser", - "wit-parser", -] - -[[package]] -name = "wit-parser" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" -dependencies = [ - "anyhow", - "id-arena", - "indexmap", - "log", - "semver", - "serde", - "serde_derive", - "serde_json", - "unicode-xid", - "wasmparser", -] - -[[package]] -name = "writeable" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" - -[[package]] -name = "yansi" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" - -[[package]] -name = "yoke" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" -dependencies = [ - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - -[[package]] -name = "zerocopy" -version = "0.8.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2578b716f8a7a858b7f02d5bd870c14bf4ddbbcf3a4c05414ba6503640505e3" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" 
-version = "0.8.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e6cc098ea4d3bd6246687de65af3f920c430e236bee1e3bf2e441463f08a02f" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - -[[package]] -name = "zeroize" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" - -[[package]] -name = "zerotrie" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "zmij" -version = "1.0.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/openclaw-gateway-client/tests/protocol_roundtrip.rs 
b/openclaw-gateway-client/tests/protocol_roundtrip.rs index 25eed560..70afa53d 100644 --- a/openclaw-gateway-client/tests/protocol_roundtrip.rs +++ b/openclaw-gateway-client/tests/protocol_roundtrip.rs @@ -34,7 +34,13 @@ fn deserializes_response_frame() { let frame: GatewayFrame = serde_json::from_value(raw).expect("deserialize response frame"); - let GatewayFrame::Response(ResponseFrame { id, ok, payload, error }) = frame else { + let GatewayFrame::Response(ResponseFrame { + id, + ok, + payload, + error, + }) = frame + else { panic!("expected response frame"); }; @@ -55,7 +61,13 @@ fn deserializes_event_frame() { let frame: GatewayFrame = serde_json::from_value(raw).expect("deserialize event frame"); - let GatewayFrame::Event(EventFrame { event, payload, seq, state_version }) = frame else { + let GatewayFrame::Event(EventFrame { + event, + payload, + seq, + state_version, + }) = frame + else { panic!("expected event frame"); }; @@ -76,5 +88,8 @@ fn omits_absent_optional_fields() { let encoded = serde_json::to_value(&frame).expect("serialize event frame"); - assert_eq!(encoded, Value::from(json!({ "type": "event", "event": "tick" }))); + assert_eq!( + encoded, + Value::from(json!({ "type": "event", "event": "tick" })) + ); } diff --git a/openclaw-gateway-client/tests/tls_fingerprint.rs b/openclaw-gateway-client/tests/tls_fingerprint.rs index ff001861..2f94e8b6 100644 --- a/openclaw-gateway-client/tests/tls_fingerprint.rs +++ b/openclaw-gateway-client/tests/tls_fingerprint.rs @@ -3,14 +3,8 @@ use openclaw_gateway_client::tls::normalize_fingerprint; #[test] fn normalizes_sha256_fingerprint_variants() { - assert_eq!( - normalize_fingerprint("AA:bb:cc"), - Some("AA:BB:CC".into()) - ); - assert_eq!( - normalize_fingerprint("aabbcc"), - Some("AA:BB:CC".into()) - ); + assert_eq!(normalize_fingerprint("AA:bb:cc"), Some("AA:BB:CC".into())); + assert_eq!(normalize_fingerprint("aabbcc"), Some("AA:BB:CC".into())); assert_eq!(normalize_fingerprint(""), None); } From 
1e1b1af69499f576ed499c6fa31b5434590134f0 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 14:49:25 +0800 Subject: [PATCH 05/20] style: cargo fmt --- openclaw-gateway-client/src/auth_store.rs | 7 +- openclaw-gateway-client/src/client.rs | 56 +- openclaw-gateway-client/src/lib.rs | 2 +- openclaw-gateway-client/src/node.rs | 4 +- openclaw-gateway-client/src/tls.rs | 4 +- .../tests/client_handshake.rs | 10 +- openclaw-gateway-client/tests/client_rpc.rs | 9 +- .../tests/connect_payload.rs | 11 +- openclaw-gateway-client/tests/node_client.rs | 17 +- .../tests/protocol_roundtrip.rs | 2 +- src-tauri/src/bridge_client.rs | 11 +- src-tauri/src/commands/preferences.rs | 8 +- src-tauri/src/lib.rs | 7 +- src-tauri/src/node_client.rs | 14 +- src-tauri/src/remote_doctor.rs | 565 ++++++++++++------ 15 files changed, 487 insertions(+), 240 deletions(-) diff --git a/openclaw-gateway-client/src/auth_store.rs b/openclaw-gateway-client/src/auth_store.rs index e9db15e0..f2eb9269 100644 --- a/openclaw-gateway-client/src/auth_store.rs +++ b/openclaw-gateway-client/src/auth_store.rs @@ -29,7 +29,9 @@ impl FileAuthStore { } fn path_for(&self, device_id: &str, role: &str) -> PathBuf { - self.root.join(sanitize(device_id)).join(format!("{}.json", sanitize(role))) + self.root + .join(sanitize(device_id)) + .join(format!("{}.json", sanitize(role))) } } @@ -64,7 +66,8 @@ impl AuthStore for FileAuthStore { } fn sanitize(value: &str) -> String { - value.chars() + value + .chars() .map(|ch| { if ch.is_ascii_alphanumeric() || ch == '-' || ch == '_' { ch diff --git a/openclaw-gateway-client/src/client.rs b/openclaw-gateway-client/src/client.rs index 9efc6112..92147ba8 100644 --- a/openclaw-gateway-client/src/client.rs +++ b/openclaw-gateway-client/src/client.rs @@ -92,8 +92,12 @@ impl GatewayClientBuilder { } pub fn build(self) -> Result { - let url = Url::parse(&self.url.ok_or_else(|| Error::Config("url is required".into()))?) 
- .map_err(|err| Error::Config(err.to_string()))?; + let url = Url::parse( + &self + .url + .ok_or_else(|| Error::Config("url is required".into()))?, + ) + .map_err(|err| Error::Config(err.to_string()))?; let tls_fingerprint = self .tls_fingerprint .as_deref() @@ -147,7 +151,12 @@ impl GatewayClient { let challenge = read_until_challenge(&mut reader).await?; let nonce = challenge .payload - .and_then(|payload| payload.get("nonce").and_then(|value| value.as_str()).map(str::to_string)) + .and_then(|payload| { + payload + .get("nonce") + .and_then(|value| value.as_str()) + .map(str::to_string) + }) .ok_or_else(|| Error::Protocol("connect challenge missing nonce".into()))?; let request_id = Uuid::new_v4().to_string(); @@ -188,7 +197,8 @@ impl GatewayClient { }; match frame { GatewayFrame::Response(response) => { - let sender = read_inner.pending.lock().await.remove(&response.id); + let sender = + read_inner.pending.lock().await.remove(&response.id); if let Some(sender) = sender { let result = if response.ok { Ok(response.payload.unwrap_or(Value::Null)) @@ -278,7 +288,9 @@ impl Clone for GatewayClientHandle { async fn read_until_challenge( reader: &mut futures::stream::SplitStream< - tokio_tungstenite::WebSocketStream>, + tokio_tungstenite::WebSocketStream< + tokio_tungstenite::MaybeTlsStream, + >, >, ) -> Result { while let Some(message) = reader.next().await { @@ -286,19 +298,27 @@ async fn read_until_challenge( if !message.is_text() { continue; } - let frame: GatewayFrame = serde_json::from_str(message.to_text().map_err(|err| Error::Transport(err.to_string()))?)?; + let frame: GatewayFrame = serde_json::from_str( + message + .to_text() + .map_err(|err| Error::Transport(err.to_string()))?, + )?; if let GatewayFrame::Event(event) = frame { if event.event == "connect.challenge" { return Ok(event); } } } - Err(Error::Protocol("connection closed before connect.challenge".into())) + Err(Error::Protocol( + "connection closed before connect.challenge".into(), + )) } async fn 
read_until_connect_response( reader: &mut futures::stream::SplitStream< - tokio_tungstenite::WebSocketStream>, + tokio_tungstenite::WebSocketStream< + tokio_tungstenite::MaybeTlsStream, + >, >, request_id: &str, ) -> Result { @@ -307,7 +327,11 @@ async fn read_until_connect_response( if !message.is_text() { continue; } - let frame: GatewayFrame = serde_json::from_str(message.to_text().map_err(|err| Error::Transport(err.to_string()))?)?; + let frame: GatewayFrame = serde_json::from_str( + message + .to_text() + .map_err(|err| Error::Transport(err.to_string()))?, + )?; if let GatewayFrame::Response(ResponseFrame { id, ok, @@ -321,13 +345,21 @@ async fn read_until_connect_response( if !ok { return Err(Error::Protocol( error - .and_then(|value| value.get("message").and_then(|msg| msg.as_str()).map(str::to_string)) + .and_then(|value| { + value + .get("message") + .and_then(|msg| msg.as_str()) + .map(str::to_string) + }) .unwrap_or_else(|| "connect failed".into()), )); } - let payload = payload.ok_or_else(|| Error::Protocol("connect response missing payload".into()))?; + let payload = payload + .ok_or_else(|| Error::Protocol("connect response missing payload".into()))?; return serde_json::from_value(payload).map_err(Error::from); } } - Err(Error::Protocol("connection closed before connect response".into())) + Err(Error::Protocol( + "connection closed before connect response".into(), + )) } diff --git a/openclaw-gateway-client/src/lib.rs b/openclaw-gateway-client/src/lib.rs index 99d0c5dc..9636349a 100644 --- a/openclaw-gateway-client/src/lib.rs +++ b/openclaw-gateway-client/src/lib.rs @@ -1,6 +1,6 @@ -pub mod error; pub mod auth_store; pub mod client; +pub mod error; pub mod identity; pub mod node; pub mod protocol; diff --git a/openclaw-gateway-client/src/node.rs b/openclaw-gateway-client/src/node.rs index badaa64f..5b4d49c9 100644 --- a/openclaw-gateway-client/src/node.rs +++ b/openclaw-gateway-client/src/node.rs @@ -71,6 +71,8 @@ impl NodeClient { if let 
Some(payload) = payload { params.insert("payload".into(), payload); } - self.handle.request("node.event", Some(Value::Object(params))).await + self.handle + .request("node.event", Some(Value::Object(params))) + .await } } diff --git a/openclaw-gateway-client/src/tls.rs b/openclaw-gateway-client/src/tls.rs index dedc38a8..814a2054 100644 --- a/openclaw-gateway-client/src/tls.rs +++ b/openclaw-gateway-client/src/tls.rs @@ -3,7 +3,9 @@ pub fn normalize_fingerprint(input: &str) -> Option { .chars() .filter(|ch| *ch != ':' && !ch.is_ascii_whitespace()) .collect(); - if compact.is_empty() || compact.len() % 2 != 0 || !compact.chars().all(|ch| ch.is_ascii_hexdigit()) + if compact.is_empty() + || compact.len() % 2 != 0 + || !compact.chars().all(|ch| ch.is_ascii_hexdigit()) { return None; } diff --git a/openclaw-gateway-client/tests/client_handshake.rs b/openclaw-gateway-client/tests/client_handshake.rs index ac000961..6d2c58bb 100644 --- a/openclaw-gateway-client/tests/client_handshake.rs +++ b/openclaw-gateway-client/tests/client_handshake.rs @@ -7,7 +7,9 @@ use tokio_tungstenite::{accept_async, tungstenite::Message}; #[tokio::test] async fn waits_for_connect_challenge_and_sends_connect_request() { - let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind test server"); + let listener = TcpListener::bind("127.0.0.1:0") + .await + .expect("bind test server"); let addr = listener.local_addr().expect("local addr"); let (tx, rx) = oneshot::channel::(); @@ -25,7 +27,11 @@ async fn waits_for_connect_challenge_and_sends_connect_request() { .await .expect("send challenge"); - let message = ws.next().await.expect("message").expect("websocket message"); + let message = ws + .next() + .await + .expect("message") + .expect("websocket message"); let text = message.into_text().expect("text frame"); let value: Value = serde_json::from_str(&text).expect("json request"); let req_id = value["id"].as_str().expect("request id").to_string(); diff --git 
a/openclaw-gateway-client/tests/client_rpc.rs b/openclaw-gateway-client/tests/client_rpc.rs index 9d832835..f4715646 100644 --- a/openclaw-gateway-client/tests/client_rpc.rs +++ b/openclaw-gateway-client/tests/client_rpc.rs @@ -7,7 +7,9 @@ use tokio_tungstenite::{accept_async, tungstenite::Message}; #[tokio::test] async fn request_receives_matching_response_and_events_are_broadcast() { - let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind test server"); + let listener = TcpListener::bind("127.0.0.1:0") + .await + .expect("bind test server"); let addr = listener.local_addr().expect("local addr"); let (req_tx, mut req_rx) = mpsc::unbounded_channel::(); let (ready_tx, ready_rx) = oneshot::channel::<()>(); @@ -34,7 +36,10 @@ async fn request_receives_matching_response_and_events_are_broadcast() { .into_text() .expect("text"); let connect_value: Value = serde_json::from_str(&connect_text).expect("connect json"); - let connect_id = connect_value["id"].as_str().expect("connect id").to_string(); + let connect_id = connect_value["id"] + .as_str() + .expect("connect id") + .to_string(); ws.send(Message::text( json!({ diff --git a/openclaw-gateway-client/tests/connect_payload.rs b/openclaw-gateway-client/tests/connect_payload.rs index ac755518..d5c1b214 100644 --- a/openclaw-gateway-client/tests/connect_payload.rs +++ b/openclaw-gateway-client/tests/connect_payload.rs @@ -121,14 +121,21 @@ fn deserializes_hello_ok_response_payload() { let frame: GatewayFrame = serde_json::from_value(raw).expect("deserialize hello response"); - let GatewayFrame::Response(ResponseFrame { payload: Some(payload), .. }) = frame else { + let GatewayFrame::Response(ResponseFrame { + payload: Some(payload), + .. 
+ }) = frame + else { panic!("expected response"); }; let hello: HelloOk = serde_json::from_value(payload).expect("decode hello payload"); assert_eq!(hello.server_name.as_deref(), Some("gateway.local")); assert_eq!(hello.policy.tick_interval_ms, Some(30_000)); - assert_eq!(hello.auth.and_then(|auth| auth.device_token), Some("next-device-token".into())); + assert_eq!( + hello.auth.and_then(|auth| auth.device_token), + Some("next-device-token".into()) + ); } #[test] diff --git a/openclaw-gateway-client/tests/node_client.rs b/openclaw-gateway-client/tests/node_client.rs index bf7f9c18..4f6b1a4a 100644 --- a/openclaw-gateway-client/tests/node_client.rs +++ b/openclaw-gateway-client/tests/node_client.rs @@ -8,7 +8,9 @@ use tokio_tungstenite::{accept_async, tungstenite::Message}; #[tokio::test] async fn node_client_decodes_invoke_requests_and_sends_results_and_events() { - let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind test server"); + let listener = TcpListener::bind("127.0.0.1:0") + .await + .expect("bind test server"); let addr = listener.local_addr().expect("local addr"); let (capture_tx, capture_rx) = oneshot::channel::<(Value, Value)>(); @@ -34,7 +36,10 @@ async fn node_client_decodes_invoke_requests_and_sends_results_and_events() { .into_text() .expect("text"); let connect_value: Value = serde_json::from_str(&connect_text).expect("connect json"); - let connect_id = connect_value["id"].as_str().expect("connect id").to_string(); + let connect_id = connect_value["id"] + .as_str() + .expect("connect id") + .to_string(); ws.send(Message::text( json!({ @@ -101,7 +106,8 @@ async fn node_client_decodes_invoke_requests_and_sends_results_and_events() { .expect("node event frame") .into_text() .expect("text"); - let node_event_value: Value = serde_json::from_str(&node_event_text).expect("node event json"); + let node_event_value: Value = + serde_json::from_str(&node_event_text).expect("node event json"); let node_event_id = node_event_value["id"] 
.as_str() .expect("node event id") @@ -154,7 +160,10 @@ async fn node_client_decodes_invoke_requests_and_sends_results_and_events() { assert_eq!(invoke_result["params"]["id"], "invoke-1"); assert_eq!(invoke_result["params"]["nodeId"], "node-1"); assert_eq!(invoke_result["params"]["ok"], true); - assert_eq!(invoke_result["params"]["payload"], json!({ "echoed": true })); + assert_eq!( + invoke_result["params"]["payload"], + json!({ "echoed": true }) + ); assert_eq!(node_event["method"], "node.event"); assert_eq!(node_event["params"]["event"], "exec.finished"); diff --git a/openclaw-gateway-client/tests/protocol_roundtrip.rs b/openclaw-gateway-client/tests/protocol_roundtrip.rs index 70afa53d..699d8331 100644 --- a/openclaw-gateway-client/tests/protocol_roundtrip.rs +++ b/openclaw-gateway-client/tests/protocol_roundtrip.rs @@ -1,6 +1,6 @@ use openclaw_gateway_client::protocol::{EventFrame, GatewayFrame, RequestFrame, ResponseFrame}; use pretty_assertions::assert_eq; -use serde_json::{json, Value}; +use serde_json::{Value, json}; #[test] fn serializes_request_frame() { diff --git a/src-tauri/src/bridge_client.rs b/src-tauri/src/bridge_client.rs index 22b5bbd9..18b1e371 100644 --- a/src-tauri/src/bridge_client.rs +++ b/src-tauri/src/bridge_client.rs @@ -472,8 +472,15 @@ impl BridgeClient { return; }; if let Ok(frame) = serde_json::from_str::(text) { - Self::handle_frame(frame, inner_ref, invokes_ref, invoke_events, expired_ref, app) - .await; + Self::handle_frame( + frame, + inner_ref, + invokes_ref, + invoke_events, + expired_ref, + app, + ) + .await; } } diff --git a/src-tauri/src/commands/preferences.rs b/src-tauri/src/commands/preferences.rs index d5a8b88f..7d345628 100644 --- a/src-tauri/src/commands/preferences.rs +++ b/src-tauri/src/commands/preferences.rs @@ -99,9 +99,8 @@ fn save_app_preferences_from_paths( stored.show_ssh_transfer_speed_ui = prefs.show_ssh_transfer_speed_ui; stored.remote_doctor_gateway_url = 
normalize_remote_doctor_gateway_url(prefs.remote_doctor_gateway_url.clone()); - stored.remote_doctor_gateway_auth_token = normalize_remote_doctor_gateway_auth_token( - prefs.remote_doctor_gateway_auth_token.clone(), - ); + stored.remote_doctor_gateway_auth_token = + normalize_remote_doctor_gateway_auth_token(prefs.remote_doctor_gateway_auth_token.clone()); save_stored_preferences_from_paths(paths, &stored) } @@ -172,8 +171,7 @@ pub fn set_remote_doctor_gateway_auth_token_preference( ) -> Result { let paths = resolve_paths(); let mut prefs = load_app_preferences_from_paths(&paths); - prefs.remote_doctor_gateway_auth_token = - normalize_remote_doctor_gateway_auth_token(auth_token); + prefs.remote_doctor_gateway_auth_token = normalize_remote_doctor_gateway_auth_token(auth_token); save_app_preferences_from_paths(&paths, &prefs)?; Ok(prefs) } diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index f565fa5c..691f6235 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -55,10 +55,9 @@ use crate::commands::{ repair_doctor_assistant, repair_primary_via_rescue, resolve_api_keys, resolve_provider_auth, restart_gateway, restore_from_backup, rollback, run_doctor_command, run_openclaw_upgrade, set_active_clawpal_data_dir, set_active_openclaw_home, set_agent_model, - set_bug_report_settings, set_global_model, set_session_model_override, - set_remote_doctor_gateway_auth_token_preference, set_remote_doctor_gateway_url_preference, - set_ssh_transfer_speed_ui_preference, - setup_agent_identity, sftp_list_dir, sftp_read_file, + set_bug_report_settings, set_global_model, set_remote_doctor_gateway_auth_token_preference, + set_remote_doctor_gateway_url_preference, set_session_model_override, + set_ssh_transfer_speed_ui_preference, setup_agent_identity, sftp_list_dir, sftp_read_file, sftp_remove_file, sftp_write_file, ssh_connect, ssh_connect_with_passphrase, ssh_disconnect, ssh_exec, ssh_status, start_watchdog, stop_watchdog, test_model_profile, trigger_cron_job, 
uninstall_watchdog, upsert_model_profile, upsert_ssh_host, diff --git a/src-tauri/src/node_client.rs b/src-tauri/src/node_client.rs index d5077127..b2288cb4 100644 --- a/src-tauri/src/node_client.rs +++ b/src-tauri/src/node_client.rs @@ -109,13 +109,8 @@ impl NodeClient { .await; } Ok(Message::Binary(bytes)) => { - Self::handle_message_payload( - &bytes, - &inner_ref, - &chat_ref, - &app_clone, - ) - .await; + Self::handle_message_payload(&bytes, &inner_ref, &chat_ref, &app_clone) + .await; } Ok(Message::Close(_)) => { let _ = app_clone @@ -292,10 +287,7 @@ impl NodeClient { Ok(rx) } - pub async fn await_agent_final( - &self, - rx: oneshot::Receiver, - ) -> Result { + pub async fn await_agent_final(&self, rx: oneshot::Receiver) -> Result { match tokio::time::timeout(std::time::Duration::from_secs(180), rx).await { Ok(Ok(text)) => Ok(text), Ok(Err(_)) => { diff --git a/src-tauri/src/remote_doctor.rs b/src-tauri/src/remote_doctor.rs index d813559c..e73f8d8f 100644 --- a/src-tauri/src/remote_doctor.rs +++ b/src-tauri/src/remote_doctor.rs @@ -19,9 +19,9 @@ use crate::commands::logs::log_dev; use crate::commands::preferences::load_app_preferences_from_paths; use crate::commands::{agent::create_agent, agent::setup_agent_identity}; use crate::commands::{ - diagnose_primary_via_rescue, manage_rescue_bot, read_raw_config, remote_diagnose_primary_via_rescue, - remote_manage_rescue_bot, remote_read_raw_config, remote_restart_gateway, remote_write_raw_config, - restart_gateway, RescuePrimaryDiagnosisResult, + diagnose_primary_via_rescue, manage_rescue_bot, read_raw_config, + remote_diagnose_primary_via_rescue, remote_manage_rescue_bot, remote_read_raw_config, + remote_restart_gateway, remote_write_raw_config, restart_gateway, RescuePrimaryDiagnosisResult, }; use crate::config_io::read_openclaw_config; use crate::models::resolve_paths; @@ -437,16 +437,13 @@ fn ensure_local_remote_doctor_agent_ready() -> Result<(), String> { } } - setup_agent_identity( - agent_id.clone(), - 
"ClawPal Remote Doctor".to_string(), - None, - )?; + setup_agent_identity(agent_id.clone(), "ClawPal Remote Doctor".to_string(), None)?; let paths = resolve_paths(); let cfg = read_openclaw_config(&paths)?; - let workspace = clawpal_core::doctor::resolve_agent_workspace_from_config(&cfg, &agent_id, None) - .map(|path| shellexpand::tilde(&path).to_string())?; + let workspace = + clawpal_core::doctor::resolve_agent_workspace_from_config(&cfg, &agent_id, None) + .map(|path| shellexpand::tilde(&path).to_string())?; create_dir_all(&workspace) .map_err(|error| format!("Failed to create remote doctor workspace: {error}"))?; @@ -603,8 +600,13 @@ async fn run_rescue_diagnosis( TargetLocation::LocalOpenclaw => diagnose_primary_via_rescue(None, None).await, TargetLocation::RemoteOpenclaw => { let host_id = primary_remote_target_host_id(instance_id)?; - remote_diagnose_primary_via_rescue(app.state::(), host_id, None, None) - .await + remote_diagnose_primary_via_rescue( + app.state::(), + host_id, + None, + None, + ) + .await } } } @@ -621,7 +623,8 @@ async fn read_target_config( remote_read_raw_config(app.state::(), host_id).await? 
} }; - serde_json::from_str::(&raw).map_err(|error| format!("Failed to parse target config: {error}")) + serde_json::from_str::(&raw) + .map_err(|error| format!("Failed to parse target config: {error}")) } async fn read_target_config_raw( @@ -757,7 +760,9 @@ async fn restart_target_gateway( } fn diagnosis_is_healthy(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { - diagnosis.status == "healthy" && diagnosis.summary.status == "healthy" && diagnosis.issues.is_empty() + diagnosis.status == "healthy" + && diagnosis.summary.status == "healthy" + && diagnosis.issues.is_empty() } fn diagnosis_context(diagnosis: &RescuePrimaryDiagnosisResult) -> Value { @@ -827,14 +832,20 @@ fn rescue_setup_command_result( } } -fn rescue_bot_manage_command_result(result: &crate::commands::RescueBotManageResult) -> CommandResult { +fn rescue_bot_manage_command_result( + result: &crate::commands::RescueBotManageResult, +) -> CommandResult { CommandResult { argv: vec![ "manage_rescue_bot".into(), result.action.clone(), result.profile.clone(), ], - exit_code: Some(if result.active || result.configured { 0 } else { 1 }), + exit_code: Some(if result.active || result.configured { + 0 + } else { + 1 + }), stdout: format!( "configured={} active={} runtimeState={} rescuePort={} mainPort={} commands={}", result.configured, @@ -886,7 +897,11 @@ fn rescue_activation_error_message( format!( "Rescue profile \"{}\" was {} but did not become active (runtime state: {}).", profile, - if configured { "configured" } else { "not configured" }, + if configured { + "configured" + } else { + "not configured" + }, runtime_state ) + &suffix } @@ -944,7 +959,14 @@ async fn execute_rescue_activation_diagnostic_command( }; } - match execute_command(&app.state::(), target_location, instance_id, argv).await { + match execute_command( + &app.state::(), + target_location, + instance_id, + argv, + ) + .await + { Ok(result) => result, Err(error) => CommandResult { argv: argv.to_vec(), @@ -965,7 +987,10 @@ async fn 
collect_rescue_activation_failure_diagnostics( ) -> Vec { let mut results = Vec::new(); for argv in rescue_activation_diagnostic_commands(profile) { - results.push(execute_rescue_activation_diagnostic_command(app, target_location, instance_id, &argv).await); + results.push( + execute_rescue_activation_diagnostic_command(app, target_location, instance_id, &argv) + .await, + ); } results } @@ -983,18 +1008,34 @@ async fn ensure_rescue_profile_ready( ) -> Result { let started = Instant::now(); let result = match target_location { - TargetLocation::LocalOpenclaw => manage_rescue_bot("activate".into(), Some("rescue".into()), None) - .await - .map_err(|error| RescueActivationFailure { - message: error, - activation_result: rescue_setup_command_result("activate", "rescue", false, false, "activation_failed"), - diagnostics: Vec::new(), - })?, + TargetLocation::LocalOpenclaw => { + manage_rescue_bot("activate".into(), Some("rescue".into()), None) + .await + .map_err(|error| RescueActivationFailure { + message: error, + activation_result: rescue_setup_command_result( + "activate", + "rescue", + false, + false, + "activation_failed", + ), + diagnostics: Vec::new(), + })? 
+ } TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id).map_err(|error| RescueActivationFailure { - message: error, - activation_result: rescue_setup_command_result("activate", "rescue", false, false, "activation_failed"), - diagnostics: Vec::new(), + let host_id = primary_remote_target_host_id(instance_id).map_err(|error| { + RescueActivationFailure { + message: error, + activation_result: rescue_setup_command_result( + "activate", + "rescue", + false, + false, + "activation_failed", + ), + diagnostics: Vec::new(), + } })?; remote_manage_rescue_bot( app.state::(), @@ -1006,7 +1047,13 @@ async fn ensure_rescue_profile_ready( .await .map_err(|error| RescueActivationFailure { message: error, - activation_result: rescue_setup_command_result("activate", "rescue", false, false, "activation_failed"), + activation_result: rescue_setup_command_result( + "activate", + "rescue", + false, + false, + "activation_failed", + ), diagnostics: Vec::new(), })? } @@ -1020,8 +1067,13 @@ async fn ensure_rescue_profile_ready( ); command_result.duration_ms = started.elapsed().as_millis() as u64; if !result.active { - let diagnostics = - collect_rescue_activation_failure_diagnostics(app, target_location, instance_id, &result.profile).await; + let diagnostics = collect_rescue_activation_failure_diagnostics( + app, + target_location, + instance_id, + &result.profile, + ) + .await; let suggested_checks = diagnostics .iter() .map(|result| result.argv.join(" ")) @@ -1048,7 +1100,9 @@ async fn repair_rescue_gateway_if_needed( instance_id: &str, diagnosis: &mut RescuePrimaryDiagnosisResult, ) -> Result<(), String> { - if !(diagnosis_missing_rescue_profile(diagnosis) || diagnosis_unhealthy_rescue_gateway(diagnosis)) { + if !(diagnosis_missing_rescue_profile(diagnosis) + || diagnosis_unhealthy_rescue_gateway(diagnosis)) + { return Ok(()); } @@ -1097,7 +1151,12 @@ async fn repair_rescue_gateway_if_needed( Ok(()) } -fn append_diagnosis_log(session_id: &str, 
stage: &str, round: usize, diagnosis: &RescuePrimaryDiagnosisResult) { +fn append_diagnosis_log( + session_id: &str, + stage: &str, + round: usize, + diagnosis: &RescuePrimaryDiagnosisResult, +) { append_remote_doctor_log( session_id, json!({ @@ -1182,14 +1241,19 @@ fn ensure_object(value: &mut Value) -> Result<&mut serde_json::Map Result<(), String> { - let segments = path.split('.').filter(|segment| !segment.trim().is_empty()).collect::>(); + let segments = path + .split('.') + .filter(|segment| !segment.trim().is_empty()) + .collect::>(); if segments.is_empty() { return Err("Config set path cannot be empty".into()); } let mut cursor = root; for segment in &segments[..segments.len() - 1] { let object = ensure_object(cursor)?; - cursor = object.entry((*segment).to_string()).or_insert_with(|| json!({})); + cursor = object + .entry((*segment).to_string()) + .or_insert_with(|| json!({})); } let object = ensure_object(cursor)?; object.insert(segments[segments.len() - 1].to_string(), value); @@ -1197,13 +1261,19 @@ fn apply_config_set(root: &mut Value, path: &str, value: Value) -> Result<(), St } fn apply_config_unset(root: &mut Value, path: &str) -> Result<(), String> { - let segments = path.split('.').filter(|segment| !segment.trim().is_empty()).collect::>(); + let segments = path + .split('.') + .filter(|segment| !segment.trim().is_empty()) + .collect::>(); if segments.is_empty() { return Err("Config unset path cannot be empty".into()); } let mut cursor = root; for segment in &segments[..segments.len() - 1] { - let Some(next) = cursor.as_object_mut().and_then(|object| object.get_mut(*segment)) else { + let Some(next) = cursor + .as_object_mut() + .and_then(|object| object.get_mut(*segment)) + else { return Ok(()); }; cursor = next; @@ -1352,8 +1422,13 @@ async fn execute_clawpal_command( argv: &[String], ) -> Result { match argv.get(1).map(String::as_str) { - Some("doctor") => execute_clawpal_doctor_command(app, pool, target_location, instance_id, argv).await, - 
other => Err(format!("Unsupported clawpal command in remote doctor agent session: {:?}", other)), + Some("doctor") => { + execute_clawpal_doctor_command(app, pool, target_location, instance_id, argv).await + } + other => Err(format!( + "Unsupported clawpal command in remote doctor agent session: {:?}", + other + )), } } @@ -1374,20 +1449,32 @@ async fn execute_clawpal_doctor_command( ) .await?; let which_result = match target_location { - TargetLocation::LocalOpenclaw => execute_command( - pool, - target_location, - instance_id, - &["sh".into(), "-lc".into(), "command -v openclaw || true".into()], - ) - .await?, - TargetLocation::RemoteOpenclaw => execute_command( - pool, - target_location, - instance_id, - &["sh".into(), "-lc".into(), "command -v openclaw || true".into()], - ) - .await?, + TargetLocation::LocalOpenclaw => { + execute_command( + pool, + target_location, + instance_id, + &[ + "sh".into(), + "-lc".into(), + "command -v openclaw || true".into(), + ], + ) + .await? + } + TargetLocation::RemoteOpenclaw => { + execute_command( + pool, + target_location, + instance_id, + &[ + "sh".into(), + "-lc".into(), + "command -v openclaw || true".into(), + ], + ) + .await? 
+ } }; Ok(json!({ "ok": version_result.exit_code == Some(0), @@ -1396,7 +1483,10 @@ async fn execute_clawpal_doctor_command( })) } Some("config-read") => { - let maybe_path = argv.get(3).map(String::as_str).filter(|value| !value.starts_with("--")); + let maybe_path = argv + .get(3) + .map(String::as_str) + .filter(|value| !value.starts_with("--")); let raw = read_target_config_raw(app, target_location, instance_id).await?; config_read_response(&raw, maybe_path) } @@ -1407,7 +1497,9 @@ async fn execute_clawpal_doctor_command( })) } Some("config-delete") => { - let path = argv.get(3).ok_or("clawpal doctor config-delete requires a path")?; + let path = argv + .get(3) + .ok_or("clawpal doctor config-delete requires a path")?; let mut config = read_target_config(app, target_location, instance_id).await?; apply_config_unset(&mut config, path)?; write_target_config(app, target_location, instance_id, &config).await?; @@ -1427,8 +1519,12 @@ async fn execute_clawpal_doctor_command( })) } Some("config-upsert") => { - let path = argv.get(3).ok_or("clawpal doctor config-upsert requires a path")?; - let value_raw = argv.get(4).ok_or("clawpal doctor config-upsert requires a value")?; + let path = argv + .get(3) + .ok_or("clawpal doctor config-upsert requires a path")?; + let value_raw = argv + .get(4) + .ok_or("clawpal doctor config-upsert requires a value")?; let value: Value = serde_json::from_str(value_raw) .map_err(|error| format!("Invalid JSON value for config-upsert: {error}"))?; let mut config = read_target_config(app, target_location, instance_id).await?; @@ -1442,15 +1538,16 @@ async fn execute_clawpal_doctor_command( .iter() .position(|part| part == "--tool") .ok_or("clawpal doctor exec requires --tool")?; - let tool = argv.get(tool_idx + 1).ok_or("clawpal doctor exec missing tool name")?; + let tool = argv + .get(tool_idx + 1) + .ok_or("clawpal doctor exec missing tool name")?; let args_idx = argv.iter().position(|part| part == "--args"); let mut exec_argv = 
vec![tool.clone()]; if let Some(index) = args_idx { if let Some(arg_string) = argv.get(index + 1) { - exec_argv.extend( - shell_words::split(arg_string) - .map_err(|error| format!("Failed to parse clawpal doctor exec args: {error}"))?, - ); + exec_argv.extend(shell_words::split(arg_string).map_err(|error| { + format!("Failed to parse clawpal doctor exec args: {error}") + })?); } } let result = execute_command(pool, target_location, instance_id, &exec_argv).await?; @@ -1527,7 +1624,9 @@ async fn execute_invoke_payload( })) } "clawpal" => execute_clawpal_command(app, pool, target_location, instance_id, &argv).await, - other => Err(format!("Unsupported invoke command in remote doctor agent session: {other}")), + other => Err(format!( + "Unsupported invoke command in remote doctor agent session: {other}" + )), } } @@ -1547,9 +1646,9 @@ async fn run_agent_request_with_bridge( .await?; let mut invokes = bridge.subscribe_invokes(); let final_future = async move { - final_rx - .await - .map_err(|_| "Agent request ended before a final chat response was received".to_string()) + final_rx.await.map_err(|_| { + "Agent request ended before a final chat response was received".to_string() + }) }; tokio::pin!(final_future); @@ -1609,7 +1708,11 @@ async fn execute_command( let result = match target_location { TargetLocation::LocalOpenclaw => { if argv[0] == "openclaw" { - let arg_refs = argv.iter().skip(1).map(String::as_str).collect::>(); + let arg_refs = argv + .iter() + .skip(1) + .map(String::as_str) + .collect::>(); let output = run_openclaw(&arg_refs)?; CommandResult { argv: argv.to_vec(), @@ -1625,9 +1728,9 @@ async fn execute_command( if let Some(openclaw_home) = get_active_openclaw_home_override() { command.env("OPENCLAW_HOME", openclaw_home); } - let output = command - .output() - .map_err(|error| format!("Failed to execute local command {:?}: {error}", argv))?; + let output = command.output().map_err(|error| { + format!("Failed to execute local command {:?}: {error}", 
argv) + })?; CommandResult { argv: argv.to_vec(), exit_code: output.status.code(), @@ -1641,7 +1744,11 @@ async fn execute_command( TargetLocation::RemoteOpenclaw => { let host_id = primary_remote_target_host_id(instance_id)?; if argv[0] == "openclaw" { - let arg_refs = argv.iter().skip(1).map(String::as_str).collect::>(); + let arg_refs = argv + .iter() + .skip(1) + .map(String::as_str) + .collect::>(); let output = run_openclaw_remote(pool, &host_id, &arg_refs).await?; CommandResult { argv: argv.to_vec(), @@ -1652,7 +1759,9 @@ async fn execute_command( timed_out: false, } } else { - let output = pool.exec_login(&host_id, &build_shell_command(argv)).await?; + let output = pool + .exec_login(&host_id, &build_shell_command(argv)) + .await?; CommandResult { argv: argv.to_vec(), exit_code: Some(output.exit_code as i32), @@ -1733,10 +1842,13 @@ fn plan_command_failure_message( } fn command_result_stdout(value: &Value) -> String { - value.get("stdout") + value + .get("stdout") .and_then(Value::as_str) .map(str::to_string) - .unwrap_or_else(|| serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string())) + .unwrap_or_else(|| { + serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string()) + }) } async fn execute_plan_command( @@ -2133,8 +2245,15 @@ async fn run_clawpal_server_repair_loop( let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; append_diagnosis_log(session_id, "initial", 0, &diagnosis); if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { - repair_rescue_gateway_if_needed(app, session_id, 0, target_location, instance_id, &mut diagnosis) - .await?; + repair_rescue_gateway_if_needed( + app, + session_id, + 0, + target_location, + instance_id, + &mut diagnosis, + ) + .await?; } if diagnosis_is_healthy(&diagnosis) { return Ok(result_for_completion( @@ -2159,8 +2278,9 @@ async fn run_clawpal_server_repair_loop( Some(PlanKind::Repair), None, ); - let config_context = - 
build_config_excerpt_context(&read_target_config_raw(app, target_location, instance_id).await?); + let config_context = build_config_excerpt_context( + &read_target_config_raw(app, target_location, instance_id).await?, + ); append_remote_doctor_log( session_id, json!({ @@ -2243,7 +2363,10 @@ async fn run_clawpal_server_repair_loop( result.stdout = format!("Updated {path}"); } "configUnset" => { - let path = step.path.as_deref().ok_or("configUnset step missing path")?; + let path = step + .path + .as_deref() + .ok_or("configUnset step missing path")?; emit_progress( Some(app), session_id, @@ -2298,7 +2421,8 @@ async fn run_clawpal_server_repair_loop( "result": result, }), ); - report_clawpal_server_step_result(client, &plan.plan_id, step_index, step, &result).await; + report_clawpal_server_step_result(client, &plan.plan_id, step_index, step, &result) + .await; if result.exit_code.unwrap_or(1) != 0 { return Err(result.stderr); } @@ -2320,7 +2444,11 @@ async fn run_clawpal_server_repair_loop( .await?; } last_step_types = round_step_types.clone(); - round_observations.push(RepairRoundObservation::new(round, &round_step_types, &diagnosis)); + round_observations.push(RepairRoundObservation::new( + round, + &round_step_types, + &diagnosis, + )); if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { let observation = round_observations .last() @@ -2382,8 +2510,9 @@ async fn run_agent_planner_repair_loop( for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { let kind = next_agent_plan_kind_for_round(&diagnosis, &previous_results); - let config_context = - build_config_excerpt_context(&read_target_config_raw(app, target_location, instance_id).await?); + let config_context = build_config_excerpt_context( + &read_target_config_raw(app, target_location, instance_id).await?, + ); let phase = match kind { PlanKind::Detect => "planning_detect", PlanKind::Investigate => "planning_investigate", @@ -2460,11 +2589,15 @@ async fn run_agent_planner_repair_loop( 
PlanKind::Investigate => "executing_investigate", PlanKind::Repair => "executing_repair", }, - format!("Running {} command: {}", match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, command.argv.join(" ")), + format!( + "Running {} command: {}", + match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + command.argv.join(" ") + ), Some(kind), Some(command.argv.clone()), ); @@ -2483,20 +2616,20 @@ async fn run_agent_planner_repair_loop( "purpose": command.purpose, }), ); - let command_result = match execute_plan_command( - app, - pool, - target_location, - instance_id, - &command.argv, - ) - .await - { - Ok(result) => result, - Err(error) => { - return Err(plan_command_failure_message(kind, round, &command.argv, &error)); - } - }; + let command_result = + match execute_plan_command(app, pool, target_location, instance_id, &command.argv) + .await + { + Ok(result) => result, + Err(error) => { + return Err(plan_command_failure_message( + kind, + round, + &command.argv, + &error, + )); + } + }; append_remote_doctor_log( session_id, json!({ @@ -2551,7 +2684,11 @@ async fn run_agent_planner_repair_loop( )); } - round_observations.push(RepairRoundObservation::new(round, &last_step_types, &diagnosis)); + round_observations.push(RepairRoundObservation::new( + round, + &last_step_types, + &diagnosis, + )); if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { let observation = round_observations .last() @@ -2741,16 +2878,27 @@ async fn start_remote_doctor_repair_impl( "[remote_doctor] session={} protocol fallback legacy_doctor -> clawpal_server", session_id )); - run_clawpal_server_repair_loop(&app, &client, &session_id, &instance_id, target_location) - .await + run_clawpal_server_repair_loop( + &app, + &client, + &session_id, + &instance_id, + target_location, + ) + .await } else { legacy } } 
RemoteDoctorProtocol::ClawpalServer => { - let clawpal_server = - run_clawpal_server_repair_loop(&app, &client, &session_id, &instance_id, target_location) - .await; + let clawpal_server = run_clawpal_server_repair_loop( + &app, + &client, + &session_id, + &instance_id, + target_location, + ) + .await; if forced_protocol.is_none() && matches!(&clawpal_server, Err(error) if is_unknown_method_error(error)) { @@ -2952,7 +3100,9 @@ mod tests { ) .expect("config set"); assert_eq!( - value.pointer("/models/providers/openai/baseUrl").and_then(Value::as_str), + value + .pointer("/models/providers/openai/baseUrl") + .and_then(Value::as_str), Some("http://127.0.0.1:3000/v1") ); } @@ -3137,8 +3287,7 @@ mod tests { assert_eq!(remote_doctor_agent_id(), "clawpal-remote-doctor"); assert!(!remote_doctor_agent_session_key("sess-1").contains("main")); assert!( - remote_doctor_agent_session_key("sess-1") - .starts_with("agent:clawpal-remote-doctor:") + remote_doctor_agent_session_key("sess-1").starts_with("agent:clawpal-remote-doctor:") ); } @@ -3198,10 +3347,12 @@ mod tests { .expect("agent workspace") .replace("~/", &format!("{}/", home_dir.to_string_lossy())); for file_name in ["IDENTITY.md", "USER.md", "BOOTSTRAP.md", "AGENTS.md"] { - let content = - std::fs::read_to_string(std::path::Path::new(&workspace).join(file_name)) - .unwrap_or_else(|error| panic!("read {file_name}: {error}")); - assert!(!content.trim().is_empty(), "{file_name} should not be empty"); + let content = std::fs::read_to_string(std::path::Path::new(&workspace).join(file_name)) + .unwrap_or_else(|error| panic!("read {file_name}: {error}")); + assert!( + !content.trim().is_empty(), + "{file_name} should not be empty" + ); } let _ = std::fs::remove_dir_all(&temp_root); @@ -3210,14 +3361,22 @@ mod tests { #[test] fn only_agent_planner_protocol_requires_bridge() { assert!(protocol_requires_bridge(RemoteDoctorProtocol::AgentPlanner)); - assert!(!protocol_requires_bridge(RemoteDoctorProtocol::ClawpalServer)); - 
assert!(!protocol_requires_bridge(RemoteDoctorProtocol::LegacyDoctor)); + assert!(!protocol_requires_bridge( + RemoteDoctorProtocol::ClawpalServer + )); + assert!(!protocol_requires_bridge( + RemoteDoctorProtocol::LegacyDoctor + )); } #[test] fn clawpal_server_protocol_skips_local_rescue_preflight() { - assert!(!protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer)); - assert!(!protocol_runs_rescue_preflight(RemoteDoctorProtocol::AgentPlanner)); + assert!(!protocol_runs_rescue_preflight( + RemoteDoctorProtocol::ClawpalServer + )); + assert!(!protocol_runs_rescue_preflight( + RemoteDoctorProtocol::AgentPlanner + )); } #[test] @@ -3318,7 +3477,11 @@ mod tests { let error = plan_command_failure_message( PlanKind::Investigate, 2, - &["openclaw".to_string(), "gateway".to_string(), "logs".to_string()], + &[ + "openclaw".to_string(), + "gateway".to_string(), + "logs".to_string(), + ], "ssh command failed: russh exec timed out after 25s", ); assert!(error.contains("Investigate command failed in round 2")); @@ -3387,17 +3550,15 @@ mod tests { #[test] fn repeated_rediagnose_only_rounds_are_detected_as_stalled() { - let diagnosis = sample_diagnosis(vec![ - json!({ - "id": "providers.base_url", - "code": "invalid.base_url", - "severity": "medium", - "message": "Provider base URL is invalid", - "autoFixable": true, - "fixHint": "Reset baseUrl", - "source": "config" - }), - ]); + let diagnosis = sample_diagnosis(vec![json!({ + "id": "providers.base_url", + "code": "invalid.base_url", + "severity": "medium", + "message": "Provider base URL is invalid", + "autoFixable": true, + "fixHint": "Reset baseUrl", + "source": "config" + })]); let step_types = vec!["doctorRediagnose".to_string()]; assert!(!repair_plan_stalled( @@ -3419,21 +3580,16 @@ mod tests { #[test] fn round_limit_error_message_includes_latest_issues_and_step_types() { - let diagnosis = sample_diagnosis(vec![ - json!({ - "id": "providers.base_url", - "code": "invalid.base_url", - "severity": "medium", - 
"message": "Provider base URL is invalid", - "autoFixable": true, - "fixHint": "Reset baseUrl", - "source": "config" - }), - ]); - let error = round_limit_error_message( - &diagnosis, - &["doctorRediagnose".to_string()], - ); + let diagnosis = sample_diagnosis(vec![json!({ + "id": "providers.base_url", + "code": "invalid.base_url", + "severity": "medium", + "message": "Provider base URL is invalid", + "autoFixable": true, + "fixHint": "Reset baseUrl", + "source": "config" + })]); + let error = round_limit_error_message(&diagnosis, &["doctorRediagnose".to_string()]); assert!(error.contains("invalid.base_url")); assert!(error.contains("doctorRediagnose")); assert!(error.contains("Provider base URL is invalid")); @@ -3443,14 +3599,16 @@ mod tests { fn unreadable_config_context_uses_raw_excerpt_and_parse_error() { let context = build_config_excerpt_context("{\n ddd\n}"); assert!(context.config_excerpt.is_null()); - assert!(context.config_excerpt_raw.as_deref().unwrap_or_default().contains("ddd")); - assert!( - context - .config_parse_error - .as_deref() - .unwrap_or_default() - .contains("key must be a string") - ); + assert!(context + .config_excerpt_raw + .as_deref() + .unwrap_or_default() + .contains("ddd")); + assert!(context + .config_parse_error + .as_deref() + .unwrap_or_default() + .contains("key must be a string")); } #[test] @@ -3459,12 +3617,10 @@ mod tests { let summary = config_excerpt_log_summary(&context); assert_eq!(summary["configExcerptPresent"], json!(false)); assert_eq!(summary["configExcerptRawPresent"], json!(true)); - assert!( - summary["configParseError"] - .as_str() - .unwrap_or_default() - .contains("key must be a string") - ); + assert!(summary["configParseError"] + .as_str() + .unwrap_or_default() + .contains("key must be a string")); } #[test] @@ -3472,12 +3628,10 @@ mod tests { let value = config_read_response("{\n ddd\n}", None).expect("config read response"); assert!(value["value"].is_null()); 
assert!(value["raw"].as_str().unwrap_or_default().contains("ddd")); - assert!( - value["parseError"] - .as_str() - .unwrap_or_default() - .contains("key must be a string") - ); + assert!(value["parseError"] + .as_str() + .unwrap_or_default() + .contains("key must be a string")); } #[test] @@ -3531,7 +3685,10 @@ mod tests { "rescue", false, "configured_inactive", - &["manage_rescue_bot status rescue".to_string(), "openclaw --profile rescue gateway status".to_string()], + &[ + "manage_rescue_bot status rescue".to_string(), + "openclaw --profile rescue gateway status".to_string(), + ], ); assert!(error.contains("rescue")); assert!(error.contains("configured_inactive")); @@ -3549,7 +3706,8 @@ mod tests { .collect::>(); assert!(rendered.contains(&"manage_rescue_bot status rescue".to_string())); assert!(rendered.contains(&"openclaw --profile rescue gateway status".to_string())); - assert!(rendered.contains(&"openclaw --profile rescue config get gateway.port --json".to_string())); + assert!(rendered + .contains(&"openclaw --profile rescue config get gateway.port --json".to_string())); } const E2E_CONTAINER_NAME: &str = "clawpal-e2e-remote-doctor"; @@ -3677,7 +3835,10 @@ CMD ["/usr/sbin/sshd", "-D"] "#; fn should_run_docker_e2e() -> bool { - std::env::var("CLAWPAL_RUN_REMOTE_DOCTOR_E2E").ok().as_deref() == Some("1") + std::env::var("CLAWPAL_RUN_REMOTE_DOCTOR_E2E") + .ok() + .as_deref() + == Some("1") } fn live_gateway_url() -> Option { @@ -3746,7 +3907,14 @@ CMD ["/usr/sbin/sshd", "-D"] fn build_e2e_image() -> Result<(), String> { let dockerfile = E2E_DOCKERFILE.replace("ROOTPASS", E2E_ROOT_PASSWORD); let output = Command::new("docker") - .args(["build", "-t", &format!("{E2E_CONTAINER_NAME}:latest"), "-f", "-", "."]) + .args([ + "build", + "-t", + &format!("{E2E_CONTAINER_NAME}:latest"), + "-f", + "-", + ".", + ]) .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped()) @@ -3844,10 +4012,12 @@ CMD ["/usr/sbin/sshd", 
"-D"] let _cleanup = Cleanup; wait_for_ssh(30).expect("ssh should become available"); - let temp_root = std::env::temp_dir().join(format!("clawpal-remote-doctor-e2e-{}", Uuid::new_v4())); + let temp_root = + std::env::temp_dir().join(format!("clawpal-remote-doctor-e2e-{}", Uuid::new_v4())); let clawpal_dir = temp_root.join(".clawpal"); create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())).expect("set clawpal data"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); set_active_openclaw_home_override(None).expect("clear openclaw home override"); let pool = SshConnectionPool::new(); @@ -3916,7 +4086,10 @@ CMD ["/usr/sbin/sshd", "-D"] success: true, }) } - _ => Err(format!("unexpected planner request: {:?} round {}", kind, round)), + _ => Err(format!( + "unexpected planner request: {:?} round {}", + kind, round + )), } }, ) @@ -3933,7 +4106,10 @@ CMD ["/usr/sbin/sshd", "-D"] .expect("marker check"); assert_eq!(marker_result.exit_code, 0); - let log_path = clawpal_dir.join("doctor").join("remote").join(format!("{session_id}.jsonl")); + let log_path = clawpal_dir + .join("doctor") + .join("remote") + .join(format!("{session_id}.jsonl")); let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor log"); assert!(log_text.contains("\"planKind\":\"detect\"")); assert!(log_text.contains("\"planKind\":\"repair\"")); @@ -3954,7 +4130,8 @@ CMD ["/usr/sbin/sshd", "-D"] cleanup_e2e_container(); build_e2e_image().expect("docker build"); - start_e2e_container_with_env(&[("OPENCLAW_RESCUE_GATEWAY_ACTIVE", "0")]).expect("docker run"); + start_e2e_container_with_env(&[("OPENCLAW_RESCUE_GATEWAY_ACTIVE", "0")]) + .expect("docker run"); struct Cleanup; impl Drop for Cleanup { fn drop(&mut self) { @@ -3981,12 +4158,10 @@ CMD ["/usr/sbin/sshd", "-D"] assert!(error.message.contains("did not become active")); 
assert!(error.message.contains("configured_inactive")); - assert!( - error - .diagnostics - .iter() - .any(|result| result.argv.join(" ") == "manage_rescue_bot status rescue") - ); + assert!(error + .diagnostics + .iter() + .any(|result| result.argv.join(" ") == "manage_rescue_bot status rescue")); } #[tokio::test] @@ -4130,8 +4305,10 @@ CMD ["/usr/sbin/sshd", "-D"] let app = mock_app(); let app_handle = app.handle().clone(); app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir() - .join(format!("clawpal-remote-doctor-live-loop-{}", Uuid::new_v4())); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-live-loop-{}", + Uuid::new_v4() + )); let clawpal_dir = temp_root.join(".clawpal"); create_dir_all(&clawpal_dir).expect("create clawpal dir"); set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) @@ -4210,8 +4387,10 @@ CMD ["/usr/sbin/sshd", "-D"] let app = mock_app(); let app_handle = app.handle().clone(); app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir() - .join(format!("clawpal-remote-doctor-live-start-{}", Uuid::new_v4())); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-live-start-{}", + Uuid::new_v4() + )); let clawpal_dir = temp_root.join(".clawpal"); create_dir_all(&clawpal_dir).expect("create clawpal dir"); set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) @@ -4287,8 +4466,10 @@ CMD ["/usr/sbin/sshd", "-D"] let app = mock_app(); let app_handle = app.handle().clone(); app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir() - .join(format!("clawpal-remote-doctor-live-raw-config-{}", Uuid::new_v4())); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-live-raw-config-{}", + Uuid::new_v4() + )); let clawpal_dir = temp_root.join(".clawpal"); create_dir_all(&clawpal_dir).expect("create clawpal dir"); 
set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) @@ -4331,7 +4512,11 @@ CMD ["/usr/sbin/sshd", "-D"] .exec_login(&cfg.id, "python3 - <<'PY'\nimport json, pathlib\njson.load(open(pathlib.Path.home()/'.openclaw'/'openclaw.json'))\nprint('ok')\nPY") .await .expect("read repaired config"); - assert_eq!(repaired.exit_code, 0, "repaired config should be valid JSON: {}", repaired.stderr); + assert_eq!( + repaired.exit_code, 0, + "repaired config should be valid JSON: {}", + repaired.stderr + ); assert_eq!(repaired.stdout.trim(), "ok"); set_active_clawpal_data_override(None).expect("clear clawpal data"); From 15994d6d84d93400a05aa5e2ef47c9f04454ac40 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 14:53:34 +0800 Subject: [PATCH 06/20] docs: add remote doctor split design and plan --- .../2026-03-19-remote-doctor-split-design.md | 295 ++++++++++++ .../2026-03-19-remote-doctor-split-plan.md | 425 ++++++++++++++++++ 2 files changed, 720 insertions(+) create mode 100644 docs/plans/2026-03-19-remote-doctor-split-design.md create mode 100644 docs/plans/2026-03-19-remote-doctor-split-plan.md diff --git a/docs/plans/2026-03-19-remote-doctor-split-design.md b/docs/plans/2026-03-19-remote-doctor-split-design.md new file mode 100644 index 00000000..c7af696e --- /dev/null +++ b/docs/plans/2026-03-19-remote-doctor-split-design.md @@ -0,0 +1,295 @@ +# Remote Doctor Module Split Design + +日期:2026-03-19 + +## 1. 目标 + +将 [`src-tauri/src/remote_doctor.rs`](/Users/zz/clawpal/src-tauri/src/remote_doctor.rs) 从单文件重构为目录模块,降低文件长度和认知负担,同时允许内部命名与模块边界优化,但不改变外部行为、Tauri command 签名、前端事件名或协议语义。 + +本次工作的核心是“结构重组优先,行为保持稳定”。重构完成后,`start_remote_doctor_repair` 的调用方式、远程修复流程、日志格式、fallback 顺序和现有测试语义都应保持一致。 + +## 2. 
当前问题 + +[`src-tauri/src/remote_doctor.rs`](/Users/zz/clawpal/src-tauri/src/remote_doctor.rs) 目前约 4525 行,混杂了以下多种职责: + +- 协议和结果类型定义 +- gateway 配置与 device identity 持久化 +- session 日志和进度事件 +- target config 读写与 rescue 诊断 +- agent prompt 构造与解析 +- plan 请求、命令校验与执行 +- 三条 repair loop 与 protocol fallback 编排 +- 单元测试、集成测试和 live e2e 测试 + +这导致几个直接问题: + +- 很难定位某类逻辑的唯一落点 +- 内部 helper 命名越来越泛,职责感弱 +- 测试与实现耦在同一巨型文件内,阅读和维护成本高 +- 后续继续新增 protocol 或 plan 变种时,冲突概率很高 + +## 3. 设计原则 + +- 不修改对外入口:继续从 `crate::remote_doctor::start_remote_doctor_repair` 导出 +- 不修改前端依赖的事件名:继续发出 `doctor:remote-repair-progress` +- 不修改 session log 基本结构与协议 fallback 行为 +- 优先按职责拆模块,不在本轮引入额外抽象层级 +- 允许内部命名收紧,使函数名和所在模块匹配 +- 测试跟随职责迁移,但测试覆盖目标不下降 + +## 4. 方案对比 + +### 方案 A:最小拆分 + +只把测试和少量工具函数移出,主循环仍留在单文件中。 + +优点: + +- 改动最小 +- 编译错误面最小 + +缺点: + +- 入口文件仍然偏大 +- 主流程、agent、config、执行器仍耦合 +- 只能短期缓解长度问题 + +### 方案 B:职责拆分目录模块(推荐) + +将 `remote_doctor.rs` 重构为 `remote_doctor/` 目录,按数据定义、基础设施、planning、repair orchestration 和测试拆分。 + +优点: + +- 模块边界与现有代码天然一致 +- 风险可控,不需要引入大范围对象化重写 +- 后续新增 protocol 或测试会更容易落位 + +缺点: + +- 需要一次性调整较多 `use` 和可见性 +- 测试迁移时要小心 helper 暴露范围 + +### 方案 C:激进对象化 + +在方案 B 基础上再引入 `RemoteDoctorContext`、`ProtocolRunner`、`PlanExecutor` 等结构体,把大多数函数改为方法。 + +优点: + +- 长期可读性最好 +- 依赖关系最容易显式表达 + +缺点: + +- 本轮改动面过大 +- 行为回归风险明显增加 +- 容易把“拆文件”演变成“架构重写” + +## 5. 推荐方案 + +采用方案 B。 + +理由: + +- 当前文件已经天然分成几段:基础类型与 config、agent/planning、repair loops、tests +- 用户允许内部命名调整,但没有要求业务重写,说明本轮应控制行为变化 +- 方案 B 足以把文件长度和职责问题解决掉,同时保留当前函数式实现风格,降低回归风险 + +## 6. 
目标模块结构 + +重构后使用目录模块: + +- `src-tauri/src/remote_doctor/mod.rs` +- `src-tauri/src/remote_doctor/types.rs` +- `src-tauri/src/remote_doctor/session.rs` +- `src-tauri/src/remote_doctor/config.rs` +- `src-tauri/src/remote_doctor/agent.rs` +- `src-tauri/src/remote_doctor/plan.rs` +- `src-tauri/src/remote_doctor/repair_loops.rs` +- `src-tauri/src/remote_doctor/tests/` + +各模块职责如下。 + +### 6.1 `mod.rs` + +只负责: + +- 声明子模块 +- 汇总必要的 `use` +- 暴露 `start_remote_doctor_repair` +- 保留少量顶层常量和跨模块 glue code + +`mod.rs` 不再承载大块业务逻辑。 + +### 6.2 `types.rs` + +存放纯数据结构和小型无副作用 helper: + +- `TargetLocation` +- `PlanKind` +- `PlanCommand` +- `PlanResponse` +- `CommandResult` +- `RemoteDoctorProtocol` +- `ClawpalServerPlanResponse` +- `ClawpalServerPlanStep` +- `RemoteDoctorRepairResult` +- `RemoteDoctorProgressEvent` +- `ConfigExcerptContext` +- `RepairRoundObservation` +- `StoredRemoteDoctorIdentity` +- `parse_target_location` + +目标是让“协议定义”和“运行逻辑”解耦。 + +### 6.3 `session.rs` + +存放 session 级别基础设施: + +- log 目录解析 +- JSONL session log 追加 +- progress event 发射 +- 通用 completion result helper + +典型命名调整: + +- `append_remote_doctor_log` -> `append_session_log` +- `emit_progress` -> `emit_session_progress` + +### 6.4 `config.rs` + +存放配置、identity 和 target I/O: + +- gateway 配置读取 +- auth token 对应的 gateway credentials 构建 +- remote doctor identity 加载或生成 +- target config read/write/restart +- rescue diagnosis 与相关 context 提取 + +典型命名调整: + +- `remote_doctor_gateway_config` -> `load_gateway_config` +- `remote_doctor_gateway_credentials` -> `build_gateway_credentials` +- `load_or_create_remote_doctor_identity` 保留语义,但放入 config 模块 + +### 6.5 `agent.rs` + +存放 agent planner 专属逻辑: + +- protocol 相关 helper +- agent workspace bootstrap +- prompt 生成 +- agent JSON response 解析 +- bridge 辅助请求 + +典型命名调整: + +- `ensure_local_remote_doctor_agent_ready` -> `ensure_agent_workspace_ready` +- `remote_doctor_agent_workspace_files` -> `agent_workspace_bootstrap_files` + +### 6.6 `plan.rs` + +存放通用 planning / command execution 逻辑: + +- plan 
request +- clawpal-server plan request/result reporting +- invoke payload 解析 +- 命令 argv 校验 +- shell command 构造 +- plan command 执行 +- 本地 / 远程命令执行入口 + +这个模块是“planner 输出”和“实际执行器”之间的边界。 + +### 6.7 `repair_loops.rs` + +存放高层编排逻辑: + +- `run_remote_doctor_repair_loop` +- `run_clawpal_server_repair_loop` +- `run_agent_planner_repair_loop` +- `start_remote_doctor_repair_impl` + +这里集中处理: + +- detect / investigate / repair 的轮询 +- protocol fallback +- loop 终止条件 +- 跨模块 orchestration + +## 7. 依赖方向 + +保持单向依赖,避免循环引用: + +- `types` 被其他所有模块依赖 +- `session` 只依赖 `types` +- `config` 依赖 `types` 和少量 crate 级命令 +- `agent` 依赖 `types`、`config`、`session` +- `plan` 依赖 `types`、`session` +- `repair_loops` 依赖 `types`、`session`、`config`、`agent`、`plan` +- `mod.rs` 只负责组装和导出 + +## 8. 测试拆分 + +测试也按职责迁移,避免继续留在单个超大 `mod tests` 中。 + +建议拆法: + +- `tests/types.rs`:枚举、序列化、轻量 helper +- `tests/agent.rs`:prompt、agent response、workspace bootstrap +- `tests/plan.rs`:argv 校验、invoke 解析、shell escape、plan parsing +- `tests/repair_loops.rs`:round 控制、stall detection、fallback +- `tests/live_e2e.rs`:live gateway / docker / SSH 相关测试 + +目标不是改测试语义,而是把测试落到更清晰的位置。 + +## 9. 风险与控制 + +### 9.1 可见性膨胀 + +拆模块后容易把原本文件内私有 helper 大量变成 `pub(crate)`。 + +控制方式: + +- 默认保持私有 +- 只在跨模块确有需要时提升到 `pub(super)` 或 `pub(crate)` +- 优先通过模块内组合减少暴露面 + +### 9.2 命名漂移 + +内部重命名可能让搜索历史和现有 mental model 断裂。 + +控制方式: + +- 仅调整明显不贴职责的名字 +- 保留对外稳定名 +- 在迁移期让新名字和模块职责一一对应 + +### 9.3 测试迁移回归 + +大块测试拆分时最容易遗漏 helper 或 feature flag 条件。 + +控制方式: + +- 先机械迁移,后清理重复 helper +- 先跑 targeted tests,再跑整个 `remote_doctor` 相关测试集 +- live e2e 测试继续按环境变量守卫,不改变 skip 条件 + +## 10. 验收标准 + +重构完成后应满足: + +- `src-tauri/src/remote_doctor.rs` 不再存在为巨型单文件,改为目录模块 +- `mod.rs` 保持精简,主逻辑分散到职责模块 +- `start_remote_doctor_repair` 外部调用点无需改动 +- 现有 `remote_doctor` 单元测试和 e2e 测试保持通过或保持原有 skip 行为 +- 内部命名比现状更贴合模块职责 + +## 11. 
非目标 + +本轮不做: + +- 修改远程 doctor 协议 +- 改变 progress event payload 结构 +- 重写 repair loop 算法 +- 引入新的面向对象执行框架 +- 调整前端 Doctor 页行为 diff --git a/docs/plans/2026-03-19-remote-doctor-split-plan.md b/docs/plans/2026-03-19-remote-doctor-split-plan.md new file mode 100644 index 00000000..501f1e30 --- /dev/null +++ b/docs/plans/2026-03-19-remote-doctor-split-plan.md @@ -0,0 +1,425 @@ +# Remote Doctor Module Split Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Split the oversized remote doctor Rust implementation into focused modules, tighten internal naming, and keep the public behavior, command interface, events, and repair semantics unchanged. + +**Architecture:** Convert `src-tauri/src/remote_doctor.rs` into a directory module with clear responsibility boundaries: shared types, session infrastructure, config/identity helpers, agent planner helpers, plan execution, and repair-loop orchestration. Preserve the existing top-level entrypoint and move tests alongside the responsibilities they verify so the refactor stays behaviorally stable. + +**Tech Stack:** Rust, Tauri 2, Tokio, Serde, Cargo test + +--- + +### Task 1: Create the module shell + +**Files:** +- Create: `src-tauri/src/remote_doctor/mod.rs` +- Create: `src-tauri/src/remote_doctor/types.rs` +- Create: `src-tauri/src/remote_doctor/session.rs` +- Create: `src-tauri/src/remote_doctor/config.rs` +- Create: `src-tauri/src/remote_doctor/agent.rs` +- Create: `src-tauri/src/remote_doctor/plan.rs` +- Create: `src-tauri/src/remote_doctor/repair_loops.rs` +- Modify: `src-tauri/src/lib.rs` + +**Step 1: Write the failing test** + +Add a minimal compile-oriented unit test module in `src-tauri/src/remote_doctor/types.rs` that references `TargetLocation` and `PlanKind`, and wire `src-tauri/src/lib.rs` to `pub mod remote_doctor;`. 
+ +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::types` +Expected: FAIL because the directory module does not exist yet. + +**Step 3: Write minimal implementation** + +Create the directory module files with empty or placeholder implementations and move only the shared constants plus the public `start_remote_doctor_repair` export into the new `mod.rs`. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::types` +Expected: PASS with the placeholder module structure compiling. + +**Step 5: Commit** + +```bash +git add src-tauri/src/lib.rs src-tauri/src/remote_doctor +git commit -m "refactor: scaffold remote doctor module layout" +``` + +### Task 2: Move shared types and parsing helpers + +**Files:** +- Modify: `src-tauri/src/remote_doctor/types.rs` +- Modify: `src-tauri/src/remote_doctor/mod.rs` +- Test: `src-tauri/src/remote_doctor/types.rs` + +**Step 1: Write the failing test** + +Add tests for: + +- `parse_target_location("local_openclaw")` +- `parse_target_location("remote_openclaw")` +- `parse_target_location("elsewhere")` +- `RepairRoundObservation::new(...)` generating a stable diagnosis signature + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::types` +Expected: FAIL because the moved types and helpers are not implemented in the new module. + +**Step 3: Write minimal implementation** + +Move these definitions into `types.rs`: + +- `TargetLocation` +- `PlanKind` +- `PlanCommand` +- `PlanResponse` +- `CommandResult` +- `RemoteDoctorProtocol` +- `ClawpalServerPlanResponse` +- `ClawpalServerPlanStep` +- `RemoteDoctorRepairResult` +- `RemoteDoctorProgressEvent` +- `ConfigExcerptContext` +- `RepairRoundObservation` +- `StoredRemoteDoctorIdentity` +- `parse_target_location` + +Re-export only the pieces other modules need from `mod.rs`. 
+ +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::types` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor/types.rs src-tauri/src/remote_doctor/mod.rs +git commit -m "refactor: move remote doctor shared types" +``` + +### Task 3: Move session logging and completion helpers + +**Files:** +- Modify: `src-tauri/src/remote_doctor/session.rs` +- Modify: `src-tauri/src/remote_doctor/mod.rs` +- Test: `src-tauri/src/remote_doctor/session.rs` + +**Step 1: Write the failing test** + +Add tests for: + +- `append_session_log` writing a JSONL line into the expected temp directory +- `emit_session_progress` building the expected `planKind` string +- completion helpers preserving `session_id`, `round`, and `last_command` + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::session` +Expected: FAIL because logging and completion helpers still live elsewhere. + +**Step 3: Write minimal implementation** + +Move and rename: + +- `remote_doctor_log_dir` -> `session_log_dir` +- `append_remote_doctor_log` -> `append_session_log` +- `emit_progress` -> `emit_session_progress` +- `result_for_completion` +- `result_for_completion_with_warnings` + +Update internal call sites to use the new names. 
+ +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::session` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor/session.rs src-tauri/src/remote_doctor/mod.rs +git commit -m "refactor: extract remote doctor session helpers" +``` + +### Task 4: Move config, identity, and target I/O helpers + +**Files:** +- Modify: `src-tauri/src/remote_doctor/config.rs` +- Modify: `src-tauri/src/remote_doctor/mod.rs` +- Test: `src-tauri/src/remote_doctor/config.rs` + +**Step 1: Write the failing test** + +Add tests for: + +- `load_gateway_config` preferring app preferences over config file port +- `build_gateway_credentials` returning `None` when the token override is empty +- `load_or_create_remote_doctor_identity` persisting a usable identity +- `build_config_excerpt_context` capturing parse errors for invalid JSON + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::config` +Expected: FAIL because the helpers have not been moved or renamed yet. + +**Step 3: Write minimal implementation** + +Move and rename into `config.rs`: + +- `remote_doctor_gateway_config` -> `load_gateway_config` +- `remote_doctor_gateway_credentials` -> `build_gateway_credentials` +- `remote_doctor_identity_path` +- `load_or_create_remote_doctor_identity` +- `read_target_config` +- `read_target_config_raw` +- `build_config_excerpt_context` +- `config_excerpt_log_summary` +- `empty_config_excerpt_context` +- `empty_diagnosis` +- `write_target_config` +- `write_target_config_raw` +- `restart_target_gateway` +- rescue diagnosis helpers and rescue preflight helpers + +Keep non-I/O pure diagnosis summarizers near the same module if they are only used by config and repair loops. 
+ +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::config` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor/config.rs src-tauri/src/remote_doctor/mod.rs +git commit -m "refactor: extract remote doctor config and target io" +``` + +### Task 5: Move agent-planner-specific helpers + +**Files:** +- Modify: `src-tauri/src/remote_doctor/agent.rs` +- Modify: `src-tauri/src/remote_doctor/mod.rs` +- Test: `src-tauri/src/remote_doctor/agent.rs` + +**Step 1: Write the failing test** + +Add tests for: + +- `ensure_agent_workspace_ready` writing bootstrap files +- `build_agent_plan_prompt` containing target location, config excerpt, and command constraints +- `parse_agent_plan_response` extracting the JSON payload correctly +- `next_agent_plan_kind_for_round` switching from investigate to repair after prior results + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::agent` +Expected: FAIL because the planner helpers remain in the monolithic file. 
+ +**Step 3: Write minimal implementation** + +Move and rename into `agent.rs`: + +- protocol selection helpers +- next-plan-kind helpers +- agent id and session key helpers +- workspace bootstrap file helper +- `ensure_local_remote_doctor_agent_ready` -> `ensure_agent_workspace_ready` +- bridge connection helper +- `extract_json_block` +- `build_agent_plan_prompt` +- `parse_agent_plan_response` +- `run_agent_request_with_bridge` + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::agent` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor/agent.rs src-tauri/src/remote_doctor/mod.rs +git commit -m "refactor: extract remote doctor agent planner helpers" +``` + +### Task 6: Move plan parsing, validation, and command execution + +**Files:** +- Modify: `src-tauri/src/remote_doctor/plan.rs` +- Modify: `src-tauri/src/remote_doctor/mod.rs` +- Test: `src-tauri/src/remote_doctor/plan.rs` + +**Step 1: Write the failing test** + +Add tests for: + +- `build_shell_command` escaping single quotes +- `parse_invoke_argv` supporting command-string payloads +- `validate_plan_command_argv` rejecting unsupported `openclaw` commands +- `parse_plan_response` filling in a generated `plan_id` when missing + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::plan` +Expected: FAIL because the execution and validation helpers have not moved yet. 
+ +**Step 3: Write minimal implementation** + +Move into `plan.rs`: + +- `request_plan` +- `request_clawpal_server_plan` +- step/final result reporting helpers +- `parse_plan_response` +- `parse_invoke_argv` +- `execute_clawpal_command` +- `execute_clawpal_doctor_command` +- `config_read_response` +- `decode_base64_config_payload` +- `execute_invoke_payload` +- `shell_escape` +- `build_shell_command` +- `execute_command` +- validation helpers +- `execute_plan_command` +- `command_result_stdout` + +Keep the file focused on “planner output to executable command results”. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::plan` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor/plan.rs src-tauri/src/remote_doctor/mod.rs +git commit -m "refactor: extract remote doctor plan execution" +``` + +### Task 7: Move repair loops and public entrypoint orchestration + +**Files:** +- Modify: `src-tauri/src/remote_doctor/repair_loops.rs` +- Modify: `src-tauri/src/remote_doctor/mod.rs` +- Test: `src-tauri/src/remote_doctor/repair_loops.rs` + +**Step 1: Write the failing test** + +Add tests for: + +- generic remote doctor loop stopping on a healthy detect plan +- round limit failure surfacing the expected message +- agent or legacy fallback preserving the same fallback order as before + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::repair_loops` +Expected: FAIL because the orchestration code still lives in the old monolithic file. + +**Step 3: Write minimal implementation** + +Move into `repair_loops.rs`: + +- `run_remote_doctor_repair_loop` +- `run_clawpal_server_repair_loop` +- `run_agent_planner_repair_loop` +- `start_remote_doctor_repair_impl` + +Leave `#[tauri::command] pub async fn start_remote_doctor_repair(...)` in `mod.rs` as the only public entrypoint wrapper. 
+ +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::repair_loops` +Expected: PASS + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor/repair_loops.rs src-tauri/src/remote_doctor/mod.rs +git commit -m "refactor: extract remote doctor repair orchestration" +``` + +### Task 8: Split the tests by responsibility + +**Files:** +- Create: `src-tauri/src/remote_doctor/tests/mod.rs` +- Create: `src-tauri/src/remote_doctor/tests/types.rs` +- Create: `src-tauri/src/remote_doctor/tests/session.rs` +- Create: `src-tauri/src/remote_doctor/tests/config.rs` +- Create: `src-tauri/src/remote_doctor/tests/agent.rs` +- Create: `src-tauri/src/remote_doctor/tests/plan.rs` +- Create: `src-tauri/src/remote_doctor/tests/repair_loops.rs` +- Create: `src-tauri/src/remote_doctor/tests/live_e2e.rs` +- Delete: `src-tauri/src/remote_doctor.rs` + +**Step 1: Write the failing test** + +Move one existing assertion from the monolithic `mod tests` into each new test file and keep the old monolithic block temporarily disabled so compilation fails until module wiring is fixed. + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::tests` +Expected: FAIL because the new test modules are not fully wired and some helpers are not imported yet. + +**Step 3: Write minimal implementation** + +Split the existing tests into themed files: + +- pure type and parser tests +- session/logging tests +- config and identity tests +- agent planner tests +- plan execution and validation tests +- repair loop tests +- live e2e tests guarded by the same environment checks as today + +Delete the old monolithic `remote_doctor.rs` only after the directory module fully compiles. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::tests` +Expected: PASS, with live e2e tests still skipping when their environment variables are absent. 
+ +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor src-tauri/src/lib.rs +git commit -m "refactor: split remote doctor tests by module" +``` + +### Task 9: Run focused regression verification + +**Files:** +- Modify: `docs/plans/2026-03-19-remote-doctor-split-plan.md` + +**Step 1: Write the failing test** + +No new test code. This task verifies the refactor did not change behavior. + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor` +Expected: If anything still fails, fix the failing import, visibility, or naming regression before proceeding. + +**Step 3: Write minimal implementation** + +Fix only the regressions surfaced by the focused remote doctor test run. Do not introduce unrelated cleanup. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor` +Expected: PASS, with environment-gated live tests skipping when not configured. + +**Step 5: Commit** + +```bash +git add src-tauri/src/remote_doctor src-tauri/src/lib.rs docs/plans/2026-03-19-remote-doctor-split-plan.md +git commit -m "test: verify remote doctor module split" +``` From dd111f58f1c54637eca4b20ccceb5d92f3716fb4 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 15:00:32 +0800 Subject: [PATCH 07/20] refactor: scaffold remote doctor modules --- src-tauri/src/remote_doctor/agent.rs | 1 + src-tauri/src/remote_doctor/config.rs | 1 + src-tauri/src/remote_doctor/legacy.rs | 4277 +++++++++++++++++++ src-tauri/src/remote_doctor/mod.rs | 10 + src-tauri/src/remote_doctor/plan.rs | 1 + src-tauri/src/remote_doctor/repair_loops.rs | 1 + src-tauri/src/remote_doctor/session.rs | 171 + src-tauri/src/remote_doctor/types.rs | 243 ++ 8 files changed, 4705 insertions(+) create mode 100644 src-tauri/src/remote_doctor/agent.rs create mode 100644 src-tauri/src/remote_doctor/config.rs create mode 100644 src-tauri/src/remote_doctor/legacy.rs create mode 100644 src-tauri/src/remote_doctor/mod.rs create mode 100644 
src-tauri/src/remote_doctor/plan.rs create mode 100644 src-tauri/src/remote_doctor/repair_loops.rs create mode 100644 src-tauri/src/remote_doctor/session.rs create mode 100644 src-tauri/src/remote_doctor/types.rs diff --git a/src-tauri/src/remote_doctor/agent.rs b/src-tauri/src/remote_doctor/agent.rs new file mode 100644 index 00000000..540f4356 --- /dev/null +++ b/src-tauri/src/remote_doctor/agent.rs @@ -0,0 +1 @@ +// Placeholder for agent-planner-specific helpers. diff --git a/src-tauri/src/remote_doctor/config.rs b/src-tauri/src/remote_doctor/config.rs new file mode 100644 index 00000000..b2b7170b --- /dev/null +++ b/src-tauri/src/remote_doctor/config.rs @@ -0,0 +1 @@ +// Placeholder for remote doctor configuration and target I/O helpers. diff --git a/src-tauri/src/remote_doctor/legacy.rs b/src-tauri/src/remote_doctor/legacy.rs new file mode 100644 index 00000000..e4b689b3 --- /dev/null +++ b/src-tauri/src/remote_doctor/legacy.rs @@ -0,0 +1,4277 @@ +use std::fs::create_dir_all; +use std::io::Write; +use std::path::PathBuf; +use std::process::Command; +use std::time::Instant; + +use base64::Engine; +use ed25519_dalek::pkcs8::EncodePrivateKey; +use ed25519_dalek::SigningKey; +use serde_json::{json, Value}; +use sha2::{Digest, Sha256}; +use tauri::{AppHandle, Manager, Runtime, State}; +use uuid::Uuid; + +use super::session::{ + append_session_log as append_remote_doctor_log, + emit_session_progress as emit_progress, result_for_completion, + result_for_completion_with_warnings, +}; +use super::types::{ + diagnosis_issue_summaries, parse_target_location, ClawpalServerPlanResponse, + ClawpalServerPlanStep, CommandResult, ConfigExcerptContext, PlanCommand, PlanKind, + PlanResponse, RemoteDoctorProtocol, RemoteDoctorRepairResult, + RepairRoundObservation, StoredRemoteDoctorIdentity, TargetLocation, +}; +use crate::bridge_client::BridgeClient; +use crate::cli_runner::{get_active_openclaw_home_override, run_openclaw, run_openclaw_remote}; +use 
crate::commands::logs::log_dev; +use crate::commands::preferences::load_app_preferences_from_paths; +use crate::commands::{agent::create_agent, agent::setup_agent_identity}; +use crate::commands::{ + diagnose_primary_via_rescue, manage_rescue_bot, read_raw_config, + remote_diagnose_primary_via_rescue, remote_manage_rescue_bot, remote_read_raw_config, + remote_restart_gateway, remote_write_raw_config, restart_gateway, RescuePrimaryDiagnosisResult, +}; +use crate::config_io::read_openclaw_config; +use crate::models::resolve_paths; +use crate::node_client::{GatewayCredentials, NodeClient}; +use crate::ssh::SshConnectionPool; + +const DEFAULT_GATEWAY_HOST: &str = "127.0.0.1"; +const DEFAULT_GATEWAY_PORT: u16 = 18789; +const DEFAULT_DETECT_METHOD: &str = "doctor.get_detection_plan"; +const DEFAULT_REPAIR_METHOD: &str = "doctor.get_repair_plan"; +const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; +const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; +const REMOTE_DOCTOR_AGENT_ID: &str = "clawpal-remote-doctor"; + +#[derive(Debug, Clone)] +struct RemoteDoctorGatewayConfig { + url: String, + auth_token_override: Option, +} + +fn remote_doctor_gateway_config() -> Result { + let paths = resolve_paths(); + let app_preferences = load_app_preferences_from_paths(&paths); + if let Some(url) = app_preferences.remote_doctor_gateway_url { + return Ok(RemoteDoctorGatewayConfig { + url, + auth_token_override: app_preferences.remote_doctor_gateway_auth_token, + }); + } + let configured_port = std::fs::read_to_string(&paths.config_path) + .ok() + .and_then(|text| serde_json::from_str::(&text).ok()) + .and_then(|config| { + config + .get("gateway") + .and_then(|gateway| gateway.get("port")) + .and_then(|value| value.as_u64()) + }) + .map(|value| value as u16) + .unwrap_or(DEFAULT_GATEWAY_PORT); + Ok(RemoteDoctorGatewayConfig { + url: format!("ws://{DEFAULT_GATEWAY_HOST}:{configured_port}"), + auth_token_override: app_preferences.remote_doctor_gateway_auth_token, + }) +} + +fn 
remote_doctor_gateway_credentials( + auth_token_override: Option<&str>, +) -> Result, String> { + let Some(token) = auth_token_override.filter(|value| !value.trim().is_empty()) else { + return Ok(None); + }; + let identity = load_or_create_remote_doctor_identity()?; + Ok(Some(GatewayCredentials { + token: token.to_string(), + device_id: identity.device_id, + private_key_pem: identity.private_key_pem, + })) +} + +fn remote_doctor_identity_path() -> PathBuf { + resolve_paths() + .clawpal_dir + .join("remote-doctor") + .join("device-identity.json") +} + +fn load_or_create_remote_doctor_identity() -> Result { + let path = remote_doctor_identity_path(); + if let Ok(text) = std::fs::read_to_string(&path) { + if let Ok(identity) = serde_json::from_str::(&text) { + if identity.version == 1 + && !identity.device_id.trim().is_empty() + && !identity.private_key_pem.trim().is_empty() + { + return Ok(identity); + } + } + } + + let parent = path + .parent() + .ok_or("Failed to resolve remote doctor identity directory")?; + create_dir_all(parent) + .map_err(|e| format!("Failed to create remote doctor identity dir: {e}"))?; + + let mut secret = [0u8; 32]; + getrandom::getrandom(&mut secret) + .map_err(|e| format!("Failed to generate remote doctor device secret: {e}"))?; + let signing_key = SigningKey::from_bytes(&secret); + let raw_public = signing_key.verifying_key().to_bytes(); + let device_id = Sha256::digest(raw_public) + .iter() + .map(|b| format!("{b:02x}")) + .collect::(); + let private_key_pem = signing_key + .to_pkcs8_pem(Default::default()) + .map_err(|e| format!("Failed to encode remote doctor private key: {e}"))? + .to_string(); + let created_at_ms = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map_err(|e| format!("Failed to get system time: {e}"))? 
+ .as_millis() as u64; + let identity = StoredRemoteDoctorIdentity { + version: 1, + created_at_ms, + device_id, + private_key_pem, + }; + let text = serde_json::to_string_pretty(&identity) + .map_err(|e| format!("Failed to serialize remote doctor identity: {e}"))?; + std::fs::write(&path, format!("{text}\n")) + .map_err(|e| format!("Failed to persist remote doctor identity: {e}"))?; + Ok(identity) +} + +fn detect_method_name() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_DETECT_METHOD") + .unwrap_or_else(|_| DEFAULT_DETECT_METHOD.to_string()) +} + +fn repair_method_name() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_REPAIR_METHOD") + .unwrap_or_else(|_| DEFAULT_REPAIR_METHOD.to_string()) +} + +fn configured_remote_doctor_protocol() -> Option { + match std::env::var("CLAWPAL_REMOTE_DOCTOR_PROTOCOL") + .ok() + .as_deref() + .map(str::trim) + { + Some("agent") => Some(RemoteDoctorProtocol::AgentPlanner), + Some("legacy") | Some("legacy_doctor") => Some(RemoteDoctorProtocol::LegacyDoctor), + Some("clawpal_server") => Some(RemoteDoctorProtocol::ClawpalServer), + _ => None, + } +} + +fn default_remote_doctor_protocol() -> RemoteDoctorProtocol { + RemoteDoctorProtocol::AgentPlanner +} + +fn protocol_requires_bridge(protocol: RemoteDoctorProtocol) -> bool { + matches!(protocol, RemoteDoctorProtocol::AgentPlanner) +} + +fn protocol_runs_rescue_preflight(protocol: RemoteDoctorProtocol) -> bool { + matches!(protocol, RemoteDoctorProtocol::LegacyDoctor) +} + +fn next_agent_plan_kind(diagnosis: &RescuePrimaryDiagnosisResult) -> PlanKind { + next_agent_plan_kind_for_round(diagnosis, &[]) +} + +fn next_agent_plan_kind_for_round( + diagnosis: &RescuePrimaryDiagnosisResult, + previous_results: &[CommandResult], +) -> PlanKind { + if diagnosis + .issues + .iter() + .any(|issue| issue.code == "primary.config.unreadable") + { + if !previous_results.is_empty() { + return PlanKind::Repair; + } + PlanKind::Investigate + } else { + PlanKind::Repair + } +} + +fn 
remote_doctor_agent_id() -> &'static str { + REMOTE_DOCTOR_AGENT_ID +} + +fn remote_doctor_agent_session_key(session_id: &str) -> String { + format!("agent:{}:{session_id}", remote_doctor_agent_id()) +} + +fn remote_doctor_agent_workspace_files() -> [(&'static str, &'static str); 4] { + [ + ( + "AGENTS.md", + "# Remote Doctor\nUse this workspace only for ClawPal remote doctor planning sessions.\nReturn structured, operational answers.\n", + ), + ( + "BOOTSTRAP.md", + "Bootstrap is already complete for this workspace.\nDo not ask who you are or who the user is.\nUse IDENTITY.md and USER.md as the canonical identity context.\n", + ), + ( + "USER.md", + "- Name: ClawPal Desktop\n- Role: desktop repair orchestrator\n- Preferences: concise, operational, no bootstrap chatter\n", + ), + ( + "HEARTBEAT.md", + "Status: active remote-doctor planning workspace.\n", + ), + ] +} + +fn gateway_url_is_local(url: &str) -> bool { + let rest = url + .split_once("://") + .map(|(_, remainder)| remainder) + .unwrap_or(url); + let host_port = rest.split('/').next().unwrap_or(rest); + let host = host_port + .strip_prefix('[') + .and_then(|value| value.split_once(']').map(|(host, _)| host)) + .unwrap_or_else(|| host_port.split(':').next().unwrap_or(host_port)); + matches!(host, "127.0.0.1" | "localhost") +} + +fn ensure_local_remote_doctor_agent_ready() -> Result<(), String> { + let agent_id = remote_doctor_agent_id().to_string(); + if let Err(error) = create_agent(agent_id.clone(), None, Some(true)) { + if !error.contains("already exists") { + return Err(format!("Failed to create remote doctor agent: {error}")); + } + } + + setup_agent_identity(agent_id.clone(), "ClawPal Remote Doctor".to_string(), None)?; + + let paths = resolve_paths(); + let cfg = read_openclaw_config(&paths)?; + let workspace = + clawpal_core::doctor::resolve_agent_workspace_from_config(&cfg, &agent_id, None) + .map(|path| shellexpand::tilde(&path).to_string())?; + create_dir_all(&workspace) + .map_err(|error| 
format!("Failed to create remote doctor workspace: {error}"))?; + + for (file_name, content) in remote_doctor_agent_workspace_files() { + std::fs::write(PathBuf::from(&workspace).join(file_name), content) + .map_err(|error| format!("Failed to write remote doctor {file_name}: {error}"))?; + } + + Ok(()) +} + +async fn ensure_agent_bridge_connected( + app: &AppHandle, + bridge: &BridgeClient, + gateway_url: &str, + auth_token_override: Option<&str>, + session_id: &str, +) { + if bridge.is_connected().await { + return; + } + + let connect_result = bridge + .connect( + gateway_url, + app.clone(), + remote_doctor_gateway_credentials(auth_token_override) + .ok() + .flatten(), + ) + .await; + if let Err(error) = connect_result { + append_remote_doctor_log( + session_id, + json!({ + "event": "bridge_connect_failed", + "reason": error, + }), + ); + } +} + +async fn ensure_remote_target_connected( + pool: &SshConnectionPool, + instance_id: &str, +) -> Result<(), String> { + let candidate_ids = remote_target_host_id_candidates(instance_id); + if candidate_ids.is_empty() { + return Ok(()); + } + for candidate in &candidate_ids { + if pool.is_connected(candidate).await { + return Ok(()); + } + } + + let hosts = crate::commands::ssh::read_hosts_from_registry()?; + let host = hosts + .into_iter() + .find(|candidate| candidate_ids.iter().any(|id| id == &candidate.id)) + .ok_or_else(|| format!("No SSH host config with id: {}", candidate_ids[0]))?; + if let Some(passphrase) = host.passphrase.as_deref().filter(|value| !value.is_empty()) { + pool.connect_with_passphrase(&host, Some(passphrase)).await + } else { + pool.connect(&host).await + } +} + +fn remote_target_host_id_candidates(instance_id: &str) -> Vec { + let mut candidates = Vec::new(); + let trimmed = instance_id.trim(); + if !trimmed.is_empty() { + candidates.push(trimmed.to_string()); + } + if let Some(stripped) = trimmed.strip_prefix("ssh:").map(str::trim) { + if !stripped.is_empty() && !candidates.iter().any(|value| 
value == stripped) { + candidates.push(stripped.to_string()); + } + } + candidates +} + +fn primary_remote_target_host_id(instance_id: &str) -> Result { + remote_target_host_id_candidates(instance_id) + .into_iter() + .next() + .ok_or_else(|| "Remote Doctor repair requires an ssh instance id".to_string()) +} + +fn is_unknown_method_error(error: &str) -> bool { + error.contains("unknown method") + || error.contains("\"code\":\"INVALID_REQUEST\"") + || error.contains("\"code\": \"INVALID_REQUEST\"") +} + +fn diagnosis_has_only_non_auto_fixable_issues(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { + !diagnosis.issues.is_empty() && diagnosis.issues.iter().all(|issue| !issue.auto_fixable) +} + +async fn run_rescue_diagnosis( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + match target_location { + TargetLocation::LocalOpenclaw => diagnose_primary_via_rescue(None, None).await, + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_diagnose_primary_via_rescue( + app.state::(), + host_id, + None, + None, + ) + .await + } + } +} + +async fn read_target_config( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + let raw = match target_location { + TargetLocation::LocalOpenclaw => read_raw_config()?, + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_read_raw_config(app.state::(), host_id).await? 
+ } + }; + serde_json::from_str::(&raw) + .map_err(|error| format!("Failed to parse target config: {error}")) +} + +async fn read_target_config_raw( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + match target_location { + TargetLocation::LocalOpenclaw => read_raw_config(), + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_read_raw_config(app.state::(), host_id).await + } + } +} + +fn build_config_excerpt_context(raw: &str) -> ConfigExcerptContext { + match serde_json::from_str::(raw) { + Ok(config_excerpt) => ConfigExcerptContext { + config_excerpt, + config_excerpt_raw: None, + config_parse_error: None, + }, + Err(error) => ConfigExcerptContext { + config_excerpt: Value::Null, + config_excerpt_raw: Some(raw.to_string()), + config_parse_error: Some(format!("Failed to parse target config: {error}")), + }, + } +} + +fn config_excerpt_log_summary(context: &ConfigExcerptContext) -> Value { + json!({ + "configExcerptPresent": !context.config_excerpt.is_null(), + "configExcerptBytes": serde_json::to_string(&context.config_excerpt).ok().map(|text| text.len()).unwrap_or(0), + "configExcerptRawPresent": context.config_excerpt_raw.as_ref().map(|text| !text.trim().is_empty()).unwrap_or(false), + "configExcerptRawBytes": context.config_excerpt_raw.as_ref().map(|text| text.len()).unwrap_or(0), + "configParseError": context.config_parse_error, + }) +} + +fn empty_config_excerpt_context() -> ConfigExcerptContext { + ConfigExcerptContext { + config_excerpt: Value::Null, + config_excerpt_raw: None, + config_parse_error: None, + } +} + +fn empty_diagnosis() -> RescuePrimaryDiagnosisResult { + serde_json::from_value(json!({ + "status": "healthy", + "checkedAt": "2026-03-18T00:00:00Z", + "targetProfile": "primary", + "rescueProfile": "rescue", + "summary": { + "status": "healthy", + "headline": "Healthy", + "recommendedAction": null, + "fixableIssueCount": 0, + "selectedFixIssueIds": 
[] + }, + "issues": [], + "sections": [] + })) + .expect("empty diagnosis should deserialize") +} + +async fn write_target_config( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, + config: &Value, +) -> Result<(), String> { + let text = serde_json::to_string_pretty(config).map_err(|error| error.to_string())?; + let validated = clawpal_core::config::validate_config_json(&text) + .map_err(|error| format!("Invalid config after remote doctor patch: {error}"))?; + let validated_text = + serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; + match target_location { + TargetLocation::LocalOpenclaw => { + let paths = resolve_paths(); + crate::config_io::write_text(&paths.config_path, &validated_text)?; + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_write_raw_config(app.state::(), host_id, validated_text) + .await?; + } + } + Ok(()) +} + +async fn write_target_config_raw( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, + text: &str, +) -> Result<(), String> { + let validated = clawpal_core::config::validate_config_json(text) + .map_err(|error| format!("Invalid raw config payload: {error}"))?; + let validated_text = + serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; + match target_location { + TargetLocation::LocalOpenclaw => { + let paths = resolve_paths(); + crate::config_io::write_text(&paths.config_path, &validated_text)?; + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_write_raw_config(app.state::(), host_id, validated_text) + .await?; + } + } + Ok(()) +} + +async fn restart_target_gateway( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result<(), String> { + match target_location { + TargetLocation::LocalOpenclaw => { + restart_gateway().await?; + } + TargetLocation::RemoteOpenclaw => { + let host_id = 
primary_remote_target_host_id(instance_id)?; + remote_restart_gateway(app.state::(), host_id).await?; + } + } + Ok(()) +} + +fn diagnosis_is_healthy(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { + diagnosis.status == "healthy" + && diagnosis.summary.status == "healthy" + && diagnosis.issues.is_empty() +} + +fn diagnosis_context(diagnosis: &RescuePrimaryDiagnosisResult) -> Value { + json!({ + "status": diagnosis.status, + "summary": { + "status": diagnosis.summary.status, + "headline": diagnosis.summary.headline, + "recommendedAction": diagnosis.summary.recommended_action, + "fixableIssueCount": diagnosis.summary.fixable_issue_count, + "selectedFixIssueIds": diagnosis.summary.selected_fix_issue_ids, + }, + "issues": diagnosis.issues, + "sections": diagnosis.sections, + }) +} + +fn diagnosis_missing_rescue_profile(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { + diagnosis + .issues + .iter() + .any(|issue| issue.code == "rescue.profile.missing") +} + +fn diagnosis_unhealthy_rescue_gateway(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { + diagnosis + .issues + .iter() + .any(|issue| issue.code == "rescue.gateway.unhealthy") +} + +fn rescue_setup_command_result( + action: &str, + profile: &str, + configured: bool, + active: bool, + runtime_state: &str, +) -> CommandResult { + CommandResult { + argv: vec!["manage_rescue_bot".into(), action.into(), profile.into()], + exit_code: Some(0), + stdout: format!( + "configured={} active={} runtimeState={}", + configured, active, runtime_state + ), + stderr: String::new(), + duration_ms: 0, + timed_out: false, + } +} + +fn rescue_bot_manage_command_result( + result: &crate::commands::RescueBotManageResult, +) -> CommandResult { + CommandResult { + argv: vec![ + "manage_rescue_bot".into(), + result.action.clone(), + result.profile.clone(), + ], + exit_code: Some(if result.active || result.configured { + 0 + } else { + 1 + }), + stdout: format!( + "configured={} active={} runtimeState={} rescuePort={} mainPort={} 
commands={}", + result.configured, + result.active, + result.runtime_state, + result.rescue_port, + result.main_port, + result.commands.len() + ), + stderr: String::new(), + duration_ms: 0, + timed_out: false, + } +} + +fn rescue_activation_diagnostic_commands(profile: &str) -> Vec> { + vec![ + vec!["manage_rescue_bot".into(), "status".into(), profile.into()], + vec![ + "openclaw".into(), + "--profile".into(), + profile.into(), + "gateway".into(), + "status".into(), + ], + vec![ + "openclaw".into(), + "--profile".into(), + profile.into(), + "config".into(), + "get".into(), + "gateway.port".into(), + "--json".into(), + ], + ] +} + +fn rescue_activation_error_message( + profile: &str, + configured: bool, + runtime_state: &str, + suggested_checks: &[String], +) -> String { + let suffix = if suggested_checks.is_empty() { + String::new() + } else { + format!(" Suggested checks: {}.", suggested_checks.join("; ")) + }; + format!( + "Rescue profile \"{}\" was {} but did not become active (runtime state: {}).", + profile, + if configured { + "configured" + } else { + "not configured" + }, + runtime_state + ) + &suffix +} + +async fn execute_rescue_activation_diagnostic_command( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> CommandResult { + let started = Instant::now(); + if argv.first().map(String::as_str) == Some("manage_rescue_bot") + && argv.get(1).map(String::as_str) == Some("status") + { + let profile = argv + .get(2) + .map(String::as_str) + .filter(|value| !value.trim().is_empty()) + .unwrap_or("rescue"); + let result = match target_location { + TargetLocation::LocalOpenclaw => { + manage_rescue_bot("status".into(), Some(profile.to_string()), None).await + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id); + match host_id { + Ok(host_id) => { + remote_manage_rescue_bot( + app.state::(), + host_id, + "status".into(), + Some(profile.to_string()), + None, + ) + .await 
}
                    Err(error) => Err(error),
                }
            }
        };
        return match result {
            Ok(result) => {
                let mut command_result = rescue_bot_manage_command_result(&result);
                command_result.duration_ms = started.elapsed().as_millis() as u64;
                command_result
            }
            Err(error) => CommandResult {
                argv: argv.to_vec(),
                exit_code: Some(1),
                stdout: String::new(),
                stderr: error,
                duration_ms: started.elapsed().as_millis() as u64,
                timed_out: false,
            },
        };
    }

    // Fall through: execute the diagnostic argv like any other plan command.
    match execute_command(
        &app.state::<SshConnectionPool>(),
        target_location,
        instance_id,
        argv,
    )
    .await
    {
        Ok(result) => result,
        Err(error) => CommandResult {
            argv: argv.to_vec(),
            exit_code: Some(1),
            stdout: String::new(),
            stderr: error,
            duration_ms: started.elapsed().as_millis() as u64,
            timed_out: false,
        },
    }
}

/// Runs every rescue-activation diagnostic command and collects the results.
async fn collect_rescue_activation_failure_diagnostics(
    app: &AppHandle,
    target_location: TargetLocation,
    instance_id: &str,
    profile: &str,
) -> Vec<CommandResult> {
    let mut results = Vec::new();
    for argv in rescue_activation_diagnostic_commands(profile) {
        results.push(
            execute_rescue_activation_diagnostic_command(app, target_location, instance_id, &argv)
                .await,
        );
    }
    results
}

/// Details of a failed rescue activation: message, the synthesized activation
/// command result, and any follow-up diagnostic command results.
struct RescueActivationFailure {
    message: String,
    activation_result: CommandResult,
    diagnostics: Vec<CommandResult>,
}

/// Activates the "rescue" profile on the target and verifies it became
/// active, gathering diagnostics into the failure value when it did not.
async fn ensure_rescue_profile_ready(
    app: &AppHandle,
    target_location: TargetLocation,
    instance_id: &str,
) -> Result<CommandResult, RescueActivationFailure> {
    let started = Instant::now();
    let result = match target_location {
        TargetLocation::LocalOpenclaw => {
            manage_rescue_bot("activate".into(), Some("rescue".into()), None)
                .await
                .map_err(|error| RescueActivationFailure {
                    message: error,
                    activation_result: rescue_setup_command_result(
                        "activate",
                        "rescue",
                        false,
                        false,
                        "activation_failed",
                    ),
                    diagnostics: Vec::new(),
                })?
}
        TargetLocation::RemoteOpenclaw => {
            let host_id = primary_remote_target_host_id(instance_id).map_err(|error| {
                RescueActivationFailure {
                    message: error,
                    activation_result: rescue_setup_command_result(
                        "activate",
                        "rescue",
                        false,
                        false,
                        "activation_failed",
                    ),
                    diagnostics: Vec::new(),
                }
            })?;
            remote_manage_rescue_bot(
                // NOTE(review): turbofish type lost in transit; assumed SSH pool
                // state — confirm against the repo.
                app.state::<SshConnectionPool>(),
                host_id,
                "activate".into(),
                Some("rescue".into()),
                None,
            )
            .await
            .map_err(|error| RescueActivationFailure {
                message: error,
                activation_result: rescue_setup_command_result(
                    "activate",
                    "rescue",
                    false,
                    false,
                    "activation_failed",
                ),
                diagnostics: Vec::new(),
            })?
        }
    };
    let mut command_result = rescue_setup_command_result(
        &result.action,
        &result.profile,
        result.configured,
        result.active,
        &result.runtime_state,
    );
    command_result.duration_ms = started.elapsed().as_millis() as u64;
    if !result.active {
        // Activation reported success but the profile is not active: gather
        // diagnostics so the error message can point at concrete checks.
        let diagnostics = collect_rescue_activation_failure_diagnostics(
            app,
            target_location,
            instance_id,
            &result.profile,
        )
        .await;
        let suggested_checks = diagnostics
            .iter()
            .map(|result| result.argv.join(" "))
            .collect::<Vec<_>>();
        return Err(RescueActivationFailure {
            message: rescue_activation_error_message(
                &result.profile,
                result.configured,
                &result.runtime_state,
                &suggested_checks,
            ),
            activation_result: command_result,
            diagnostics,
        });
    }
    Ok(command_result)
}

/// When the diagnosis reports the rescue profile missing or its gateway
/// unhealthy, activates the rescue profile and refreshes `diagnosis` in place.
async fn repair_rescue_gateway_if_needed(
    app: &AppHandle,
    session_id: &str,
    round: usize,
    target_location: TargetLocation,
    instance_id: &str,
    diagnosis: &mut RescuePrimaryDiagnosisResult,
) -> Result<(), String> {
    if !(diagnosis_missing_rescue_profile(diagnosis)
        || diagnosis_unhealthy_rescue_gateway(diagnosis))
    {
        return Ok(());
    }

    emit_progress(
        Some(app),
        session_id,
        round,
        "preparing_rescue",
        "Activating rescue profile before requesting remote repair plan",
        Some(PlanKind::Repair),
        None,
    );
    let setup_result =
match ensure_rescue_profile_ready(app, target_location, instance_id).await {
        Ok(setup_result) => setup_result,
        Err(failure) => {
            // Log both the failed activation and its diagnostic checks before
            // surfacing the failure message.
            append_remote_doctor_log(
                session_id,
                json!({
                    "event": "rescue_profile_activation",
                    "round": round,
                    "result": failure.activation_result,
                    "status": "failed",
                }),
            );
            append_remote_doctor_log(
                session_id,
                json!({
                    "event": "rescue_activation_diagnosis",
                    "round": round,
                    "checks": failure.diagnostics,
                }),
            );
            return Err(failure.message);
        }
    };
    append_remote_doctor_log(
        session_id,
        json!({
            "event": "rescue_profile_activation",
            "round": round,
            "result": setup_result,
        }),
    );
    // Re-diagnose so callers see post-activation state.
    *diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?;
    append_diagnosis_log(session_id, "after_rescue_activation", round, diagnosis);
    Ok(())
}

/// Appends a structured diagnosis snapshot to the session log.
fn append_diagnosis_log(
    session_id: &str,
    stage: &str,
    round: usize,
    diagnosis: &RescuePrimaryDiagnosisResult,
) {
    append_remote_doctor_log(
        session_id,
        json!({
            "event": "diagnosis_result",
            "stage": stage,
            "round": round,
            "status": diagnosis.status,
            "summaryStatus": diagnosis.summary.status,
            "headline": diagnosis.summary.headline,
            "recommendedAction": diagnosis.summary.recommended_action,
            "issueCount": diagnosis.issues.len(),
            "issues": diagnosis_issue_summaries(diagnosis),
        }),
    );
}

/// Counts plan steps per step type, returned as a JSON object type -> count.
fn clawpal_server_step_type_summary(steps: &[ClawpalServerPlanStep]) -> Value {
    let mut counts = serde_json::Map::new();
    for step in steps {
        let entry = counts
            .entry(step.step_type.clone())
            .or_insert_with(|| Value::from(0_u64));
        let next = entry.as_u64().unwrap_or(0) + 1;
        *entry = Value::from(next);
    }
    Value::Object(counts)
}

/// True when the last `threshold` observed rounds each consisted solely of a
/// `doctorRediagnose` step against an unchanged diagnosis signature — i.e. the
/// planner is looping without making progress.
fn repair_plan_stalled(observations: &[RepairRoundObservation], threshold: usize) -> bool {
    if observations.len() < threshold {
        return false;
    }
    let recent = &observations[observations.len() - threshold..];
    let Some(first) = recent.first() else {
        return false;
    };
!first.issue_summaries.is_empty()
        && recent.iter().all(|entry| {
            entry.step_types.len() == 1
                && entry.step_types[0] == "doctorRediagnose"
                && entry.diagnosis_signature == first.diagnosis_signature
        })
}

/// Error text produced when the repair loop hits the round limit without a
/// clean diagnosis, summarizing the last issues and step types.
fn round_limit_error_message(
    diagnosis: &RescuePrimaryDiagnosisResult,
    last_step_types: &[String],
) -> String {
    let issue_summary = serde_json::to_string(&diagnosis_issue_summaries(diagnosis))
        .unwrap_or_else(|_| "[]".to_string());
    let step_summary = if last_step_types.is_empty() {
        "[]".to_string()
    } else {
        serde_json::to_string(last_step_types).unwrap_or_else(|_| "[]".to_string())
    };
    format!(
        "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean rescue diagnosis result. Last issues: {issue_summary}. Last repair step types: {step_summary}."
    )
}

/// Error text produced when repeated rounds yielded no actionable repair steps.
fn stalled_plan_error_message(observation: &RepairRoundObservation) -> String {
    let issue_summary =
        serde_json::to_string(&observation.issue_summaries).unwrap_or_else(|_| "[]".to_string());
    let step_summary =
        serde_json::to_string(&observation.step_types).unwrap_or_else(|_| "[]".to_string());
    format!(
        "Remote Doctor did not return actionable repair steps by round {} after {} repeated rounds. Last issues: {}. Last repair step types: {}.",
        observation.round,
        REPAIR_PLAN_STALL_THRESHOLD,
        issue_summary,
        step_summary
    )
}

/// Coerces `value` into a JSON object in place (replacing any non-object with
/// `{}`) and returns a mutable reference to its map.
fn ensure_object(value: &mut Value) -> Result<&mut serde_json::Map<String, Value>, String> {
    if !value.is_object() {
        *value = json!({});
    }
    value
        .as_object_mut()
        .ok_or_else(|| "Expected object while applying remote doctor config step".to_string())
}

/// Sets `value` at the dotted `path` in `root`, creating intermediate objects
/// as needed. Empty path segments are ignored.
fn apply_config_set(root: &mut Value, path: &str, value: Value) -> Result<(), String> {
    let segments = path
        .split('.')
        .filter(|segment| !segment.trim().is_empty())
        .collect::<Vec<_>>();
    if segments.is_empty() {
        return Err("Config set path cannot be empty".into());
    }
    let mut cursor = root;
    for segment in &segments[..segments.len() - 1] {
        let object = ensure_object(cursor)?;
        cursor = object
            .entry((*segment).to_string())
            .or_insert_with(|| json!({}));
    }
    let object = ensure_object(cursor)?;
    object.insert(segments[segments.len() - 1].to_string(), value);
    Ok(())
}

/// Removes the value at the dotted `path` in `root`. Missing intermediate
/// objects make this a no-op.
fn apply_config_unset(root: &mut Value, path: &str) -> Result<(), String> {
    let segments = path
        .split('.')
        .filter(|segment| !segment.trim().is_empty())
        .collect::<Vec<_>>();
    if segments.is_empty() {
        return Err("Config unset path cannot be empty".into());
    }
    let mut cursor = root;
    for segment in &segments[..segments.len() - 1] {
        let Some(next) = cursor
            .as_object_mut()
            .and_then(|object| object.get_mut(*segment))
        else {
            return Ok(());
        };
        cursor = next;
    }
    if let Some(object) = cursor.as_object_mut() {
        object.remove(segments[segments.len() - 1]);
    }
    Ok(())
}

/// Extracts the first JSON block embedded in free-form agent output.
fn extract_json_block(text: &str) -> Option<&str> {
    clawpal_core::doctor::extract_json_from_output(text)
}

/// Builds the planning prompt sent to the remote doctor agent for the given
/// phase, embedding diagnosis, config context, and prior command results.
fn build_agent_plan_prompt(
    kind: PlanKind,
    session_id: &str,
    round: usize,
    target_location: TargetLocation,
    instance_id: &str,
    diagnosis: &RescuePrimaryDiagnosisResult,
    config_context: &ConfigExcerptContext,
    previous_results: &[CommandResult],
) -> String {
    let kind_label = match kind {
PlanKind::Detect => "detection",
        PlanKind::Investigate => "investigation",
        PlanKind::Repair => "repair",
    };
    let target_label = match target_location {
        TargetLocation::LocalOpenclaw => "local_openclaw",
        TargetLocation::RemoteOpenclaw => "remote_openclaw",
    };
    let diagnosis_json =
        serde_json::to_string_pretty(&diagnosis_context(diagnosis)).unwrap_or_else(|_| "{}".into());
    let config_context_json = serde_json::to_string_pretty(&json!({
        "configExcerpt": config_context.config_excerpt,
        "configExcerptRaw": config_context.config_excerpt_raw,
        "configParseError": config_context.config_parse_error,
    }))
    .unwrap_or_else(|_| "{}".into());
    let previous_results_json =
        serde_json::to_string_pretty(previous_results).unwrap_or_else(|_| "[]".into());
    let phase_rules = match kind {
        PlanKind::Detect => "For detection plans, gather only the commands needed to confirm current state. Set healthy=true and done=true only when no issue remains.",
        PlanKind::Investigate => "For investigation plans, return read-only diagnosis steps only. Do not modify files, delete files, overwrite config, or restart services. Prefer commands that inspect, validate, backup, or print evidence for why the config is unreadable. Do not run follow/tail commands, streaming log readers, or any unbounded command; every investigation command must be bounded and return promptly. Do not use heredocs, multiline scripts, or commands that wait on stdin. Prefer single-line commands over shell scripting.",
        PlanKind::Repair => "For repair plans, return the minimal safe repair commands. Reference prior investigation evidence when config is unreadable. Back up the file before changing it and include validation/rediagnosis steps as needed. Do not invent OpenClaw subcommands. Use only the verified OpenClaw commands listed below or the `clawpal doctor ...` tools. Do not use `openclaw auth ...` commands. Do not use `openclaw doctor --json`; use `clawpal doctor probe-openclaw` or `clawpal doctor exec --tool doctor` instead. Do not use heredocs, multiline scripts, or commands that wait on stdin.",
    };
    // NOTE(review): angle-bracket argument placeholders in the tool list below
    // were lost in transit and restored by inference — confirm against repo.
    format!(
        "Identity bootstrap for this session:\n\
- Your name: ClawPal Remote Doctor\n\
- Your creature: maintenance daemon\n\
- Your vibe: direct, terse, operational\n\
- Your emoji: none\n\
- The user is: ClawPal desktop app\n\
- The user timezone is: Asia/Shanghai\n\
- Do not ask identity/bootstrap questions.\n\
- Do not ask who you are or who the user is.\n\
- Do not modify IDENTITY.md, USER.md, or workspace bootstrap files.\n\
\n\
You are ClawPal Remote Doctor planner.\n\
Return ONLY one JSON object and no markdown.\n\
Task: produce the next {kind_label} plan for OpenClaw.\n\
Session: {session_id}\n\
Round: {round}\n\
Target location: {target_label}\n\
Instance id: {instance_id}\n\
Diagnosis JSON:\n{diagnosis_json}\n\n\
Config context JSON:\n{config_context_json}\n\n\
Previous command results JSON:\n{previous_results_json}\n\n\
Available gateway tools:\n\
- `clawpal doctor probe-openclaw`\n\
- `clawpal doctor config-read [path]`\n\
- `clawpal doctor config-read-raw`\n\
- `clawpal doctor config-upsert <path> <value>`\n\
- `clawpal doctor config-delete <path>`\n\
- `clawpal doctor config-write-raw-base64 <base64>`\n\
- `clawpal doctor exec --tool <tool> [--args <args>]`\n\
- Verified direct OpenClaw commands only:\n\
  - `openclaw --version`\n\
  - `openclaw gateway status`\n\
You may invoke these tools before answering when you need fresh diagnostics or config state.\n\
If you already have enough information, return the JSON plan directly.\n\n\
Return this exact JSON schema:\n\
{{\n \"planId\": \"string\",\n \"planKind\": \"{kind}\",\n \"summary\": \"string\",\n \"commands\": [{{\"argv\": [\"cmd\"], \"timeoutSec\": 60, \"purpose\": \"why\", \"continueOnFailure\": false}}],\n \"healthy\": false,\n \"done\": false,\n \"success\": false\n}}\n\
Rules:\n\
- {phase_rules}\n\
- For repair plans, return shell/openclaw commands in commands.\n\
- Keep commands empty when no command is needed.\n\
- Output valid JSON only.",
        kind = match kind {
            PlanKind::Detect => "detect",
            PlanKind::Investigate => "investigate",
            PlanKind::Repair => "repair",
        }
    )
}

/// Parses the agent's free-form reply into a structured plan by extracting
/// and deserializing its embedded JSON block.
fn parse_agent_plan_response(kind: PlanKind, text: &str) -> Result<PlanResponse, String> {
    let json_block = extract_json_block(text)
        .ok_or_else(|| format!("Remote doctor agent did not return JSON: {text}"))?;
    let value: Value = serde_json::from_str(json_block)
        .map_err(|error| format!("Failed to parse remote doctor agent JSON: {error}"))?;
    parse_plan_response(kind, value)
}

/// Normalizes an invoke payload's args into an argv vector that begins with
/// `command`. Accepts either an explicit "argv" array or a shell-style string.
fn parse_invoke_argv(command: &str, args: &Value) -> Result<Vec<String>, String> {
    if let Some(argv) = args.get("argv").and_then(Value::as_array) {
        let parsed = argv
            .iter()
            .map(|value| {
                value
                    .as_str()
                    .map(str::to_string)
                    .ok_or_else(|| "invoke argv entries must be strings".to_string())
            })
            .collect::<Result<Vec<_>, _>>()?;
        if parsed.is_empty() {
            return Err("invoke argv cannot be empty".into());
        }
        return Ok(parsed);
    }

    let arg_string = args
        .get("args")
        .and_then(Value::as_str)
        .or_else(|| args.get("command").and_then(Value::as_str))
        .unwrap_or("");
    let mut parsed = if arg_string.trim().is_empty() {
        Vec::new()
    } else {
        shell_words::split(arg_string)
            .map_err(|error| format!("Failed to parse invoke args: {error}"))?
};
    if parsed.first().map(String::as_str) != Some(command) {
        parsed.insert(0, command.to_string());
    }
    Ok(parsed)
}

/// Dispatches internal `clawpal ...` commands issued by the planning agent.
/// Only the `doctor` subcommand family is supported.
async fn execute_clawpal_command(
    app: &AppHandle,
    pool: &SshConnectionPool,
    target_location: TargetLocation,
    instance_id: &str,
    argv: &[String],
) -> Result<Value, String> {
    match argv.get(1).map(String::as_str) {
        Some("doctor") => {
            execute_clawpal_doctor_command(app, pool, target_location, instance_id, argv).await
        }
        other => Err(format!(
            "Unsupported clawpal command in remote doctor agent session: {:?}",
            other
        )),
    }
}

/// Handles `clawpal doctor <subcommand>` tool invocations (probe, config
/// read/write, exec) on behalf of the planning agent.
async fn execute_clawpal_doctor_command(
    app: &AppHandle,
    pool: &SshConnectionPool,
    target_location: TargetLocation,
    instance_id: &str,
    argv: &[String],
) -> Result<Value, String> {
    match argv.get(2).map(String::as_str) {
        Some("probe-openclaw") => {
            let version_result = execute_command(
                pool,
                target_location,
                instance_id,
                &["openclaw".into(), "--version".into()],
            )
            .await?;
            // NOTE(review): both match arms below run the identical probe;
            // kept as-is to preserve behavior, but the match could collapse.
            let which_result = match target_location {
                TargetLocation::LocalOpenclaw => {
                    execute_command(
                        pool,
                        target_location,
                        instance_id,
                        &[
                            "sh".into(),
                            "-lc".into(),
                            "command -v openclaw || true".into(),
                        ],
                    )
                    .await?
                }
                TargetLocation::RemoteOpenclaw => {
                    execute_command(
                        pool,
                        target_location,
                        instance_id,
                        &[
                            "sh".into(),
                            "-lc".into(),
                            "command -v openclaw || true".into(),
                        ],
                    )
                    .await?
}
            };
            Ok(json!({
                "ok": version_result.exit_code == Some(0),
                "version": version_result.stdout.trim(),
                "openclawPath": which_result.stdout.trim(),
            }))
        }
        Some("config-read") => {
            // A bare "--flag" in position 3 is not a path.
            let maybe_path = argv
                .get(3)
                .map(String::as_str)
                .filter(|value| !value.starts_with("--"));
            let raw = read_target_config_raw(app, target_location, instance_id).await?;
            config_read_response(&raw, maybe_path)
        }
        Some("config-read-raw") => {
            let raw = read_target_config_raw(app, target_location, instance_id).await?;
            Ok(json!({
                "raw": raw,
            }))
        }
        Some("config-delete") => {
            let path = argv
                .get(3)
                .ok_or("clawpal doctor config-delete requires a path")?;
            let mut config = read_target_config(app, target_location, instance_id).await?;
            apply_config_unset(&mut config, path)?;
            write_target_config(app, target_location, instance_id, &config).await?;
            // Config mutations restart the gateway so changes take effect.
            restart_target_gateway(app, target_location, instance_id).await?;
            Ok(json!({ "deleted": true, "path": path }))
        }
        Some("config-write-raw-base64") => {
            let encoded = argv
                .get(3)
                .ok_or("clawpal doctor config-write-raw-base64 requires a base64 payload")?;
            let decoded = decode_base64_config_payload(encoded)?;
            write_target_config_raw(app, target_location, instance_id, &decoded).await?;
            restart_target_gateway(app, target_location, instance_id).await?;
            Ok(json!({
                "written": true,
                "bytes": decoded.len(),
            }))
        }
        Some("config-upsert") => {
            let path = argv
                .get(3)
                .ok_or("clawpal doctor config-upsert requires a path")?;
            let value_raw = argv
                .get(4)
                .ok_or("clawpal doctor config-upsert requires a value")?;
            let value: Value = serde_json::from_str(value_raw)
                .map_err(|error| format!("Invalid JSON value for config-upsert: {error}"))?;
            let mut config = read_target_config(app, target_location, instance_id).await?;
            apply_config_set(&mut config, path, value)?;
            write_target_config(app, target_location, instance_id, &config).await?;
            restart_target_gateway(app,
target_location, instance_id).await?;
            Ok(json!({ "upserted": true, "path": path }))
        }
        Some("exec") => {
            let tool_idx = argv
                .iter()
                .position(|part| part == "--tool")
                .ok_or("clawpal doctor exec requires --tool")?;
            let tool = argv
                .get(tool_idx + 1)
                .ok_or("clawpal doctor exec missing tool name")?;
            let args_idx = argv.iter().position(|part| part == "--args");
            let mut exec_argv = vec![tool.clone()];
            if let Some(index) = args_idx {
                if let Some(arg_string) = argv.get(index + 1) {
                    exec_argv.extend(shell_words::split(arg_string).map_err(|error| {
                        format!("Failed to parse clawpal doctor exec args: {error}")
                    })?);
                }
            }
            let result = execute_command(pool, target_location, instance_id, &exec_argv).await?;
            Ok(json!({
                "argv": result.argv,
                "exitCode": result.exit_code,
                "stdout": result.stdout,
                "stderr": result.stderr,
            }))
        }
        other => Err(format!(
            "Unsupported clawpal doctor subcommand in remote doctor agent session: {:?}",
            other
        )),
    }
}

/// Reads the target config (optionally selecting a dotted path) and shapes a
/// JSON response; parse failures return the raw excerpt plus the error.
fn config_read_response(raw: &str, path: Option<&str>) -> Result<Value, String> {
    let context = build_config_excerpt_context(raw);
    if let Some(parse_error) = context.config_parse_error {
        return Ok(json!({
            "value": Value::Null,
            "path": path,
            "raw": context.config_excerpt_raw.unwrap_or_else(|| raw.to_string()),
            "parseError": parse_error,
        }));
    }

    let value = if let Some(path) = path {
        clawpal_core::doctor::select_json_value_from_str(
            &serde_json::to_string_pretty(&context.config_excerpt).unwrap_or_else(|_| "{}".into()),
            Some(path),
            "remote doctor config",
        )?
} else {
        context.config_excerpt
    };

    Ok(json!({
        "value": value,
        "path": path,
    }))
}

/// Decodes a base64 config payload into UTF-8 text.
fn decode_base64_config_payload(encoded: &str) -> Result<String, String> {
    let bytes = base64::engine::general_purpose::STANDARD
        .decode(encoded.trim())
        .map_err(|error| format!("Failed to decode base64 config payload: {error}"))?;
    String::from_utf8(bytes)
        .map_err(|error| format!("Base64 config payload is not valid UTF-8: {error}"))
}

/// Executes a bridge invoke payload: direct `openclaw` commands run on the
/// target; internal `clawpal` commands are handled in-process.
async fn execute_invoke_payload(
    app: &AppHandle,
    pool: &SshConnectionPool,
    target_location: TargetLocation,
    instance_id: &str,
    payload: &Value,
) -> Result<Value, String> {
    let command = payload
        .get("command")
        .and_then(Value::as_str)
        .ok_or("invoke payload missing command")?;
    let args = payload.get("args").cloned().unwrap_or(Value::Null);
    let argv = parse_invoke_argv(command, &args)?;
    match command {
        "openclaw" => {
            let result = execute_command(pool, target_location, instance_id, &argv).await?;
            Ok(json!({
                "argv": result.argv,
                "exitCode": result.exit_code,
                "stdout": result.stdout,
                "stderr": result.stderr,
            }))
        }
        "clawpal" => execute_clawpal_command(app, pool, target_location, instance_id, &argv).await,
        other => Err(format!(
            "Unsupported invoke command in remote doctor agent session: {other}"
        )),
    }
}

/// Runs an agent request while concurrently servicing tool invokes arriving
/// over the bridge, until the final chat response resolves.
async fn run_agent_request_with_bridge(
    app: &AppHandle,
    client: &NodeClient,
    bridge: &BridgeClient,
    pool: &SshConnectionPool,
    target_location: TargetLocation,
    instance_id: &str,
    agent_id: &str,
    session_key: &str,
    message: &str,
) -> Result<String, String> {
    let final_rx = client
        .start_agent_request(agent_id, session_key, message)
        .await?;
    let mut invokes = bridge.subscribe_invokes();
    let final_future = async move {
        final_rx.await.map_err(|_| {
            "Agent request ended before a final chat response was received".to_string()
        })
    };
    tokio::pin!(final_future);

    loop {
        tokio::select!
{
            // Final answer wins the race and ends the loop.
            result = &mut final_future => {
                return result;
            }
            event = invokes.recv() => {
                let payload = match event {
                    Ok(payload) => payload,
                    Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
                        continue;
                    }
                    Err(tokio::sync::broadcast::error::RecvError::Closed) => {
                        return Err("Bridge invoke stream closed during agent request".into());
                    }
                };
                let invoke_id = payload.get("id").and_then(Value::as_str).unwrap_or("").to_string();
                let node_id = payload.get("nodeId").and_then(Value::as_str).unwrap_or("").to_string();
                let result = execute_invoke_payload(app, pool, target_location, instance_id, &payload).await;
                match result {
                    Ok(value) => {
                        bridge.send_invoke_result(&invoke_id, &node_id, value).await?;
                    }
                    Err(error) => {
                        bridge.send_invoke_error(&invoke_id, &node_id, "EXEC_ERROR", &error).await?;
                    }
                }
                let _ = bridge.take_invoke(&invoke_id).await;
            }
        }
    }
}

/// Single-quotes a string for POSIX shells (embedded quotes become '\'').
fn shell_escape(value: &str) -> String {
    format!("'{}'", value.replace('\'', "'\\''"))
}

/// Joins argv into one shell-escaped command line.
fn build_shell_command(argv: &[String]) -> String {
    argv.iter()
        .map(|part| shell_escape(part))
        .collect::<Vec<_>>()
        .join(" ")
}

/// Executes a plan command locally or over SSH, capturing output, exit code,
/// and duration. `openclaw` argv are routed through the dedicated runners.
async fn execute_command(
    pool: &SshConnectionPool,
    target_location: TargetLocation,
    instance_id: &str,
    argv: &[String],
) -> Result<CommandResult, String> {
    let started = Instant::now();
    if argv.is_empty() {
        return Err("Plan command argv cannot be empty".into());
    }
    let result = match target_location {
        TargetLocation::LocalOpenclaw => {
            if argv[0] == "openclaw" {
                let arg_refs = argv
                    .iter()
                    .skip(1)
                    .map(String::as_str)
                    .collect::<Vec<_>>();
                let output = run_openclaw(&arg_refs)?;
                CommandResult {
                    argv: argv.to_vec(),
                    exit_code: Some(output.exit_code),
                    stdout: output.stdout,
                    stderr: output.stderr,
                    duration_ms: started.elapsed().as_millis() as u64,
                    timed_out: false,
                }
            } else {
                let mut command = std::process::Command::new(&argv[0]);
                command.args(argv.iter().skip(1));
                if let Some(openclaw_home) =
get_active_openclaw_home_override() {
                    command.env("OPENCLAW_HOME", openclaw_home);
                }
                let output = command.output().map_err(|error| {
                    format!("Failed to execute local command {:?}: {error}", argv)
                })?;
                CommandResult {
                    argv: argv.to_vec(),
                    exit_code: output.status.code(),
                    stdout: String::from_utf8_lossy(&output.stdout).to_string(),
                    stderr: String::from_utf8_lossy(&output.stderr).to_string(),
                    duration_ms: started.elapsed().as_millis() as u64,
                    timed_out: false,
                }
            }
        }
        TargetLocation::RemoteOpenclaw => {
            let host_id = primary_remote_target_host_id(instance_id)?;
            if argv[0] == "openclaw" {
                let arg_refs = argv
                    .iter()
                    .skip(1)
                    .map(String::as_str)
                    .collect::<Vec<_>>();
                let output = run_openclaw_remote(pool, &host_id, &arg_refs).await?;
                CommandResult {
                    argv: argv.to_vec(),
                    exit_code: Some(output.exit_code),
                    stdout: output.stdout,
                    stderr: output.stderr,
                    duration_ms: started.elapsed().as_millis() as u64,
                    timed_out: false,
                }
            } else {
                // Non-openclaw argv runs via a login shell on the remote host.
                let output = pool
                    .exec_login(&host_id, &build_shell_command(argv))
                    .await?;
                CommandResult {
                    argv: argv.to_vec(),
                    exit_code: Some(output.exit_code as i32),
                    stdout: output.stdout,
                    stderr: output.stderr,
                    duration_ms: started.elapsed().as_millis() as u64,
                    timed_out: false,
                }
            }
        }
    };
    Ok(result)
}

/// True when the plan command targets the internal `clawpal` tool.
fn plan_command_uses_internal_clawpal_tool(argv: &[String]) -> bool {
    argv.first().map(String::as_str) == Some("clawpal")
}

/// Rejects multiline / heredoc payloads passed to `clawpal doctor exec
/// --args`; anything else passes through unchanged.
fn validate_clawpal_exec_args(argv: &[String]) -> Result<(), String> {
    if argv.first().map(String::as_str) != Some("clawpal")
        || argv.get(1).map(String::as_str) != Some("doctor")
        || argv.get(2).map(String::as_str) != Some("exec")
    {
        return Ok(());
    }

    let args_idx = argv.iter().position(|part| part == "--args");
    let Some(index) = args_idx else {
        return Ok(());
    };
    let Some(arg_string) = argv.get(index + 1) else {
        return Ok(());
    };
    if arg_string.contains('\n') || arg_string.contains("<<") {
        return Err(format!(
"Unsupported clawpal doctor exec args: {}. Use bounded single-line commands without heredocs or stdin-driven scripts.",
            argv.join(" ")
        ));
    }
    Ok(())
}

/// Validates a plan command argv. Internal `clawpal` commands get the exec
/// args check; direct `openclaw` is limited to an explicit allowlist.
fn validate_plan_command_argv(argv: &[String]) -> Result<(), String> {
    if argv.is_empty() {
        return Err("Plan command argv cannot be empty".into());
    }
    validate_clawpal_exec_args(argv)?;
    if argv[0] != "openclaw" {
        return Ok(());
    }

    let supported = argv == ["openclaw", "--version"] || argv == ["openclaw", "gateway", "status"];
    if supported {
        Ok(())
    } else {
        Err(format!(
            "Unsupported openclaw plan command: {}",
            argv.join(" ")
        ))
    }
}

/// Formats a failure message for a plan command in a given phase and round.
fn plan_command_failure_message(
    kind: PlanKind,
    round: usize,
    argv: &[String],
    error: &str,
) -> String {
    let kind_label = match kind {
        PlanKind::Detect => "Detect",
        PlanKind::Investigate => "Investigate",
        PlanKind::Repair => "Repair",
    };
    format!(
        "{kind_label} command failed in round {round}: {}: {error}",
        argv.join(" ")
    )
}

/// Extracts `stdout` from a JSON command result, falling back to the whole
/// value pretty-printed when no string stdout is present.
fn command_result_stdout(value: &Value) -> String {
    value
        .get("stdout")
        .and_then(Value::as_str)
        .map(str::to_string)
        .unwrap_or_else(|| {
            serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string())
        })
}

/// Validates and executes one plan command; internal `clawpal` commands are
/// routed through the in-process tool handler and adapted to a CommandResult.
async fn execute_plan_command(
    app: &AppHandle,
    pool: &SshConnectionPool,
    target_location: TargetLocation,
    instance_id: &str,
    argv: &[String],
) -> Result<CommandResult, String> {
    let started = Instant::now();
    validate_plan_command_argv(argv)?;
    if plan_command_uses_internal_clawpal_tool(argv) {
        let value = execute_clawpal_command(app, pool, target_location, instance_id, argv).await?;
        let exit_code = value
            .get("exitCode")
            .and_then(Value::as_i64)
            .map(|code| code as i32)
            .unwrap_or(0);
        let stderr = value
            .get("stderr")
            .and_then(Value::as_str)
            .unwrap_or("")
            .to_string();
        return Ok(CommandResult {
            argv: argv.to_vec(),
            exit_code: Some(exit_code),
            stdout: command_result_stdout(&value),
            stderr,
            duration_ms:
started.elapsed().as_millis() as u64,
            timed_out: false,
        });
    }

    execute_command(pool, target_location, instance_id, argv).await
}

/// Deserializes a plan response, stamping the requested kind and generating a
/// fallback plan id when the agent omitted one.
fn parse_plan_response(kind: PlanKind, value: Value) -> Result<PlanResponse, String> {
    let mut response: PlanResponse = serde_json::from_value(value)
        .map_err(|error| format!("Failed to parse remote doctor plan response: {error}"))?;
    response.plan_kind = kind;
    if response.plan_id.trim().is_empty() {
        response.plan_id = format!("plan-{}", Uuid::new_v4());
    }
    Ok(response)
}

/// Requests a plan from the node over a raw RPC `method`, passing session,
/// round, phase, target, and prior results as context.
async fn request_plan(
    client: &NodeClient,
    method: &str,
    kind: PlanKind,
    session_id: &str,
    round: usize,
    target_location: TargetLocation,
    instance_id: &str,
    previous_results: &[CommandResult],
) -> Result<PlanResponse, String> {
    let response = client
        .send_request(
            method,
            json!({
                "sessionId": session_id,
                "round": round,
                "planKind": match kind {
                    PlanKind::Detect => "detect",
                    PlanKind::Investigate => "investigate",
                    PlanKind::Repair => "repair",
                },
                "targetLocation": match target_location {
                    TargetLocation::LocalOpenclaw => "local_openclaw",
                    TargetLocation::RemoteOpenclaw => "remote_openclaw",
                },
                "instanceId": instance_id,
                "hostId": instance_id.strip_prefix("ssh:"),
                "previousResults": previous_results,
            }),
        )
        .await?;
    parse_plan_response(kind, response)
}

/// Requests a plan from the remote doctor agent, preferring the bridge path
/// (which can service tool invokes) when the bridge is connected.
async fn request_agent_plan(
    app: &AppHandle,
    client: &NodeClient,
    bridge_client: &BridgeClient,
    pool: &SshConnectionPool,
    session_id: &str,
    round: usize,
    kind: PlanKind,
    target_location: TargetLocation,
    instance_id: &str,
    diagnosis: &RescuePrimaryDiagnosisResult,
    config_context: &ConfigExcerptContext,
    previous_results: &[CommandResult],
) -> Result<PlanResponse, String> {
    let agent_session_key = remote_doctor_agent_session_key(session_id);
    let prompt = build_agent_plan_prompt(
        kind,
        session_id,
        round,
        target_location,
        instance_id,
        diagnosis,
        config_context,
        previous_results,
    );
    let text = if bridge_client.is_connected().await {
run_agent_request_with_bridge(
            app,
            client,
            bridge_client,
            pool,
            target_location,
            instance_id,
            remote_doctor_agent_id(),
            &agent_session_key,
            &prompt,
        )
        .await?
    } else {
        client
            .run_agent_request(remote_doctor_agent_id(), &agent_session_key, &prompt)
            .await?
    };
    parse_agent_plan_response(kind, &text)
}

/// Summarizes a plan as step-type labels: the leading argv token per command,
/// or a single "plan:<kind>" marker when the plan carries no commands.
fn agent_plan_step_types(plan: &PlanResponse) -> Vec<String> {
    if plan.commands.is_empty() {
        return vec![format!(
            "plan:{}",
            match plan.plan_kind {
                PlanKind::Detect => "detect",
                PlanKind::Investigate => "investigate",
                PlanKind::Repair => "repair",
            }
        )];
    }
    plan.commands
        .iter()
        .map(|command| {
            command
                .argv
                .first()
                .cloned()
                .unwrap_or_else(|| "empty-command".to_string())
        })
        .collect()
}

/// Requests a structured repair plan from clawpal-server with diagnosis and
/// config context.
async fn request_clawpal_server_plan(
    client: &NodeClient,
    session_id: &str,
    round: usize,
    instance_id: &str,
    target_location: TargetLocation,
    diagnosis: &RescuePrimaryDiagnosisResult,
    config_context: &ConfigExcerptContext,
) -> Result<ClawpalServerPlanResponse, String> {
    let response = client
        .send_request(
            "remote_repair_plan.request",
            json!({
                "requestId": format!("{session_id}-round-{round}"),
                "targetId": instance_id,
                "targetLocation": match target_location {
                    TargetLocation::LocalOpenclaw => "local_openclaw",
                    TargetLocation::RemoteOpenclaw => "remote_openclaw",
                },
                "context": {
                    "configExcerpt": config_context.config_excerpt,
                    "configExcerptRaw": config_context.config_excerpt_raw,
                    "configParseError": config_context.config_parse_error,
                    "diagnosis": diagnosis_context(diagnosis),
                }
            }),
        )
        .await?;
    // NOTE(review): the turbofish type was lost in transit; assumed to match
    // this function's return type — confirm against the repo.
    serde_json::from_value::<ClawpalServerPlanResponse>(response)
        .map_err(|error| format!("Failed to parse clawpal-server plan response: {error}"))
}

/// Reports a single step result back to clawpal-server (best effort; failures
/// are intentionally ignored).
async fn report_clawpal_server_step_result(
    client: &NodeClient,
    plan_id: &str,
    step_index: usize,
    step: &ClawpalServerPlanStep,
    result: &CommandResult,
) {
    let _ = client
        .send_request(
            "remote_repair_plan.step_result",
            json!({
                "planId": plan_id,
"stepIndex": step_index,
                "step": step,
                "result": result,
            }),
        )
        .await;
}

/// Reports the final health outcome back to clawpal-server (best effort;
/// failures are intentionally ignored).
async fn report_clawpal_server_final_result(
    client: &NodeClient,
    plan_id: &str,
    healthy: bool,
    diagnosis: &RescuePrimaryDiagnosisResult,
) {
    let _ = client
        .send_request(
            "remote_repair_plan.final_result",
            json!({
                "planId": plan_id,
                "healthy": healthy,
                "diagnosis": diagnosis_context(diagnosis),
            }),
        )
        .await;
}

/// Drives alternating detect/repair rounds using the caller-supplied planner
/// until a detection plan reports healthy (or done with no commands), or the
/// round limit is exhausted.
async fn run_remote_doctor_repair_loop<F, Fut>(
    app: Option<&AppHandle>,
    pool: &SshConnectionPool,
    session_id: &str,
    instance_id: &str,
    target_location: TargetLocation,
    mut request_plan_fn: F,
) -> Result<RemoteDoctorRepairResult, String>
where
    F: FnMut(PlanKind, usize, Vec<CommandResult>) -> Fut,
    Fut: std::future::Future<Output = Result<PlanResponse, String>>,
{
    let mut previous_results: Vec<CommandResult> = Vec::new();
    let mut last_command: Option<Vec<String>> = None;
    let mut last_plan_kind = PlanKind::Detect;

    for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS {
        emit_progress(
            app,
            session_id,
            round,
            "planning_detect",
            format!("Requesting detection plan for round {round}"),
            Some(PlanKind::Detect),
            None,
        );
        let detect_plan =
            request_plan_fn(PlanKind::Detect, round, previous_results.clone()).await?;
        append_remote_doctor_log(
            session_id,
            json!({
                "event": "plan_received",
                "round": round,
                "planKind": "detect",
                "planId": detect_plan.plan_id,
                "summary": detect_plan.summary,
                "commandCount": detect_plan.commands.len(),
                "healthy": detect_plan.healthy,
                "done": detect_plan.done,
            }),
        );
        if detect_plan.healthy || (detect_plan.done && detect_plan.commands.is_empty()) {
            return Ok(RemoteDoctorRepairResult {
                mode: "remoteDoctor".into(),
                status: "completed".into(),
                round,
                phase: "completed".into(),
                last_plan_kind: match last_plan_kind {
                    PlanKind::Detect => "detect".into(),
                    PlanKind::Investigate => "investigate".into(),
                    PlanKind::Repair => "repair".into(),
                },
                latest_diagnosis_healthy: true,
                last_command,
                session_id: session_id.to_string(),
                message: "Remote Doctor repair completed with a healthy detection result.".into(),
            });
        }
        previous_results.clear();
        for command in &detect_plan.commands {
            last_command = Some(command.argv.clone());
            emit_progress(
                app,
                session_id,
                round,
                "executing_detect",
                format!("Running detect command: {}", command.argv.join(" ")),
                Some(PlanKind::Detect),
                Some(command.argv.clone()),
            );
            let command_result =
                execute_command(pool, target_location, instance_id, &command.argv).await?;
            append_remote_doctor_log(
                session_id,
                json!({
                    "event": "command_result",
                    "round": round,
                    "planKind": "detect",
                    "result": command_result,
                }),
            );
            // A failing command aborts the session unless the plan marked it
            // as continue-on-failure.
            if command_result.exit_code.unwrap_or(1) != 0
                && !command.continue_on_failure.unwrap_or(false)
            {
                previous_results.push(command_result);
                return Err(format!(
                    "Detect command failed in round {round}: {}",
                    command.argv.join(" ")
                ));
            }
            previous_results.push(command_result);
        }

        emit_progress(
            app,
            session_id,
            round,
            "planning_repair",
            format!("Requesting repair plan for round {round}"),
            Some(PlanKind::Repair),
            None,
        );
        let repair_plan =
            request_plan_fn(PlanKind::Repair, round, previous_results.clone()).await?;
        last_plan_kind = PlanKind::Repair;
        append_remote_doctor_log(
            session_id,
            json!({
                "event": "plan_received",
                "round": round,
                "planKind": "repair",
                "planId": repair_plan.plan_id,
                "summary": repair_plan.summary,
                "commandCount": repair_plan.commands.len(),
                "success": repair_plan.success,
                "done": repair_plan.done,
            }),
        );
        previous_results.clear();
        for command in &repair_plan.commands {
            last_command = Some(command.argv.clone());
            emit_progress(
                app,
                session_id,
                round,
                "executing_repair",
                format!("Running repair command: {}", command.argv.join(" ")),
                Some(PlanKind::Repair),
                Some(command.argv.clone()),
            );
            let command_result =
                execute_command(pool, target_location, instance_id, &command.argv).await?;
            append_remote_doctor_log(
                session_id,
                json!({
                    "event":
"command_result", + "round": round, + "planKind": "repair", + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + previous_results.push(command_result); + return Err(format!( + "Repair command failed in round {round}: {}", + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + } + + append_remote_doctor_log( + session_id, + json!({ + "event": "session_complete", + "status": "failed", + "reason": "round_limit_exceeded", + }), + ); + Err(format!( + "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean detection result" + )) +} + +async fn run_clawpal_server_repair_loop( + app: &AppHandle, + client: &NodeClient, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, +) -> Result { + let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "initial", 0, &diagnosis); + if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { + repair_rescue_gateway_if_needed( + app, + session_id, + 0, + target_location, + instance_id, + &mut diagnosis, + ) + .await?; + } + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + 0, + PlanKind::Detect, + None, + "Remote Doctor repair skipped because diagnosis is already healthy.", + )); + } + + let mut last_command = None; + let mut round_observations: Vec = Vec::new(); + let mut last_step_types: Vec = Vec::new(); + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + emit_progress( + Some(app), + session_id, + round, + "planning_repair", + format!("Requesting remote repair plan for round {round}"), + Some(PlanKind::Repair), + None, + ); + let config_context = build_config_excerpt_context( + &read_target_config_raw(app, target_location, instance_id).await?, + ); + append_remote_doctor_log( + session_id, + json!({ + "event": "plan_request_context", + "protocol": 
"clawpal_server", + "round": round, + "planKind": "repair", + "instanceId": instance_id, + "targetLocation": target_location, + "configContext": config_excerpt_log_summary(&config_context), + "diagnosisIssueCount": diagnosis.issues.len(), + "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), + }), + ); + if config_context.config_parse_error.is_some() { + append_remote_doctor_log( + session_id, + json!({ + "event": "config_recovery_context", + "round": round, + "context": config_excerpt_log_summary(&config_context), + }), + ); + } + let plan = request_clawpal_server_plan( + client, + session_id, + round, + instance_id, + target_location, + &diagnosis, + &config_context, + ) + .await?; + append_remote_doctor_log( + session_id, + json!({ + "event": "plan_received", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "planId": plan.plan_id, + "summary": plan.summary, + "stepCount": plan.steps.len(), + "stepTypeCounts": clawpal_server_step_type_summary(&plan.steps), + }), + ); + + let mut current_config = config_context.config_excerpt.clone(); + let mut rediagnosed = false; + let mut round_step_types = Vec::new(); + for (step_index, step) in plan.steps.iter().enumerate() { + round_step_types.push(step.step_type.clone()); + let mut result = CommandResult { + argv: Vec::new(), + exit_code: Some(0), + stdout: String::new(), + stderr: String::new(), + duration_ms: 0, + timed_out: false, + }; + let started = Instant::now(); + match step.step_type.as_str() { + "configSet" => { + let path = step.path.as_deref().ok_or("configSet step missing path")?; + let value = step.value.clone().ok_or("configSet step missing value")?; + emit_progress( + Some(app), + session_id, + round, + "executing_repair", + format!("Applying config set: {path}"), + Some(PlanKind::Repair), + None, + ); + apply_config_set(&mut current_config, path, value)?; + write_target_config(app, target_location, instance_id, ¤t_config).await?; + restart_target_gateway(app, 
target_location, instance_id).await?; + result.argv = vec!["configSet".into(), path.into()]; + result.stdout = format!("Updated {path}"); + } + "configUnset" => { + let path = step + .path + .as_deref() + .ok_or("configUnset step missing path")?; + emit_progress( + Some(app), + session_id, + round, + "executing_repair", + format!("Applying config unset: {path}"), + Some(PlanKind::Repair), + None, + ); + apply_config_unset(&mut current_config, path)?; + write_target_config(app, target_location, instance_id, ¤t_config).await?; + restart_target_gateway(app, target_location, instance_id).await?; + result.argv = vec!["configUnset".into(), path.into()]; + result.stdout = format!("Removed {path}"); + } + "doctorRediagnose" => { + emit_progress( + Some(app), + session_id, + round, + "planning_detect", + format!("Running rescue diagnosis after repair plan round {round}"), + Some(PlanKind::Detect), + None, + ); + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "post_step_rediagnose", round, &diagnosis); + rediagnosed = true; + result.argv = vec!["doctorRediagnose".into()]; + result.stdout = format!( + "Diagnosis status={} issues={}", + diagnosis.status, + diagnosis.issues.len() + ); + } + other => { + result.exit_code = Some(1); + result.stderr = format!("Unsupported clawpal-server step type: {other}"); + } + } + result.duration_ms = started.elapsed().as_millis() as u64; + last_command = Some(result.argv.clone()); + append_remote_doctor_log( + session_id, + json!({ + "event": "command_result", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "stepIndex": step_index, + "step": step, + "result": result, + }), + ); + report_clawpal_server_step_result(client, &plan.plan_id, step_index, step, &result) + .await; + if result.exit_code.unwrap_or(1) != 0 { + return Err(result.stderr); + } + } + + if !rediagnosed { + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + 
append_diagnosis_log(session_id, "post_round", round, &diagnosis); + } + if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { + repair_rescue_gateway_if_needed( + app, + session_id, + round, + target_location, + instance_id, + &mut diagnosis, + ) + .await?; + } + last_step_types = round_step_types.clone(); + round_observations.push(RepairRoundObservation::new( + round, + &round_step_types, + &diagnosis, + )); + if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { + let observation = round_observations + .last() + .expect("stalled observations should contain current round"); + append_remote_doctor_log( + session_id, + json!({ + "event": "repair_plan_stalled", + "protocol": "clawpal_server", + "round": round, + "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, + "latestStepTypes": observation.step_types, + "issues": observation.issue_summaries, + }), + ); + return Err(stalled_plan_error_message(observation)); + } + let healthy = diagnosis_is_healthy(&diagnosis); + report_clawpal_server_final_result(client, &plan.plan_id, healthy, &diagnosis).await; + if healthy { + return Ok(result_for_completion( + session_id, + round, + PlanKind::Repair, + last_command, + "Remote Doctor repair completed with a healthy rescue diagnosis.", + )); + } + } + + Err(round_limit_error_message(&diagnosis, &last_step_types)) +} + +async fn run_agent_planner_repair_loop( + app: &AppHandle, + client: &NodeClient, + bridge_client: &BridgeClient, + pool: &SshConnectionPool, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, +) -> Result { + let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "initial", 0, &diagnosis); + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + 0, + PlanKind::Detect, + None, + "Remote Doctor repair skipped because diagnosis is already healthy.", + )); + } + + let mut previous_results: Vec = Vec::new(); + 
let mut last_command = None; + let mut last_step_types: Vec = Vec::new(); + let mut round_observations: Vec = Vec::new(); + + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + let kind = next_agent_plan_kind_for_round(&diagnosis, &previous_results); + let config_context = build_config_excerpt_context( + &read_target_config_raw(app, target_location, instance_id).await?, + ); + let phase = match kind { + PlanKind::Detect => "planning_detect", + PlanKind::Investigate => "planning_investigate", + PlanKind::Repair => "planning_repair", + }; + let line = match kind { + PlanKind::Detect => format!("Requesting detection plan for round {round}"), + PlanKind::Investigate => format!("Requesting investigation plan for round {round}"), + PlanKind::Repair => format!("Requesting repair plan for round {round}"), + }; + emit_progress(Some(app), session_id, round, phase, line, Some(kind), None); + append_remote_doctor_log( + session_id, + json!({ + "event": "plan_request_context", + "protocol": "agent", + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "instanceId": instance_id, + "targetLocation": target_location, + "configContext": config_excerpt_log_summary(&config_context), + "diagnosisIssueCount": diagnosis.issues.len(), + "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), + }), + ); + let plan = request_agent_plan( + app, + client, + bridge_client, + pool, + session_id, + round, + kind, + target_location, + instance_id, + &diagnosis, + &config_context, + &previous_results, + ) + .await?; + append_remote_doctor_log( + session_id, + json!({ + "event": "plan_received", + "protocol": "agent", + "round": round, + "planKind": match plan.plan_kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "planId": plan.plan_id, + "summary": plan.summary, + "commandCount": plan.commands.len(), + "healthy": plan.healthy, 
+ "done": plan.done, + "success": plan.success, + }), + ); + previous_results.clear(); + last_step_types = agent_plan_step_types(&plan); + for command in &plan.commands { + last_command = Some(command.argv.clone()); + emit_progress( + Some(app), + session_id, + round, + match kind { + PlanKind::Detect => "executing_detect", + PlanKind::Investigate => "executing_investigate", + PlanKind::Repair => "executing_repair", + }, + format!( + "Running {} command: {}", + match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + command.argv.join(" ") + ), + Some(kind), + Some(command.argv.clone()), + ); + append_remote_doctor_log( + session_id, + json!({ + "event": "command_start", + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "argv": command.argv, + "timeoutSec": command.timeout_sec, + "purpose": command.purpose, + }), + ); + let command_result = + match execute_plan_command(app, pool, target_location, instance_id, &command.argv) + .await + { + Ok(result) => result, + Err(error) => { + return Err(plan_command_failure_message( + kind, + round, + &command.argv, + &error, + )); + } + }; + append_remote_doctor_log( + session_id, + json!({ + "event": "command_result", + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + return Err(format!( + "{} command failed in round {round}: {}", + match kind { + PlanKind::Detect => "Detect", + PlanKind::Investigate => "Investigate", + PlanKind::Repair => "Repair", + }, + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + + diagnosis = run_rescue_diagnosis(app, target_location, 
instance_id).await?; + append_diagnosis_log(session_id, "post_round", round, &diagnosis); + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + round, + kind, + last_command, + "Remote Doctor repair completed with a healthy rescue diagnosis.", + )); + } + if matches!(kind, PlanKind::Repair) + && plan.done + && plan.commands.is_empty() + && diagnosis_has_only_non_auto_fixable_issues(&diagnosis) + { + return Ok(result_for_completion_with_warnings( + session_id, + round, + kind, + last_command, + "Remote Doctor completed all safe automatic repairs. Remaining issues are non-auto-fixable warnings.", + )); + } + + round_observations.push(RepairRoundObservation::new( + round, + &last_step_types, + &diagnosis, + )); + if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { + let observation = round_observations + .last() + .expect("stalled observations should contain current round"); + append_remote_doctor_log( + session_id, + json!({ + "event": "repair_plan_stalled", + "protocol": "agent", + "round": round, + "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, + "latestStepTypes": observation.step_types, + "issues": observation.issue_summaries, + }), + ); + return Err(stalled_plan_error_message(observation)); + } + } + + Err(round_limit_error_message(&diagnosis, &last_step_types)) +} + +async fn start_remote_doctor_repair_impl( + app: AppHandle, + pool: &SshConnectionPool, + instance_id: String, + target_location: String, +) -> Result { + let target_location = parse_target_location(&target_location)?; + if matches!(target_location, TargetLocation::RemoteOpenclaw) { + ensure_remote_target_connected(pool, &instance_id).await?; + } + let session_id = Uuid::new_v4().to_string(); + let gateway = remote_doctor_gateway_config()?; + let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref())?; + log_dev(format!( + "[remote_doctor] start session={} instance_id={} target_location={:?} gateway_url={} 
auth_token_override={}", + session_id, + instance_id, + target_location, + gateway.url, + gateway.auth_token_override.is_some() + )); + append_remote_doctor_log( + &session_id, + json!({ + "event": "session_start", + "instanceId": instance_id, + "targetLocation": target_location, + "gatewayUrl": gateway.url, + "gatewayAuthTokenOverride": gateway.auth_token_override.is_some(), + }), + ); + + let client = NodeClient::new(); + client.connect(&gateway.url, app.clone(), creds).await?; + let bridge = BridgeClient::new(); + + let forced_protocol = configured_remote_doctor_protocol(); + let active_protocol = forced_protocol.unwrap_or(default_remote_doctor_protocol()); + let pool_ref: &SshConnectionPool = pool; + let app_handle = app.clone(); + let bridge_client = bridge.clone(); + let gateway_url = gateway.url.clone(); + let gateway_auth_override = gateway.auth_token_override.clone(); + if matches!(active_protocol, RemoteDoctorProtocol::AgentPlanner) + && gateway_url_is_local(&gateway_url) + { + ensure_local_remote_doctor_agent_ready()?; + } + if protocol_requires_bridge(active_protocol) { + ensure_agent_bridge_connected( + &app, + &bridge, + &gateway_url, + gateway_auth_override.as_deref(), + &session_id, + ) + .await; + } + let result = match active_protocol { + RemoteDoctorProtocol::AgentPlanner => { + let agent = run_agent_planner_repair_loop( + &app, + &client, + &bridge_client, + pool_ref, + &session_id, + &instance_id, + target_location, + ) + .await; + + if forced_protocol.is_none() + && matches!(&agent, Err(error) if is_unknown_method_error(error)) + { + append_remote_doctor_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "agent", + "to": "legacy_doctor", + "reason": agent.as_ref().err(), + }), + ); + run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let method = match kind { + PlanKind::Detect => detect_method_name(), + PlanKind::Investigate => 
repair_method_name(), + PlanKind::Repair => repair_method_name(), + }; + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + async move { + request_plan( + client, + &method, + kind, + session_id, + round, + target_location, + instance_id, + &previous_results, + ) + .await + } + }, + ) + .await + } else { + agent + } + } + RemoteDoctorProtocol::LegacyDoctor => { + let legacy = run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let method = match kind { + PlanKind::Detect => detect_method_name(), + PlanKind::Investigate => repair_method_name(), + PlanKind::Repair => repair_method_name(), + }; + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + async move { + request_plan( + client, + &method, + kind, + session_id, + round, + target_location, + instance_id, + &previous_results, + ) + .await + } + }, + ) + .await; + + if forced_protocol.is_none() + && matches!(&legacy, Err(error) if is_unknown_method_error(error)) + { + append_remote_doctor_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "legacy_doctor", + "to": "clawpal_server", + "reason": legacy.as_ref().err(), + }), + ); + log_dev(format!( + "[remote_doctor] session={} protocol fallback legacy_doctor -> clawpal_server", + session_id + )); + run_clawpal_server_repair_loop( + &app, + &client, + &session_id, + &instance_id, + target_location, + ) + .await + } else { + legacy + } + } + RemoteDoctorProtocol::ClawpalServer => { + let clawpal_server = run_clawpal_server_repair_loop( + &app, + &client, + &session_id, + &instance_id, + target_location, + ) + .await; + if forced_protocol.is_none() + && matches!(&clawpal_server, Err(error) if is_unknown_method_error(error)) + { + append_remote_doctor_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "clawpal_server", + "to": "agent", + "reason": 
clawpal_server.as_ref().err(), + }), + ); + let agent = run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + let app_handle = app_handle.clone(); + let bridge_client = bridge_client.clone(); + let gateway_url = gateway_url.clone(); + let gateway_auth_override = gateway_auth_override.clone(); + let empty_diagnosis = empty_diagnosis(); + let empty_config = empty_config_excerpt_context(); + async move { + ensure_agent_bridge_connected( + &app_handle, + &bridge_client, + &gateway_url, + gateway_auth_override.as_deref(), + session_id, + ) + .await; + let text = if bridge_client.is_connected().await { + run_agent_request_with_bridge( + &app_handle, + client, + &bridge_client, + pool_ref, + target_location, + instance_id, + remote_doctor_agent_id(), + &remote_doctor_agent_session_key(session_id), + &build_agent_plan_prompt( + kind, + session_id, + round, + target_location, + instance_id, + &empty_diagnosis, + &empty_config, + &previous_results, + ), + ) + .await? + } else { + client + .run_agent_request( + remote_doctor_agent_id(), + &remote_doctor_agent_session_key(session_id), + &build_agent_plan_prompt( + kind, + session_id, + round, + target_location, + instance_id, + &empty_diagnosis, + &empty_config, + &previous_results, + ), + ) + .await? 
+ }; + parse_agent_plan_response(kind, &text) + } + }, + ) + .await; + if matches!(&agent, Err(error) if is_unknown_method_error(error)) { + append_remote_doctor_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "agent", + "to": "legacy_doctor", + "reason": agent.as_ref().err(), + }), + ); + run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let method = match kind { + PlanKind::Detect => detect_method_name(), + PlanKind::Investigate => repair_method_name(), + PlanKind::Repair => repair_method_name(), + }; + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + async move { + request_plan( + client, + &method, + kind, + session_id, + round, + target_location, + instance_id, + &previous_results, + ) + .await + } + }, + ) + .await + } else { + agent + } + } else { + clawpal_server + } + } + }; + + let _ = client.disconnect().await; + let _ = bridge.disconnect().await; + + match result { + Ok(done) => { + append_remote_doctor_log( + &session_id, + json!({ + "event": "session_complete", + "status": "completed", + "latestDiagnosisHealthy": done.latest_diagnosis_healthy, + }), + ); + Ok(done) + } + Err(error) => { + append_remote_doctor_log( + &session_id, + json!({ + "event": "session_complete", + "status": "failed", + "reason": error, + }), + ); + Err(error) + } + } +} + +#[tauri::command] +pub async fn start_remote_doctor_repair( + app: AppHandle, + pool: State<'_, SshConnectionPool>, + instance_id: String, + target_location: String, +) -> Result { + start_remote_doctor_repair_impl(app, &pool, instance_id, target_location).await +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; + use crate::ssh::SshHostConfig; + use std::net::TcpStream; + use tauri::test::mock_app; + + #[test] + fn build_shell_command_escapes_single_quotes() { 
+ let command = build_shell_command(&["echo".into(), "a'b".into()]); + assert_eq!(command, "'echo' 'a'\\''b'"); + } + + #[test] + fn parse_target_location_rejects_unknown_values() { + let error = parse_target_location("elsewhere").unwrap_err(); + assert!(error.contains("Unsupported target location")); + } + + #[test] + fn apply_config_set_creates_missing_object_path() { + let mut value = json!({}); + apply_config_set( + &mut value, + "models.providers.openai.baseUrl", + json!("http://127.0.0.1:3000/v1"), + ) + .expect("config set"); + assert_eq!( + value + .pointer("/models/providers/openai/baseUrl") + .and_then(Value::as_str), + Some("http://127.0.0.1:3000/v1") + ); + } + + #[test] + fn apply_config_unset_removes_existing_leaf() { + let mut value = json!({ + "models": { + "providers": { + "openai": { + "baseUrl": "http://127.0.0.1:3000/v1", + "models": [{"id": "gpt-4.1"}] + } + } + } + }); + apply_config_unset(&mut value, "models.providers.openai.baseUrl").expect("config unset"); + assert!(value.pointer("/models/providers/openai/baseUrl").is_none()); + assert!(value.pointer("/models/providers/openai/models").is_some()); + } + + #[test] + fn parse_agent_plan_response_reads_json_payload() { + let text = r#"preface +{"planId":"detect-1","planKind":"detect","summary":"ok","commands":[{"argv":["openclaw","doctor","--json"]}],"healthy":false,"done":false,"success":false} +"#; + let plan = parse_agent_plan_response(PlanKind::Detect, text).expect("parse plan"); + assert_eq!(plan.plan_id, "detect-1"); + assert_eq!(plan.commands[0].argv, vec!["openclaw", "doctor", "--json"]); + } + + #[test] + fn build_agent_plan_prompt_mentions_target_and_schema() { + let prompt = build_agent_plan_prompt( + PlanKind::Repair, + "sess-1", + 3, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &sample_diagnosis(Vec::new()), + &ConfigExcerptContext { + config_excerpt: json!({"ok": true}), + config_excerpt_raw: None, + config_parse_error: None, + }, + &[], + ); + assert!(prompt.contains("Task: 
produce the next repair plan")); + assert!(prompt.contains("Target location: remote_openclaw")); + assert!(prompt.contains("\"planKind\": \"repair\"")); + assert!(prompt.contains("\"configExcerpt\"")); + assert!(prompt.contains("clawpal doctor probe-openclaw")); + assert!(prompt.contains("openclaw gateway status")); + assert!(prompt.contains("Output valid JSON only.")); + } + + #[test] + fn default_remote_doctor_protocol_prefers_agent() { + assert_eq!( + default_remote_doctor_protocol(), + RemoteDoctorProtocol::AgentPlanner + ); + } + + #[test] + fn unreadable_config_requires_investigate_plan_kind() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "primary.config.unreadable", + "code": "primary.config.unreadable", + "severity": "error", + "message": "Primary configuration could not be read", + "autoFixable": false, + "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", + "source": "primary" + })]); + assert_eq!(next_agent_plan_kind(&diagnosis), PlanKind::Investigate); + } + + #[test] + fn unreadable_config_switches_to_repair_after_investigation_results_exist() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "primary.config.unreadable", + "code": "primary.config.unreadable", + "severity": "error", + "message": "Primary configuration could not be read", + "autoFixable": false, + "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", + "source": "primary" + })]); + let previous_results = vec![CommandResult { + argv: vec!["clawpal".into(), "doctor".into(), "config-read-raw".into()], + exit_code: Some(0), + stdout: "{\"raw\":\"{\\n ddd\\n}\"}".into(), + stderr: String::new(), + duration_ms: 1, + timed_out: false, + }]; + assert_eq!( + next_agent_plan_kind_for_round(&diagnosis, &previous_results), + PlanKind::Repair + ); + } + + #[test] + fn non_auto_fixable_warning_only_diagnosis_is_terminal() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "rescue.gateway.unhealthy", + 
"code": "rescue.gateway.unhealthy", + "severity": "warn", + "message": "Rescue gateway is not healthy", + "autoFixable": false, + "fixHint": "Inspect rescue gateway logs before using failover", + "source": "rescue" + })]); + assert!(diagnosis_has_only_non_auto_fixable_issues(&diagnosis)); + } + + #[test] + fn investigate_prompt_requires_read_only_diagnosis_steps() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "primary.config.unreadable", + "code": "primary.config.unreadable", + "severity": "error", + "message": "Primary configuration could not be read", + "autoFixable": false, + "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", + "source": "primary" + })]); + let prompt = build_agent_plan_prompt( + PlanKind::Investigate, + "sess-1", + 1, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &diagnosis, + &build_config_excerpt_context("{\n ddd\n}"), + &[], + ); + assert!(prompt.contains("read-only")); + assert!(prompt.contains("Do not modify files")); + assert!(prompt.contains("\"planKind\": \"investigate\"")); + assert!(prompt.contains("configParseError")); + } + + #[test] + fn investigate_prompt_discourages_long_running_log_commands() { + let prompt = build_agent_plan_prompt( + PlanKind::Investigate, + "sess-1", + 1, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &sample_diagnosis(Vec::new()), + &empty_config_excerpt_context(), + &[], + ); + assert!(prompt.contains("Do not run follow/tail commands")); + assert!(prompt.contains("bounded")); + assert!(prompt.contains("Do not use heredocs")); + } + + #[test] + fn repair_prompt_discourages_unverified_openclaw_subcommands() { + let prompt = build_agent_plan_prompt( + PlanKind::Repair, + "sess-1", + 2, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &sample_diagnosis(Vec::new()), + &empty_config_excerpt_context(), + &[], + ); + assert!(prompt.contains("Do not invent OpenClaw subcommands")); + assert!(prompt.contains("Do not use `openclaw auth")); + 
assert!(prompt.contains("Do not use `openclaw doctor --json`")); + assert!(!prompt.contains("- `openclaw doctor --json`")); + } + + #[test] + fn remote_doctor_agent_id_is_dedicated() { + assert_eq!(remote_doctor_agent_id(), "clawpal-remote-doctor"); + assert!(!remote_doctor_agent_session_key("sess-1").contains("main")); + assert!( + remote_doctor_agent_session_key("sess-1").starts_with("agent:clawpal-remote-doctor:") + ); + } + + #[test] + fn ensure_local_remote_doctor_agent_creates_workspace_bootstrap_files() { + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-agent-test-{}", + Uuid::new_v4() + )); + let home_dir = temp_root.join("home"); + let clawpal_dir = temp_root.join("clawpal"); + let openclaw_dir = home_dir.join(".openclaw"); + std::fs::create_dir_all(&openclaw_dir).expect("create openclaw dir"); + std::fs::create_dir_all(&clawpal_dir).expect("create clawpal dir"); + std::fs::write( + openclaw_dir.join("openclaw.json"), + r#"{ + "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } }, + "agents": { + "defaults": { "model": "openai/gpt-4o-mini" }, + "list": [{ "id": "main", "workspace": "~/.openclaw/workspaces/main" }] + } +} +"#, + ) + .expect("write config"); + + set_active_openclaw_home_override(Some(home_dir.to_string_lossy().to_string())) + .expect("set openclaw override"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal override"); + + let result = ensure_local_remote_doctor_agent_ready(); + + set_active_openclaw_home_override(None).expect("clear openclaw override"); + set_active_clawpal_data_override(None).expect("clear clawpal override"); + + if let Err(error) = &result { + let _ = std::fs::remove_dir_all(&temp_root); + panic!("ensure agent ready: {error}"); + } + + let cfg: Value = serde_json::from_str( + &std::fs::read_to_string(openclaw_dir.join("openclaw.json")).expect("read config"), + ) + .expect("parse config"); + let agent = cfg["agents"]["list"] 
+ .as_array() + .and_then(|agents| { + agents.iter().find(|agent| { + agent.get("id").and_then(Value::as_str) == Some(remote_doctor_agent_id()) + }) + }) + .expect("dedicated agent entry"); + let workspace = agent["workspace"] + .as_str() + .expect("agent workspace") + .replace("~/", &format!("{}/", home_dir.to_string_lossy())); + for file_name in ["IDENTITY.md", "USER.md", "BOOTSTRAP.md", "AGENTS.md"] { + let content = std::fs::read_to_string(std::path::Path::new(&workspace).join(file_name)) + .unwrap_or_else(|error| panic!("read {file_name}: {error}")); + assert!( + !content.trim().is_empty(), + "{file_name} should not be empty" + ); + } + + let _ = std::fs::remove_dir_all(&temp_root); + } + + #[test] + fn only_agent_planner_protocol_requires_bridge() { + assert!(protocol_requires_bridge(RemoteDoctorProtocol::AgentPlanner)); + assert!(!protocol_requires_bridge( + RemoteDoctorProtocol::ClawpalServer + )); + assert!(!protocol_requires_bridge( + RemoteDoctorProtocol::LegacyDoctor + )); + } + + #[test] + fn clawpal_server_protocol_skips_local_rescue_preflight() { + assert!(!protocol_runs_rescue_preflight( + RemoteDoctorProtocol::ClawpalServer + )); + assert!(!protocol_runs_rescue_preflight( + RemoteDoctorProtocol::AgentPlanner + )); + } + + #[test] + fn remote_target_host_id_candidates_include_exact_and_stripped_ids() { + assert_eq!( + remote_target_host_id_candidates("ssh:15-235-214-81"), + vec!["ssh:15-235-214-81".to_string(), "15-235-214-81".to_string()] + ); + assert_eq!( + remote_target_host_id_candidates("e2e-remote-doctor"), + vec!["e2e-remote-doctor".to_string()] + ); + } + + #[test] + fn primary_remote_target_host_id_prefers_exact_instance_id() { + assert_eq!( + primary_remote_target_host_id("ssh:15-235-214-81").unwrap(), + "ssh:15-235-214-81" + ); + } + + #[test] + fn parse_invoke_argv_supports_command_string_payloads() { + let argv = parse_invoke_argv( + "clawpal", + &json!({ + "command": "doctor config-read models.providers.openai" + }), + ) + 
.expect("parse invoke argv"); + assert_eq!( + argv, + vec![ + "clawpal", + "doctor", + "config-read", + "models.providers.openai" + ] + ); + } + + #[test] + fn plan_commands_treat_clawpal_as_internal_tool() { + assert!(plan_command_uses_internal_clawpal_tool(&[ + "clawpal".to_string(), + "doctor".to_string(), + "config-read".to_string(), + ])); + assert!(!plan_command_uses_internal_clawpal_tool(&[ + "openclaw".to_string(), + "doctor".to_string(), + ])); + } + + #[test] + fn unsupported_openclaw_subcommand_is_rejected_early() { + let error = validate_plan_command_argv(&[ + "openclaw".to_string(), + "auth".to_string(), + "list".to_string(), + ]) + .unwrap_err(); + assert!(error.contains("Unsupported openclaw plan command")); + assert!(error.contains("openclaw auth list")); + } + + #[test] + fn openclaw_doctor_json_is_rejected_early() { + let error = validate_plan_command_argv(&[ + "openclaw".to_string(), + "doctor".to_string(), + "--json".to_string(), + ]) + .unwrap_err(); + assert!(error.contains("Unsupported openclaw plan command")); + assert!(error.contains("openclaw doctor --json")); + } + + #[test] + fn multiline_clawpal_exec_is_rejected_early() { + let error = validate_plan_command_argv(&[ + "clawpal".to_string(), + "doctor".to_string(), + "exec".to_string(), + "--tool".to_string(), + "python3".to_string(), + "--args".to_string(), + "- <<'PY'\nprint('hi')\nPY".to_string(), + ]) + .unwrap_err(); + assert!(error.contains("Unsupported clawpal doctor exec args")); + assert!(error.contains("heredocs")); + } + + #[test] + fn plan_command_failure_message_mentions_command_and_error() { + let error = plan_command_failure_message( + PlanKind::Investigate, + 2, + &[ + "openclaw".to_string(), + "gateway".to_string(), + "logs".to_string(), + ], + "ssh command failed: russh exec timed out after 25s", + ); + assert!(error.contains("Investigate command failed in round 2")); + assert!(error.contains("openclaw gateway logs")); + assert!(error.contains("timed out after 25s")); + 
}
+
+    fn sample_diagnosis(issues: Vec<Value>) -> RescuePrimaryDiagnosisResult {
+        serde_json::from_value(json!({
+            "status": if issues.is_empty() { "healthy" } else { "broken" },
+            "checkedAt": "2026-03-18T00:00:00Z",
+            "targetProfile": "primary",
+            "rescueProfile": "rescue",
+            "rescueConfigured": true,
+            "rescuePort": 18789,
+            "summary": {
+                "status": if issues.is_empty() { "healthy" } else { "broken" },
+                "headline": if issues.is_empty() { "Healthy" } else { "Broken" },
+                "recommendedAction": if issues.is_empty() { "No action needed" } else { "Repair issues" },
+                "fixableIssueCount": issues.len(),
+                "selectedFixIssueIds": issues.iter().filter_map(|issue| issue.get("id").and_then(Value::as_str)).collect::<Vec<_>>(),
+                "rootCauseHypotheses": [],
+                "fixSteps": [],
+                "confidence": 0.8,
+                "citations": [],
+                "versionAwareness": null
+            },
+            "sections": [],
+            "checks": [],
+            "issues": issues
+        }))
+        .expect("sample diagnosis")
+    }
+
+    #[test]
+    fn diagnosis_issue_summaries_capture_code_severity_and_message() {
+        let diagnosis = sample_diagnosis(vec![
+            json!({
+                "id": "gateway.unhealthy",
+                "code": "gateway.unhealthy",
+                "severity": "high",
+                "message": "Gateway is unhealthy",
+                "autoFixable": true,
+                "fixHint": "Restart gateway",
+                "source": "gateway"
+            }),
+            json!({
+                "id": "providers.base_url",
+                "code": "invalid.base_url",
+                "severity": "medium",
+                "message": "Provider base URL is invalid",
+                "autoFixable": true,
+                "fixHint": "Reset baseUrl",
+                "source": "config"
+            }),
+        ]);
+
+        let summary = diagnosis_issue_summaries(&diagnosis);
+        assert_eq!(summary.len(), 2);
+        assert_eq!(summary[0]["code"], "gateway.unhealthy");
+        assert_eq!(summary[0]["severity"], "high");
+        assert_eq!(summary[0]["title"], "Gateway is unhealthy");
+        assert_eq!(summary[0]["target"], "gateway");
+        assert_eq!(summary[1]["code"], "invalid.base_url");
+    }
+
+    #[test]
+    fn repeated_rediagnose_only_rounds_are_detected_as_stalled() {
+        let diagnosis = sample_diagnosis(vec![json!({
+            "id": "providers.base_url",
"code": "invalid.base_url", + "severity": "medium", + "message": "Provider base URL is invalid", + "autoFixable": true, + "fixHint": "Reset baseUrl", + "source": "config" + })]); + let step_types = vec!["doctorRediagnose".to_string()]; + + assert!(!repair_plan_stalled( + &[ + RepairRoundObservation::new(1, &step_types, &diagnosis), + RepairRoundObservation::new(2, &step_types, &diagnosis), + ], + 3, + )); + assert!(repair_plan_stalled( + &[ + RepairRoundObservation::new(1, &step_types, &diagnosis), + RepairRoundObservation::new(2, &step_types, &diagnosis), + RepairRoundObservation::new(3, &step_types, &diagnosis), + ], + 3, + )); + } + + #[test] + fn round_limit_error_message_includes_latest_issues_and_step_types() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "providers.base_url", + "code": "invalid.base_url", + "severity": "medium", + "message": "Provider base URL is invalid", + "autoFixable": true, + "fixHint": "Reset baseUrl", + "source": "config" + })]); + let error = round_limit_error_message(&diagnosis, &["doctorRediagnose".to_string()]); + assert!(error.contains("invalid.base_url")); + assert!(error.contains("doctorRediagnose")); + assert!(error.contains("Provider base URL is invalid")); + } + + #[test] + fn unreadable_config_context_uses_raw_excerpt_and_parse_error() { + let context = build_config_excerpt_context("{\n ddd\n}"); + assert!(context.config_excerpt.is_null()); + assert!(context + .config_excerpt_raw + .as_deref() + .unwrap_or_default() + .contains("ddd")); + assert!(context + .config_parse_error + .as_deref() + .unwrap_or_default() + .contains("key must be a string")); + } + + #[test] + fn unreadable_config_context_summary_marks_excerpt_missing() { + let context = build_config_excerpt_context("{\n ddd\n}"); + let summary = config_excerpt_log_summary(&context); + assert_eq!(summary["configExcerptPresent"], json!(false)); + assert_eq!(summary["configExcerptRawPresent"], json!(true)); + assert!(summary["configParseError"] + .as_str() + 
.unwrap_or_default() + .contains("key must be a string")); + } + + #[test] + fn config_read_response_returns_raw_context_for_unreadable_json() { + let value = config_read_response("{\n ddd\n}", None).expect("config read response"); + assert!(value["value"].is_null()); + assert!(value["raw"].as_str().unwrap_or_default().contains("ddd")); + assert!(value["parseError"] + .as_str() + .unwrap_or_default() + .contains("key must be a string")); + } + + #[test] + fn decode_base64_config_payload_reads_utf8_text() { + use base64::Engine as _; + let encoded = base64::engine::general_purpose::STANDARD.encode("{\"ok\":true}"); + let decoded = decode_base64_config_payload(&encoded).expect("decode payload"); + assert_eq!(decoded, "{\"ok\":true}"); + } + + #[test] + fn diagnosis_missing_rescue_profile_is_detected() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "rescue.profile.missing", + "code": "rescue.profile.missing", + "severity": "error", + "message": "Rescue profile \"rescue\" is not configured", + "autoFixable": false, + "fixHint": "Activate Rescue Bot first", + "source": "rescue" + })]); + assert!(diagnosis_missing_rescue_profile(&diagnosis)); + } + + #[test] + fn diagnosis_unhealthy_rescue_gateway_is_detected() { + let diagnosis = sample_diagnosis(vec![json!({ + "id": "rescue.gateway.unhealthy", + "code": "rescue.gateway.unhealthy", + "severity": "warn", + "message": "Rescue gateway is not healthy", + "autoFixable": false, + "fixHint": "Inspect rescue gateway logs before using failover", + "source": "rescue" + })]); + assert!(diagnosis_unhealthy_rescue_gateway(&diagnosis)); + } + + #[test] + fn rescue_setup_command_result_reports_activation() { + let result = rescue_setup_command_result("activate", "rescue", true, true, "active"); + assert_eq!(result.argv, vec!["manage_rescue_bot", "activate", "rescue"]); + assert_eq!(result.exit_code, Some(0)); + assert!(result.stdout.contains("configured=true")); + assert!(result.stdout.contains("active=true")); + } + + 
#[test]
+    fn rescue_setup_activation_error_mentions_runtime_state() {
+        let error = rescue_activation_error_message(
+            "rescue",
+            false,
+            "configured_inactive",
+            &[
+                "manage_rescue_bot status rescue".to_string(),
+                "openclaw --profile rescue gateway status".to_string(),
+            ],
+        );
+        assert!(error.contains("rescue"));
+        assert!(error.contains("configured_inactive"));
+        assert!(error.contains("did not become active"));
+        assert!(error.contains("manage_rescue_bot status rescue"));
+        assert!(error.contains("openclaw --profile rescue gateway status"));
+    }
+
+    #[test]
+    fn rescue_activation_diagnostic_commands_include_status_and_gateway_checks() {
+        let commands = rescue_activation_diagnostic_commands("rescue");
+        let rendered = commands
+            .iter()
+            .map(|command| command.join(" "))
+            .collect::<Vec<_>>();
+        assert!(rendered.contains(&"manage_rescue_bot status rescue".to_string()));
+        assert!(rendered.contains(&"openclaw --profile rescue gateway status".to_string()));
+        assert!(rendered
+            .contains(&"openclaw --profile rescue config get gateway.port --json".to_string()));
+    }
+
+    const E2E_CONTAINER_NAME: &str = "clawpal-e2e-remote-doctor";
+    const E2E_SSH_PORT: u16 = 2399;
+    const E2E_ROOT_PASSWORD: &str = "clawpal-remote-doctor-pass";
+    const E2E_DOCKERFILE: &str = r#"
+FROM ubuntu:22.04
+ENV DEBIAN_FRONTEND=noninteractive
+RUN apt-get update && apt-get install -y openssh-server && rm -rf /var/lib/apt/lists/* && mkdir /var/run/sshd
+RUN echo "root:ROOTPASS" | chpasswd && \
+    sed -i 's/#PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config && \
+    sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config && \
+    echo "PasswordAuthentication yes" >> /etc/ssh/sshd_config
+RUN mkdir -p /root/.openclaw
+RUN cat > /root/.openclaw/openclaw.json <<'EOF'
+{
+  "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } },
+  "auth": {
+    "profiles": {
+      "openai-default": {
+        "provider": "openai",
+        "apiKey": "sk-test"
+      }
+    }
+  },
+  "models": {
+ "providers": { + "openai": { + "baseUrl": "http://127.0.0.1:9/v1", + "models": [{ "id": "gpt-4o-mini", "name": "gpt-4o-mini" }] + } + } + }, + "agents": { + "defaults": { "model": "openai/gpt-4o-mini" }, + "list": [ { "id": "main", "model": "anthropic/claude-sonnet-4-20250514" } ] + }, + "channels": { + "discord": { + "guilds": { + "guild-1": { + "channels": { + "general": { "model": "openai/gpt-4o-mini" } + } + } + } + } + } +} +EOF +RUN cat > /usr/local/bin/openclaw <<'EOF' && chmod +x /usr/local/bin/openclaw +#!/bin/sh +STATE_DIR="${OPENCLAW_STATE_DIR:-${OPENCLAW_HOME:-$HOME/.openclaw}}" +CONFIG_PATH="$STATE_DIR/openclaw.json" +PROFILE="primary" +if [ "$1" = "--profile" ]; then + PROFILE="$2" + shift 2 +fi +case "$1" in + --version) + echo "openclaw 2026.3.2-test" + ;; + doctor) + if grep -q '127.0.0.1:9/v1' "$CONFIG_PATH"; then + echo '{"ok":false,"score":40,"issues":[{"id":"primary.models.base_url","code":"invalid.base_url","severity":"error","message":"provider baseUrl points to test blackhole","autoFixable":true,"fixHint":"Remove the bad baseUrl override"}]}' + else + echo '{"ok":true,"score":100,"issues":[],"checks":[{"id":"test","status":"ok"}]}' + fi + ;; + agents) + if [ "$2" = "list" ] && [ "$3" = "--json" ]; then + echo '[{"id":"main"}]' + else + echo "unsupported openclaw agents command" >&2 + exit 1 + fi + ;; + models) + if [ "$2" = "list" ] && [ "$3" = "--all" ] && [ "$4" = "--json" ] && [ "$5" = "--no-color" ]; then + echo '{"models":[{"key":"openai/gpt-4o-mini","provider":"openai","id":"gpt-4o-mini","name":"gpt-4o-mini","baseUrl":"https://api.openai.com/v1"}],"providers":{"openai":{"baseUrl":"https://api.openai.com/v1"}}}' + else + echo "unsupported openclaw models command" >&2 + exit 1 + fi + ;; + config) + if [ "$2" = "get" ] && [ "$3" = "gateway.port" ] && [ "$4" = "--json" ]; then + if [ "$PROFILE" = "rescue" ]; then + echo '19789' + else + echo '18789' + fi + else + echo "unsupported openclaw config command: $*" >&2 + exit 1 + fi + ;; + 
gateway)
+    case "$2" in
+      status)
+        if [ "$PROFILE" = "rescue" ] && [ "${OPENCLAW_RESCUE_GATEWAY_ACTIVE:-1}" != "1" ]; then
+          echo '{"running":false,"healthy":false,"gateway":{"running":false},"health":{"ok":false}}'
+        else
+          echo '{"running":true,"healthy":true,"gateway":{"running":true},"health":{"ok":true}}'
+        fi
+        ;;
+      restart|start|stop)
+        echo '{"ok":true}'
+        ;;
+      *)
+        echo "unsupported openclaw gateway command: $*" >&2
+        exit 1
+        ;;
+    esac
+    ;;
+  *)
+    echo "unsupported openclaw command: $*" >&2
+    exit 1
+    ;;
+esac
+EOF
+EXPOSE 22
+CMD ["/usr/sbin/sshd", "-D"]
+"#;
+
+    fn should_run_docker_e2e() -> bool {
+        std::env::var("CLAWPAL_RUN_REMOTE_DOCTOR_E2E")
+            .ok()
+            .as_deref()
+            == Some("1")
+    }
+
+    fn live_gateway_url() -> Option<String> {
+        std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL")
+            .ok()
+            .map(|value| value.trim().to_string())
+            .filter(|value| !value.is_empty())
+    }
+
+    fn live_gateway_token() -> Option<String> {
+        std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN")
+            .ok()
+            .map(|value| value.trim().to_string())
+            .filter(|value| !value.is_empty())
+    }
+
+    fn live_gateway_instance_id() -> String {
+        std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_INSTANCE_ID")
+            .ok()
+            .map(|value| value.trim().to_string())
+            .filter(|value| !value.is_empty())
+            .unwrap_or_else(|| "local".to_string())
+    }
+
+    fn live_gateway_target_location() -> TargetLocation {
+        match std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TARGET_LOCATION")
+            .ok()
+            .as_deref()
+        {
+            Some("remote_openclaw") => TargetLocation::RemoteOpenclaw,
+            _ => TargetLocation::LocalOpenclaw,
+        }
+    }
+
+    fn live_gateway_protocol() -> String {
+        std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_PROTOCOL")
+            .ok()
+            .map(|value| value.trim().to_string())
+            .filter(|value| !value.is_empty())
+            .unwrap_or_else(|| "clawpal_server".to_string())
+    }
+
+    fn docker_available() -> bool {
+        Command::new("docker")
+            .args(["info"])
+            .stdout(std::process::Stdio::null())
+            .stderr(std::process::Stdio::null())
+            .status()
.map(|status| status.success()) + .unwrap_or(false) + } + + fn cleanup_e2e_container() { + let _ = Command::new("docker") + .args(["rm", "-f", E2E_CONTAINER_NAME]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status(); + let _ = Command::new("docker") + .args(["rmi", "-f", &format!("{E2E_CONTAINER_NAME}:latest")]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status(); + } + + fn build_e2e_image() -> Result<(), String> { + let dockerfile = E2E_DOCKERFILE.replace("ROOTPASS", E2E_ROOT_PASSWORD); + let output = Command::new("docker") + .args([ + "build", + "-t", + &format!("{E2E_CONTAINER_NAME}:latest"), + "-f", + "-", + ".", + ]) + .stdin(std::process::Stdio::piped()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .current_dir(std::env::temp_dir()) + .spawn() + .and_then(|mut child| { + if let Some(ref mut stdin) = child.stdin { + stdin.write_all(dockerfile.as_bytes())?; + } + child.wait_with_output() + }) + .map_err(|error| format!("docker build failed: {error}"))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } + Ok(()) + } + + fn start_e2e_container() -> Result<(), String> { + start_e2e_container_with_env(&[]) + } + + fn start_e2e_container_with_env(env: &[(&str, &str)]) -> Result<(), String> { + let mut args = vec![ + "run".to_string(), + "-d".to_string(), + "--name".to_string(), + E2E_CONTAINER_NAME.to_string(), + ]; + for (key, value) in env { + args.push("-e".to_string()); + args.push(format!("{key}={value}")); + } + args.extend([ + "-p".to_string(), + format!("{E2E_SSH_PORT}:22"), + format!("{E2E_CONTAINER_NAME}:latest"), + ]); + let output = Command::new("docker") + .args(&args) + .output() + .map_err(|error| format!("docker run failed: {error}"))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } + Ok(()) + } + + fn wait_for_ssh(timeout_secs: 
u64) -> Result<(), String> { + let start = Instant::now(); + while start.elapsed().as_secs() < timeout_secs { + if TcpStream::connect(format!("127.0.0.1:{E2E_SSH_PORT}")).is_ok() { + std::thread::sleep(std::time::Duration::from_millis(500)); + return Ok(()); + } + std::thread::sleep(std::time::Duration::from_millis(300)); + } + Err("timeout waiting for ssh".into()) + } + + fn e2e_host_config() -> SshHostConfig { + SshHostConfig { + id: "e2e-remote-doctor".into(), + label: "E2E Remote Doctor".into(), + host: "127.0.0.1".into(), + port: E2E_SSH_PORT, + username: "root".into(), + auth_method: "password".into(), + key_path: None, + password: Some(E2E_ROOT_PASSWORD.into()), + passphrase: None, + } + } + + #[tokio::test] + async fn remote_doctor_docker_e2e_loop_completes() { + if !should_run_docker_e2e() { + eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); + return; + } + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let temp_root = + std::env::temp_dir().join(format!("clawpal-remote-doctor-e2e-{}", Uuid::new_v4())); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + let pool = SshConnectionPool::new(); + let cfg = e2e_host_config(); + pool.connect(&cfg).await.expect("ssh connect"); + + let session_id = Uuid::new_v4().to_string(); + let marker = "/tmp/clawpal-remote-doctor-fixed"; + let result = run_remote_doctor_repair_loop( + Option::<&AppHandle>::None, + 
&pool, + &session_id, + &format!("ssh:{}", cfg.id), + TargetLocation::RemoteOpenclaw, + |kind, round, previous_results| async move { + match (kind, round) { + (PlanKind::Detect, 1) => Ok(PlanResponse { + plan_id: "detect-1".into(), + plan_kind: PlanKind::Detect, + summary: "Initial detect".into(), + commands: vec![PlanCommand { + argv: vec!["openclaw".into(), "--version".into()], + timeout_sec: Some(10), + purpose: Some("collect version".into()), + continue_on_failure: Some(false), + }], + healthy: false, + done: false, + success: false, + }), + (PlanKind::Repair, 1) => { + assert_eq!(previous_results.len(), 1); + Ok(PlanResponse { + plan_id: "repair-1".into(), + plan_kind: PlanKind::Repair, + summary: "Write marker".into(), + commands: vec![PlanCommand { + argv: vec![ + "sh".into(), + "-lc".into(), + format!("printf 'fixed' > {marker}"), + ], + timeout_sec: Some(10), + purpose: Some("mark repaired".into()), + continue_on_failure: Some(false), + }], + healthy: false, + done: false, + success: false, + }) + } + (PlanKind::Detect, 2) => { + assert_eq!(previous_results.len(), 1); + assert_eq!( + previous_results[0].stdout.trim(), + "", + "repair command should not print to stdout" + ); + Ok(PlanResponse { + plan_id: "detect-2".into(), + plan_kind: PlanKind::Detect, + summary: "Marker exists".into(), + commands: Vec::new(), + healthy: true, + done: true, + success: true, + }) + } + _ => Err(format!( + "unexpected planner request: {:?} round {}", + kind, round + )), + } + }, + ) + .await + .expect("remote doctor loop should complete"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + assert_eq!(result.round, 2); + + let marker_result = pool + .exec(&cfg.id, &format!("test -f {marker}")) + .await + .expect("marker check"); + assert_eq!(marker_result.exit_code, 0); + + let log_path = clawpal_dir + .join("doctor") + .join("remote") + .join(format!("{session_id}.jsonl")); + let log_text = 
std::fs::read_to_string(&log_path).expect("read remote doctor log"); + assert!(log_text.contains("\"planKind\":\"detect\"")); + assert!(log_text.contains("\"planKind\":\"repair\"")); + let _ = std::fs::remove_dir_all(temp_root); + set_active_clawpal_data_override(None).expect("clear clawpal data"); + } + + #[tokio::test] + async fn remote_doctor_docker_e2e_rescue_activation_fails_when_gateway_stays_inactive() { + if !should_run_docker_e2e() { + eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); + return; + } + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container_with_env(&[("OPENCLAW_RESCUE_GATEWAY_ACTIVE", "0")]) + .expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let pool = app_handle.state::(); + let cfg = e2e_host_config(); + pool.connect(&cfg).await.expect("ssh connect"); + + let error = ensure_rescue_profile_ready( + &app_handle, + TargetLocation::RemoteOpenclaw, + &format!("ssh:{}", cfg.id), + ) + .await + .expect_err("rescue activation should fail when gateway remains inactive"); + + assert!(error.message.contains("did not become active")); + assert!(error.message.contains("configured_inactive")); + assert!(error + .diagnostics + .iter() + .any(|result| result.argv.join(" ") == "manage_rescue_bot status rescue")); + } + + #[tokio::test] + async fn remote_doctor_live_gateway_uses_configured_url_and_token() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to 
enable"); + return; + }; + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = + std::env::temp_dir().join(format!("clawpal-remote-doctor-live-{}", Uuid::new_v4())); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let gateway = remote_doctor_gateway_config().expect("gateway config"); + assert_eq!(gateway.url, url); + assert_eq!(gateway.auth_token_override.as_deref(), Some(token.as_str())); + + let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) + .expect("gateway credentials"); + assert!(creds.is_some(), "expected token override credentials"); + + let client = NodeClient::new(); + client + .connect(&gateway.url, app.handle().clone(), creds) + .await + .expect("connect live remote doctor gateway"); + assert!(client.is_connected().await); + match live_gateway_protocol().as_str() { + "clawpal_server" => { + let response = client + .send_request( + "remote_repair_plan.request", + json!({ + "requestId": format!("live-e2e-{}", Uuid::new_v4()), + "targetId": live_gateway_instance_id(), + "context": { + "configExcerpt": { + "models": { + "providers": { + "openai-codex": { + "baseUrl": "http://127.0.0.1:9/v1" + } + } + } + } + } + }), + ) + .await + .expect("request clawpal-server remote repair plan"); + let plan_id = response + .get("planId") + .and_then(|value| value.as_str()) + .unwrap_or_default(); + assert!( + !plan_id.trim().is_empty(), + "clawpal-server response should include a plan id" + ); + let steps = response + .get("steps") + 
.and_then(|value| value.as_array()) + .cloned() + .unwrap_or_default(); + assert!( + !steps.is_empty(), + "clawpal-server response should include repair steps" + ); + } + _ => { + let detect_plan = request_plan( + &client, + &detect_method_name(), + PlanKind::Detect, + &format!("live-e2e-{}", Uuid::new_v4()), + 1, + live_gateway_target_location(), + &live_gateway_instance_id(), + &[], + ) + .await + .expect("request live detection plan"); + assert!( + !detect_plan.plan_id.trim().is_empty(), + "live detection plan should include a plan id" + ); + } + } + client.disconnect().await.expect("disconnect"); + + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); + } + + #[tokio::test] + async fn remote_doctor_live_gateway_full_repair_loop_completes() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-live-loop-{}", + Uuid::new_v4() + )); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home 
override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let cfg = e2e_host_config(); + let pool = app_handle.state::(); + pool.connect(&cfg).await.expect("ssh connect"); + + let gateway = remote_doctor_gateway_config().expect("gateway config"); + let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) + .expect("gateway credentials"); + let client = NodeClient::new(); + client + .connect(&gateway.url, app_handle.clone(), creds) + .await + .expect("connect live remote doctor gateway"); + + let session_id = Uuid::new_v4().to_string(); + let result = run_clawpal_server_repair_loop( + &app_handle, + &client, + &session_id, + &format!("ssh:{}", cfg.id), + TargetLocation::RemoteOpenclaw, + ) + .await + .expect("full live remote doctor repair loop should complete"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + + client.disconnect().await.expect("disconnect"); + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); + } + + #[tokio::test] + async fn remote_doctor_live_start_command_remote_target_completes_without_bridge_pairing() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + 
wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-live-start-{}", + Uuid::new_v4() + )); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); + let pool = app_handle.state::(); + + let result = start_remote_doctor_repair_impl( + app_handle.clone(), + &pool, + format!("ssh:{}", cfg.id), + "remote_openclaw".to_string(), + ) + .await + .expect("start command should complete remote repair"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + + let log_path = clawpal_dir + .join("doctor") + .join("remote") + .join(format!("{}.jsonl", result.session_id)); + let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor session log"); + assert!( + !log_text.contains("\"event\":\"bridge_connect_failed\""), + "clawpal_server path should not attempt bridge pairing: {log_text}" + ); + + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); + } + + #[tokio::test] + async fn remote_doctor_live_gateway_repairs_unreadable_remote_config() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = 
live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-live-raw-config-{}", + Uuid::new_v4() + )); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); + let pool = app_handle.state::(); + pool.connect(&cfg).await.expect("ssh connect"); + pool.exec_login( + &cfg.id, + "cat > ~/.openclaw/openclaw.json <<'EOF'\n{\n ddd\n}\nEOF", + ) + .await + .expect("corrupt remote config"); + + let result = start_remote_doctor_repair_impl( + app_handle.clone(), + &pool, + cfg.id.clone(), + "remote_openclaw".to_string(), + ) + .await + .expect("start command should repair unreadable config"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + + let repaired = pool + .exec_login(&cfg.id, "python3 - <<'PY'\nimport json, 
pathlib\njson.load(open(pathlib.Path.home()/'.openclaw'/'openclaw.json'))\nprint('ok')\nPY") + .await + .expect("read repaired config"); + assert_eq!( + repaired.exit_code, 0, + "repaired config should be valid JSON: {}", + repaired.stderr + ); + assert_eq!(repaired.stdout.trim(), "ok"); + + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); + } +} diff --git a/src-tauri/src/remote_doctor/mod.rs b/src-tauri/src/remote_doctor/mod.rs new file mode 100644 index 00000000..5239c1a5 --- /dev/null +++ b/src-tauri/src/remote_doctor/mod.rs @@ -0,0 +1,10 @@ +mod agent; +mod config; +mod legacy; +mod plan; +mod repair_loops; +mod session; +mod types; + +pub use legacy::start_remote_doctor_repair; +pub use types::RemoteDoctorRepairResult; diff --git a/src-tauri/src/remote_doctor/plan.rs b/src-tauri/src/remote_doctor/plan.rs new file mode 100644 index 00000000..fc8b9e0a --- /dev/null +++ b/src-tauri/src/remote_doctor/plan.rs @@ -0,0 +1 @@ +// Placeholder for plan parsing, validation, and execution helpers. diff --git a/src-tauri/src/remote_doctor/repair_loops.rs b/src-tauri/src/remote_doctor/repair_loops.rs new file mode 100644 index 00000000..c8007366 --- /dev/null +++ b/src-tauri/src/remote_doctor/repair_loops.rs @@ -0,0 +1 @@ +// Placeholder for repair loop orchestration helpers. 
diff --git a/src-tauri/src/remote_doctor/session.rs b/src-tauri/src/remote_doctor/session.rs new file mode 100644 index 00000000..3ca70bf7 --- /dev/null +++ b/src-tauri/src/remote_doctor/session.rs @@ -0,0 +1,171 @@ +use std::fs::{create_dir_all, OpenOptions}; +use std::io::Write; +use std::path::PathBuf; + +use serde_json::Value; +use tauri::{AppHandle, Emitter, Runtime}; + +use super::types::{PlanKind, RemoteDoctorProgressEvent, RemoteDoctorRepairResult}; +use crate::models::resolve_paths; + +pub(crate) fn session_log_dir() -> PathBuf { + resolve_paths().clawpal_dir.join("doctor").join("remote") +} + +pub(crate) fn append_session_log(session_id: &str, payload: Value) { + let dir = session_log_dir(); + if create_dir_all(&dir).is_err() { + return; + } + let path = dir.join(format!("{session_id}.jsonl")); + let Ok(mut file) = OpenOptions::new().create(true).append(true).open(path) else { + return; + }; + let _ = writeln!(file, "{}", payload); +} + +pub(crate) fn emit_session_progress( + app: Option<&AppHandle>, + session_id: &str, + round: usize, + phase: &str, + line: impl Into, + plan_kind: Option, + command: Option>, +) { + let payload = progress_event(session_id, round, phase, line, plan_kind, command); + if let Some(app) = app { + let _ = app.emit("doctor:remote-repair-progress", payload); + } +} + +pub(crate) fn result_for_completion( + session_id: &str, + round: usize, + last_plan_kind: PlanKind, + last_command: Option>, + message: &str, +) -> RemoteDoctorRepairResult { + RemoteDoctorRepairResult { + mode: "remoteDoctor".into(), + status: "completed".into(), + round, + phase: "completed".into(), + last_plan_kind: plan_kind_name(last_plan_kind).into(), + latest_diagnosis_healthy: true, + last_command, + session_id: session_id.to_string(), + message: message.into(), + } +} + +pub(crate) fn result_for_completion_with_warnings( + session_id: &str, + round: usize, + last_plan_kind: PlanKind, + last_command: Option>, + message: &str, +) -> RemoteDoctorRepairResult 
{ + RemoteDoctorRepairResult { + mode: "remoteDoctor".into(), + status: "completed_with_warnings".into(), + round, + phase: "completed".into(), + last_plan_kind: plan_kind_name(last_plan_kind).into(), + latest_diagnosis_healthy: false, + last_command, + session_id: session_id.to_string(), + message: message.into(), + } +} + +fn progress_event( + session_id: &str, + round: usize, + phase: &str, + line: impl Into, + plan_kind: Option, + command: Option>, +) -> RemoteDoctorProgressEvent { + RemoteDoctorProgressEvent { + session_id: session_id.to_string(), + mode: "remoteDoctor".into(), + round, + phase: phase.to_string(), + line: line.into(), + plan_kind: plan_kind.map(|kind| plan_kind_name(kind).into()), + command, + } +} + +fn plan_kind_name(kind: PlanKind) -> &'static str { + match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + } +} + +#[cfg(test)] +mod tests { + use serde_json::json; + + use super::*; + use crate::cli_runner::set_active_clawpal_data_override; + + #[test] + fn append_session_log_writes_jsonl_line() { + let temp_root = std::env::temp_dir().join("clawpal-remote-doctor-session-log-test"); + let clawpal_dir = temp_root.join(".clawpal"); + std::fs::create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal override"); + + append_session_log("sess-1", json!({"event": "hello"})); + + set_active_clawpal_data_override(None).expect("clear clawpal override"); + + let log_path = clawpal_dir.join("doctor").join("remote").join("sess-1.jsonl"); + let log_text = std::fs::read_to_string(&log_path).expect("read session log"); + assert!(log_text.contains("\"event\":\"hello\"")); + + let _ = std::fs::remove_dir_all(&temp_root); + } + + #[test] + fn progress_event_uses_snake_case_plan_kind() { + let payload = progress_event( + "sess-1", + 2, + "planning_repair", + "Requesting repair plan", + 
Some(PlanKind::Repair), + None, + ); + assert_eq!(payload.plan_kind.as_deref(), Some("repair")); + } + + #[test] + fn completion_helpers_preserve_session_round_and_last_command() { + let last_command = Some(vec!["clawpal".to_string(), "doctor".to_string()]); + + let completed = + result_for_completion("sess-1", 4, PlanKind::Detect, last_command.clone(), "done"); + assert_eq!(completed.session_id, "sess-1"); + assert_eq!(completed.round, 4); + assert_eq!(completed.last_command, last_command); + assert!(completed.latest_diagnosis_healthy); + + let warning = result_for_completion_with_warnings( + "sess-2", + 5, + PlanKind::Repair, + None, + "warning", + ); + assert_eq!(warning.session_id, "sess-2"); + assert_eq!(warning.round, 5); + assert_eq!(warning.last_plan_kind, "repair"); + assert!(!warning.latest_diagnosis_healthy); + } +} diff --git a/src-tauri/src/remote_doctor/types.rs b/src-tauri/src/remote_doctor/types.rs new file mode 100644 index 00000000..402edde2 --- /dev/null +++ b/src-tauri/src/remote_doctor/types.rs @@ -0,0 +1,243 @@ +use serde::{Deserialize, Serialize}; +use serde_json::{json, Value}; + +use crate::commands::{RescuePrimaryDiagnosisResult, RescuePrimaryIssue}; + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub(crate) enum TargetLocation { + LocalOpenclaw, + RemoteOpenclaw, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub(crate) enum PlanKind { + Detect, + Investigate, + Repair, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct PlanCommand { + pub(crate) argv: Vec, + pub(crate) timeout_sec: Option, + pub(crate) purpose: Option, + pub(crate) continue_on_failure: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct PlanResponse { + pub(crate) plan_id: String, + pub(crate) plan_kind: PlanKind, + 
pub(crate) summary: String, + #[serde(default)] + pub(crate) commands: Vec, + #[serde(default)] + pub(crate) healthy: bool, + #[serde(default)] + pub(crate) done: bool, + #[serde(default)] + pub(crate) success: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct CommandResult { + pub(crate) argv: Vec, + pub(crate) exit_code: Option, + pub(crate) stdout: String, + pub(crate) stderr: String, + pub(crate) duration_ms: u64, + pub(crate) timed_out: bool, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum RemoteDoctorProtocol { + AgentPlanner, + LegacyDoctor, + ClawpalServer, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct ClawpalServerPlanResponse { + pub(crate) request_id: String, + pub(crate) plan_id: String, + pub(crate) summary: String, + #[serde(default)] + pub(crate) steps: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct ClawpalServerPlanStep { + #[serde(rename = "type")] + pub(crate) step_type: String, + pub(crate) path: Option, + pub(crate) value: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteDoctorRepairResult { + pub(crate) mode: String, + pub(crate) status: String, + pub(crate) round: usize, + pub(crate) phase: String, + pub(crate) last_plan_kind: String, + pub(crate) latest_diagnosis_healthy: bool, + pub(crate) last_command: Option>, + pub(crate) session_id: String, + pub(crate) message: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct RemoteDoctorProgressEvent { + pub(crate) session_id: String, + pub(crate) mode: String, + pub(crate) round: usize, + pub(crate) phase: String, + pub(crate) line: String, + pub(crate) plan_kind: Option, + pub(crate) command: Option>, +} + +#[derive(Debug, Clone)] +pub(crate) struct 
ConfigExcerptContext { + pub(crate) config_excerpt: Value, + pub(crate) config_excerpt_raw: Option, + pub(crate) config_parse_error: Option, +} + +#[derive(Debug, Clone)] +pub(crate) struct RepairRoundObservation { + pub(crate) round: usize, + pub(crate) step_types: Vec, + pub(crate) diagnosis_signature: String, + pub(crate) issue_summaries: Vec, +} + +impl RepairRoundObservation { + pub(crate) fn new( + round: usize, + step_types: &[String], + diagnosis: &RescuePrimaryDiagnosisResult, + ) -> Self { + let issue_summaries = diagnosis_issue_summaries(diagnosis); + let diagnosis_signature = + serde_json::to_string(&issue_summaries).unwrap_or_else(|_| "[]".to_string()); + Self { + round, + step_types: step_types.to_vec(), + diagnosis_signature, + issue_summaries, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct StoredRemoteDoctorIdentity { + pub(crate) version: u8, + pub(crate) created_at_ms: u64, + pub(crate) device_id: String, + pub(crate) private_key_pem: String, +} + +pub(crate) fn parse_target_location(raw: &str) -> Result { + match raw { + "local_openclaw" => Ok(TargetLocation::LocalOpenclaw), + "remote_openclaw" => Ok(TargetLocation::RemoteOpenclaw), + other => Err(format!("Unsupported target location: {other}")), + } +} + +pub(crate) fn diagnosis_issue_summaries(diagnosis: &RescuePrimaryDiagnosisResult) -> Vec { + diagnosis.issues.iter().map(summarize_issue).collect() +} + +fn summarize_issue(issue: &RescuePrimaryIssue) -> Value { + json!({ + "id": issue.id, + "code": issue.code, + "severity": issue.severity, + "title": issue.message, + "target": issue.source, + "autoFixable": issue.auto_fixable, + "fixHint": issue.fix_hint, + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::commands::RescuePrimarySummary; + + #[test] + fn parse_target_location_accepts_known_values() { + assert_eq!( + parse_target_location("local_openclaw").unwrap(), + TargetLocation::LocalOpenclaw + ); + 
assert_eq!( + parse_target_location("remote_openclaw").unwrap(), + TargetLocation::RemoteOpenclaw + ); + } + + #[test] + fn parse_target_location_rejects_unknown_values() { + let error = parse_target_location("elsewhere").unwrap_err(); + assert!(error.contains("Unsupported target location")); + } + + #[test] + fn repair_round_observation_uses_stable_diagnosis_signature() { + let step_types = vec!["repair_config".to_string()]; + let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { + id: "issue-1".to_string(), + code: "primary.config.unreadable".to_string(), + severity: "error".to_string(), + message: "Unreadable config".to_string(), + auto_fixable: false, + fix_hint: Some("Repair the config".to_string()), + source: "primary".to_string(), + }]); + + let first = RepairRoundObservation::new(1, &step_types, &diagnosis); + let second = RepairRoundObservation::new(2, &step_types, &diagnosis); + + assert_eq!(first.diagnosis_signature, second.diagnosis_signature); + assert_eq!(first.issue_summaries, second.issue_summaries); + } + + fn sample_diagnosis(issues: Vec) -> RescuePrimaryDiagnosisResult { + RescuePrimaryDiagnosisResult { + status: "degraded".to_string(), + checked_at: "2026-03-19T00:00:00Z".to_string(), + target_profile: "primary".to_string(), + rescue_profile: "rescue".to_string(), + rescue_configured: true, + rescue_port: Some(18789), + summary: RescuePrimarySummary { + status: "degraded".to_string(), + headline: "Issues found".to_string(), + recommended_action: "Repair".to_string(), + fixable_issue_count: 0, + selected_fix_issue_ids: Vec::new(), + root_cause_hypotheses: Vec::new(), + fix_steps: Vec::new(), + confidence: None, + citations: Vec::new(), + version_awareness: None, + }, + sections: Vec::new(), + checks: Vec::new(), + issues, + } + } +} From b8f40e91ced211d0dc1d0700e3ede8d8f7919fd3 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 15:04:13 +0800 Subject: [PATCH 08/20] refactor: extract remote doctor shared modules --- 
src-tauri/src/remote_doctor.rs | 4525 ------------------------- src-tauri/src/remote_doctor/config.rs | 324 +- src-tauri/src/remote_doctor/legacy.rs | 233 +- 3 files changed, 334 insertions(+), 4748 deletions(-) delete mode 100644 src-tauri/src/remote_doctor.rs diff --git a/src-tauri/src/remote_doctor.rs b/src-tauri/src/remote_doctor.rs deleted file mode 100644 index e73f8d8f..00000000 --- a/src-tauri/src/remote_doctor.rs +++ /dev/null @@ -1,4525 +0,0 @@ -use std::fs::{create_dir_all, OpenOptions}; -use std::io::Write; -use std::path::PathBuf; -use std::process::Command; -use std::time::Instant; - -use base64::Engine; -use ed25519_dalek::pkcs8::EncodePrivateKey; -use ed25519_dalek::SigningKey; -use serde::{Deserialize, Serialize}; -use serde_json::{json, Value}; -use sha2::{Digest, Sha256}; -use tauri::{AppHandle, Emitter, Manager, Runtime, State}; -use uuid::Uuid; - -use crate::bridge_client::BridgeClient; -use crate::cli_runner::{get_active_openclaw_home_override, run_openclaw, run_openclaw_remote}; -use crate::commands::logs::log_dev; -use crate::commands::preferences::load_app_preferences_from_paths; -use crate::commands::{agent::create_agent, agent::setup_agent_identity}; -use crate::commands::{ - diagnose_primary_via_rescue, manage_rescue_bot, read_raw_config, - remote_diagnose_primary_via_rescue, remote_manage_rescue_bot, remote_read_raw_config, - remote_restart_gateway, remote_write_raw_config, restart_gateway, RescuePrimaryDiagnosisResult, -}; -use crate::config_io::read_openclaw_config; -use crate::models::resolve_paths; -use crate::node_client::{GatewayCredentials, NodeClient}; -use crate::ssh::SshConnectionPool; - -const DEFAULT_GATEWAY_HOST: &str = "127.0.0.1"; -const DEFAULT_GATEWAY_PORT: u16 = 18789; -const DEFAULT_DETECT_METHOD: &str = "doctor.get_detection_plan"; -const DEFAULT_REPAIR_METHOD: &str = "doctor.get_repair_plan"; -const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; -const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; -const REMOTE_DOCTOR_AGENT_ID: 
&str = "clawpal-remote-doctor"; - -#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "snake_case")] -enum TargetLocation { - LocalOpenclaw, - RemoteOpenclaw, -} - -#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "snake_case")] -enum PlanKind { - Detect, - Investigate, - Repair, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -struct PlanCommand { - argv: Vec, - timeout_sec: Option, - purpose: Option, - continue_on_failure: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -struct PlanResponse { - plan_id: String, - plan_kind: PlanKind, - summary: String, - #[serde(default)] - commands: Vec, - #[serde(default)] - healthy: bool, - #[serde(default)] - done: bool, - #[serde(default)] - success: bool, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -struct CommandResult { - argv: Vec, - exit_code: Option, - stdout: String, - stderr: String, - duration_ms: u64, - timed_out: bool, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -enum RemoteDoctorProtocol { - AgentPlanner, - LegacyDoctor, - ClawpalServer, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -struct ClawpalServerPlanResponse { - request_id: String, - plan_id: String, - summary: String, - #[serde(default)] - steps: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -struct ClawpalServerPlanStep { - #[serde(rename = "type")] - step_type: String, - path: Option, - value: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct RemoteDoctorRepairResult { - mode: String, - status: String, - round: usize, - phase: String, - last_plan_kind: String, - latest_diagnosis_healthy: bool, - last_command: Option>, - session_id: String, - message: String, -} - -#[derive(Debug, 
Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RemoteDoctorProgressEvent { - session_id: String, - mode: String, - round: usize, - phase: String, - line: String, - plan_kind: Option, - command: Option>, -} - -#[derive(Debug, Clone)] -struct RemoteDoctorGatewayConfig { - url: String, - auth_token_override: Option, -} - -#[derive(Debug, Clone)] -struct ConfigExcerptContext { - config_excerpt: Value, - config_excerpt_raw: Option, - config_parse_error: Option, -} - -#[derive(Debug, Clone)] -struct RepairRoundObservation { - round: usize, - step_types: Vec, - diagnosis_signature: String, - issue_summaries: Vec, -} - -impl RepairRoundObservation { - fn new(round: usize, step_types: &[String], diagnosis: &RescuePrimaryDiagnosisResult) -> Self { - let issue_summaries = diagnosis_issue_summaries(diagnosis); - let diagnosis_signature = - serde_json::to_string(&issue_summaries).unwrap_or_else(|_| "[]".to_string()); - Self { - round, - step_types: step_types.to_vec(), - diagnosis_signature, - issue_summaries, - } - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -struct StoredRemoteDoctorIdentity { - version: u8, - created_at_ms: u64, - device_id: String, - private_key_pem: String, -} - -fn parse_target_location(raw: &str) -> Result { - match raw { - "local_openclaw" => Ok(TargetLocation::LocalOpenclaw), - "remote_openclaw" => Ok(TargetLocation::RemoteOpenclaw), - other => Err(format!("Unsupported target location: {other}")), - } -} - -fn remote_doctor_log_dir() -> PathBuf { - resolve_paths().clawpal_dir.join("doctor").join("remote") -} - -fn append_remote_doctor_log(session_id: &str, payload: Value) { - let dir = remote_doctor_log_dir(); - if create_dir_all(&dir).is_err() { - return; - } - let path = dir.join(format!("{session_id}.jsonl")); - let Ok(mut file) = OpenOptions::new().create(true).append(true).open(path) else { - return; - }; - let _ = writeln!(file, "{}", payload); -} - -fn emit_progress( - 
app: Option<&AppHandle>, - session_id: &str, - round: usize, - phase: &str, - line: impl Into, - plan_kind: Option, - command: Option>, -) { - let payload = RemoteDoctorProgressEvent { - session_id: session_id.to_string(), - mode: "remoteDoctor".into(), - round, - phase: phase.to_string(), - line: line.into(), - plan_kind: plan_kind.map(|kind| match kind { - PlanKind::Detect => "detect".into(), - PlanKind::Investigate => "investigate".into(), - PlanKind::Repair => "repair".into(), - }), - command, - }; - if let Some(app) = app { - let _ = app.emit("doctor:remote-repair-progress", payload); - } -} - -fn remote_doctor_gateway_config() -> Result { - let paths = resolve_paths(); - let app_preferences = load_app_preferences_from_paths(&paths); - if let Some(url) = app_preferences.remote_doctor_gateway_url { - return Ok(RemoteDoctorGatewayConfig { - url, - auth_token_override: app_preferences.remote_doctor_gateway_auth_token, - }); - } - let configured_port = std::fs::read_to_string(&paths.config_path) - .ok() - .and_then(|text| serde_json::from_str::(&text).ok()) - .and_then(|config| { - config - .get("gateway") - .and_then(|gateway| gateway.get("port")) - .and_then(|value| value.as_u64()) - }) - .map(|value| value as u16) - .unwrap_or(DEFAULT_GATEWAY_PORT); - Ok(RemoteDoctorGatewayConfig { - url: format!("ws://{DEFAULT_GATEWAY_HOST}:{configured_port}"), - auth_token_override: app_preferences.remote_doctor_gateway_auth_token, - }) -} - -fn remote_doctor_gateway_credentials( - auth_token_override: Option<&str>, -) -> Result, String> { - let Some(token) = auth_token_override.filter(|value| !value.trim().is_empty()) else { - return Ok(None); - }; - let identity = load_or_create_remote_doctor_identity()?; - Ok(Some(GatewayCredentials { - token: token.to_string(), - device_id: identity.device_id, - private_key_pem: identity.private_key_pem, - })) -} - -fn remote_doctor_identity_path() -> PathBuf { - resolve_paths() - .clawpal_dir - .join("remote-doctor") - 
.join("device-identity.json") -} - -fn load_or_create_remote_doctor_identity() -> Result { - let path = remote_doctor_identity_path(); - if let Ok(text) = std::fs::read_to_string(&path) { - if let Ok(identity) = serde_json::from_str::(&text) { - if identity.version == 1 - && !identity.device_id.trim().is_empty() - && !identity.private_key_pem.trim().is_empty() - { - return Ok(identity); - } - } - } - - let parent = path - .parent() - .ok_or("Failed to resolve remote doctor identity directory")?; - create_dir_all(parent) - .map_err(|e| format!("Failed to create remote doctor identity dir: {e}"))?; - - let mut secret = [0u8; 32]; - getrandom::getrandom(&mut secret) - .map_err(|e| format!("Failed to generate remote doctor device secret: {e}"))?; - let signing_key = SigningKey::from_bytes(&secret); - let raw_public = signing_key.verifying_key().to_bytes(); - let device_id = Sha256::digest(raw_public) - .iter() - .map(|b| format!("{b:02x}")) - .collect::(); - let private_key_pem = signing_key - .to_pkcs8_pem(Default::default()) - .map_err(|e| format!("Failed to encode remote doctor private key: {e}"))? - .to_string(); - let created_at_ms = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .map_err(|e| format!("Failed to get system time: {e}"))? 
- .as_millis() as u64; - let identity = StoredRemoteDoctorIdentity { - version: 1, - created_at_ms, - device_id, - private_key_pem, - }; - let text = serde_json::to_string_pretty(&identity) - .map_err(|e| format!("Failed to serialize remote doctor identity: {e}"))?; - std::fs::write(&path, format!("{text}\n")) - .map_err(|e| format!("Failed to persist remote doctor identity: {e}"))?; - Ok(identity) -} - -fn detect_method_name() -> String { - std::env::var("CLAWPAL_REMOTE_DOCTOR_DETECT_METHOD") - .unwrap_or_else(|_| DEFAULT_DETECT_METHOD.to_string()) -} - -fn repair_method_name() -> String { - std::env::var("CLAWPAL_REMOTE_DOCTOR_REPAIR_METHOD") - .unwrap_or_else(|_| DEFAULT_REPAIR_METHOD.to_string()) -} - -fn configured_remote_doctor_protocol() -> Option { - match std::env::var("CLAWPAL_REMOTE_DOCTOR_PROTOCOL") - .ok() - .as_deref() - .map(str::trim) - { - Some("agent") => Some(RemoteDoctorProtocol::AgentPlanner), - Some("legacy") | Some("legacy_doctor") => Some(RemoteDoctorProtocol::LegacyDoctor), - Some("clawpal_server") => Some(RemoteDoctorProtocol::ClawpalServer), - _ => None, - } -} - -fn default_remote_doctor_protocol() -> RemoteDoctorProtocol { - RemoteDoctorProtocol::AgentPlanner -} - -fn protocol_requires_bridge(protocol: RemoteDoctorProtocol) -> bool { - matches!(protocol, RemoteDoctorProtocol::AgentPlanner) -} - -fn protocol_runs_rescue_preflight(protocol: RemoteDoctorProtocol) -> bool { - matches!(protocol, RemoteDoctorProtocol::LegacyDoctor) -} - -fn next_agent_plan_kind(diagnosis: &RescuePrimaryDiagnosisResult) -> PlanKind { - next_agent_plan_kind_for_round(diagnosis, &[]) -} - -fn next_agent_plan_kind_for_round( - diagnosis: &RescuePrimaryDiagnosisResult, - previous_results: &[CommandResult], -) -> PlanKind { - if diagnosis - .issues - .iter() - .any(|issue| issue.code == "primary.config.unreadable") - { - if !previous_results.is_empty() { - return PlanKind::Repair; - } - PlanKind::Investigate - } else { - PlanKind::Repair - } -} - -fn 
remote_doctor_agent_id() -> &'static str { - REMOTE_DOCTOR_AGENT_ID -} - -fn remote_doctor_agent_session_key(session_id: &str) -> String { - format!("agent:{}:{session_id}", remote_doctor_agent_id()) -} - -fn remote_doctor_agent_workspace_files() -> [(&'static str, &'static str); 4] { - [ - ( - "AGENTS.md", - "# Remote Doctor\nUse this workspace only for ClawPal remote doctor planning sessions.\nReturn structured, operational answers.\n", - ), - ( - "BOOTSTRAP.md", - "Bootstrap is already complete for this workspace.\nDo not ask who you are or who the user is.\nUse IDENTITY.md and USER.md as the canonical identity context.\n", - ), - ( - "USER.md", - "- Name: ClawPal Desktop\n- Role: desktop repair orchestrator\n- Preferences: concise, operational, no bootstrap chatter\n", - ), - ( - "HEARTBEAT.md", - "Status: active remote-doctor planning workspace.\n", - ), - ] -} - -fn gateway_url_is_local(url: &str) -> bool { - let rest = url - .split_once("://") - .map(|(_, remainder)| remainder) - .unwrap_or(url); - let host_port = rest.split('/').next().unwrap_or(rest); - let host = host_port - .strip_prefix('[') - .and_then(|value| value.split_once(']').map(|(host, _)| host)) - .unwrap_or_else(|| host_port.split(':').next().unwrap_or(host_port)); - matches!(host, "127.0.0.1" | "localhost") -} - -fn ensure_local_remote_doctor_agent_ready() -> Result<(), String> { - let agent_id = remote_doctor_agent_id().to_string(); - if let Err(error) = create_agent(agent_id.clone(), None, Some(true)) { - if !error.contains("already exists") { - return Err(format!("Failed to create remote doctor agent: {error}")); - } - } - - setup_agent_identity(agent_id.clone(), "ClawPal Remote Doctor".to_string(), None)?; - - let paths = resolve_paths(); - let cfg = read_openclaw_config(&paths)?; - let workspace = - clawpal_core::doctor::resolve_agent_workspace_from_config(&cfg, &agent_id, None) - .map(|path| shellexpand::tilde(&path).to_string())?; - create_dir_all(&workspace) - .map_err(|error| 
format!("Failed to create remote doctor workspace: {error}"))?; - - for (file_name, content) in remote_doctor_agent_workspace_files() { - std::fs::write(PathBuf::from(&workspace).join(file_name), content) - .map_err(|error| format!("Failed to write remote doctor {file_name}: {error}"))?; - } - - Ok(()) -} - -async fn ensure_agent_bridge_connected( - app: &AppHandle, - bridge: &BridgeClient, - gateway_url: &str, - auth_token_override: Option<&str>, - session_id: &str, -) { - if bridge.is_connected().await { - return; - } - - let connect_result = bridge - .connect( - gateway_url, - app.clone(), - remote_doctor_gateway_credentials(auth_token_override) - .ok() - .flatten(), - ) - .await; - if let Err(error) = connect_result { - append_remote_doctor_log( - session_id, - json!({ - "event": "bridge_connect_failed", - "reason": error, - }), - ); - } -} - -async fn ensure_remote_target_connected( - pool: &SshConnectionPool, - instance_id: &str, -) -> Result<(), String> { - let candidate_ids = remote_target_host_id_candidates(instance_id); - if candidate_ids.is_empty() { - return Ok(()); - } - for candidate in &candidate_ids { - if pool.is_connected(candidate).await { - return Ok(()); - } - } - - let hosts = crate::commands::ssh::read_hosts_from_registry()?; - let host = hosts - .into_iter() - .find(|candidate| candidate_ids.iter().any(|id| id == &candidate.id)) - .ok_or_else(|| format!("No SSH host config with id: {}", candidate_ids[0]))?; - if let Some(passphrase) = host.passphrase.as_deref().filter(|value| !value.is_empty()) { - pool.connect_with_passphrase(&host, Some(passphrase)).await - } else { - pool.connect(&host).await - } -} - -fn remote_target_host_id_candidates(instance_id: &str) -> Vec { - let mut candidates = Vec::new(); - let trimmed = instance_id.trim(); - if !trimmed.is_empty() { - candidates.push(trimmed.to_string()); - } - if let Some(stripped) = trimmed.strip_prefix("ssh:").map(str::trim) { - if !stripped.is_empty() && !candidates.iter().any(|value| 
value == stripped) { - candidates.push(stripped.to_string()); - } - } - candidates -} - -fn primary_remote_target_host_id(instance_id: &str) -> Result { - remote_target_host_id_candidates(instance_id) - .into_iter() - .next() - .ok_or_else(|| "Remote Doctor repair requires an ssh instance id".to_string()) -} - -fn is_unknown_method_error(error: &str) -> bool { - error.contains("unknown method") - || error.contains("\"code\":\"INVALID_REQUEST\"") - || error.contains("\"code\": \"INVALID_REQUEST\"") -} - -fn result_for_completion( - session_id: &str, - round: usize, - last_plan_kind: PlanKind, - last_command: Option>, - message: &str, -) -> RemoteDoctorRepairResult { - RemoteDoctorRepairResult { - mode: "remoteDoctor".into(), - status: "completed".into(), - round, - phase: "completed".into(), - last_plan_kind: match last_plan_kind { - PlanKind::Detect => "detect".into(), - PlanKind::Investigate => "investigate".into(), - PlanKind::Repair => "repair".into(), - }, - latest_diagnosis_healthy: true, - last_command, - session_id: session_id.to_string(), - message: message.into(), - } -} - -fn result_for_completion_with_warnings( - session_id: &str, - round: usize, - last_plan_kind: PlanKind, - last_command: Option>, - message: &str, -) -> RemoteDoctorRepairResult { - RemoteDoctorRepairResult { - mode: "remoteDoctor".into(), - status: "completed_with_warnings".into(), - round, - phase: "completed".into(), - last_plan_kind: match last_plan_kind { - PlanKind::Detect => "detect".into(), - PlanKind::Investigate => "investigate".into(), - PlanKind::Repair => "repair".into(), - }, - latest_diagnosis_healthy: false, - last_command, - session_id: session_id.to_string(), - message: message.into(), - } -} - -fn diagnosis_has_only_non_auto_fixable_issues(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { - !diagnosis.issues.is_empty() && diagnosis.issues.iter().all(|issue| !issue.auto_fixable) -} - -async fn run_rescue_diagnosis( - app: &AppHandle, - target_location: TargetLocation, 
- instance_id: &str, -) -> Result { - match target_location { - TargetLocation::LocalOpenclaw => diagnose_primary_via_rescue(None, None).await, - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_diagnose_primary_via_rescue( - app.state::(), - host_id, - None, - None, - ) - .await - } - } -} - -async fn read_target_config( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, -) -> Result { - let raw = match target_location { - TargetLocation::LocalOpenclaw => read_raw_config()?, - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_read_raw_config(app.state::(), host_id).await? - } - }; - serde_json::from_str::(&raw) - .map_err(|error| format!("Failed to parse target config: {error}")) -} - -async fn read_target_config_raw( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, -) -> Result { - match target_location { - TargetLocation::LocalOpenclaw => read_raw_config(), - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_read_raw_config(app.state::(), host_id).await - } - } -} - -fn build_config_excerpt_context(raw: &str) -> ConfigExcerptContext { - match serde_json::from_str::(raw) { - Ok(config_excerpt) => ConfigExcerptContext { - config_excerpt, - config_excerpt_raw: None, - config_parse_error: None, - }, - Err(error) => ConfigExcerptContext { - config_excerpt: Value::Null, - config_excerpt_raw: Some(raw.to_string()), - config_parse_error: Some(format!("Failed to parse target config: {error}")), - }, - } -} - -fn config_excerpt_log_summary(context: &ConfigExcerptContext) -> Value { - json!({ - "configExcerptPresent": !context.config_excerpt.is_null(), - "configExcerptBytes": serde_json::to_string(&context.config_excerpt).ok().map(|text| text.len()).unwrap_or(0), - "configExcerptRawPresent": context.config_excerpt_raw.as_ref().map(|text| 
!text.trim().is_empty()).unwrap_or(false), - "configExcerptRawBytes": context.config_excerpt_raw.as_ref().map(|text| text.len()).unwrap_or(0), - "configParseError": context.config_parse_error, - }) -} - -fn empty_config_excerpt_context() -> ConfigExcerptContext { - ConfigExcerptContext { - config_excerpt: Value::Null, - config_excerpt_raw: None, - config_parse_error: None, - } -} - -fn empty_diagnosis() -> RescuePrimaryDiagnosisResult { - serde_json::from_value(json!({ - "status": "healthy", - "checkedAt": "2026-03-18T00:00:00Z", - "targetProfile": "primary", - "rescueProfile": "rescue", - "summary": { - "status": "healthy", - "headline": "Healthy", - "recommendedAction": null, - "fixableIssueCount": 0, - "selectedFixIssueIds": [] - }, - "issues": [], - "sections": [] - })) - .expect("empty diagnosis should deserialize") -} - -async fn write_target_config( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, - config: &Value, -) -> Result<(), String> { - let text = serde_json::to_string_pretty(config).map_err(|error| error.to_string())?; - let validated = clawpal_core::config::validate_config_json(&text) - .map_err(|error| format!("Invalid config after remote doctor patch: {error}"))?; - let validated_text = - serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; - match target_location { - TargetLocation::LocalOpenclaw => { - let paths = resolve_paths(); - crate::config_io::write_text(&paths.config_path, &validated_text)?; - } - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_write_raw_config(app.state::(), host_id, validated_text) - .await?; - } - } - Ok(()) -} - -async fn write_target_config_raw( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, - text: &str, -) -> Result<(), String> { - let validated = clawpal_core::config::validate_config_json(text) - .map_err(|error| format!("Invalid raw config payload: {error}"))?; - let 
validated_text = - serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; - match target_location { - TargetLocation::LocalOpenclaw => { - let paths = resolve_paths(); - crate::config_io::write_text(&paths.config_path, &validated_text)?; - } - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_write_raw_config(app.state::(), host_id, validated_text) - .await?; - } - } - Ok(()) -} - -async fn restart_target_gateway( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, -) -> Result<(), String> { - match target_location { - TargetLocation::LocalOpenclaw => { - restart_gateway().await?; - } - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_restart_gateway(app.state::(), host_id).await?; - } - } - Ok(()) -} - -fn diagnosis_is_healthy(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { - diagnosis.status == "healthy" - && diagnosis.summary.status == "healthy" - && diagnosis.issues.is_empty() -} - -fn diagnosis_context(diagnosis: &RescuePrimaryDiagnosisResult) -> Value { - json!({ - "status": diagnosis.status, - "summary": { - "status": diagnosis.summary.status, - "headline": diagnosis.summary.headline, - "recommendedAction": diagnosis.summary.recommended_action, - "fixableIssueCount": diagnosis.summary.fixable_issue_count, - "selectedFixIssueIds": diagnosis.summary.selected_fix_issue_ids, - }, - "issues": diagnosis.issues, - "sections": diagnosis.sections, - }) -} - -fn diagnosis_issue_summaries(diagnosis: &RescuePrimaryDiagnosisResult) -> Vec { - diagnosis - .issues - .iter() - .map(|issue| { - json!({ - "id": issue.id, - "code": issue.code, - "severity": issue.severity, - "title": issue.message, - "target": issue.source, - "autoFixable": issue.auto_fixable, - "fixHint": issue.fix_hint, - }) - }) - .collect() -} - -fn diagnosis_missing_rescue_profile(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { - diagnosis - 
.issues - .iter() - .any(|issue| issue.code == "rescue.profile.missing") -} - -fn diagnosis_unhealthy_rescue_gateway(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { - diagnosis - .issues - .iter() - .any(|issue| issue.code == "rescue.gateway.unhealthy") -} - -fn rescue_setup_command_result( - action: &str, - profile: &str, - configured: bool, - active: bool, - runtime_state: &str, -) -> CommandResult { - CommandResult { - argv: vec!["manage_rescue_bot".into(), action.into(), profile.into()], - exit_code: Some(0), - stdout: format!( - "configured={} active={} runtimeState={}", - configured, active, runtime_state - ), - stderr: String::new(), - duration_ms: 0, - timed_out: false, - } -} - -fn rescue_bot_manage_command_result( - result: &crate::commands::RescueBotManageResult, -) -> CommandResult { - CommandResult { - argv: vec![ - "manage_rescue_bot".into(), - result.action.clone(), - result.profile.clone(), - ], - exit_code: Some(if result.active || result.configured { - 0 - } else { - 1 - }), - stdout: format!( - "configured={} active={} runtimeState={} rescuePort={} mainPort={} commands={}", - result.configured, - result.active, - result.runtime_state, - result.rescue_port, - result.main_port, - result.commands.len() - ), - stderr: String::new(), - duration_ms: 0, - timed_out: false, - } -} - -fn rescue_activation_diagnostic_commands(profile: &str) -> Vec> { - vec![ - vec!["manage_rescue_bot".into(), "status".into(), profile.into()], - vec![ - "openclaw".into(), - "--profile".into(), - profile.into(), - "gateway".into(), - "status".into(), - ], - vec![ - "openclaw".into(), - "--profile".into(), - profile.into(), - "config".into(), - "get".into(), - "gateway.port".into(), - "--json".into(), - ], - ] -} - -fn rescue_activation_error_message( - profile: &str, - configured: bool, - runtime_state: &str, - suggested_checks: &[String], -) -> String { - let suffix = if suggested_checks.is_empty() { - String::new() - } else { - format!(" Suggested checks: {}.", 
suggested_checks.join("; ")) - }; - format!( - "Rescue profile \"{}\" was {} but did not become active (runtime state: {}).", - profile, - if configured { - "configured" - } else { - "not configured" - }, - runtime_state - ) + &suffix -} - -async fn execute_rescue_activation_diagnostic_command( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, - argv: &[String], -) -> CommandResult { - let started = Instant::now(); - if argv.first().map(String::as_str) == Some("manage_rescue_bot") - && argv.get(1).map(String::as_str) == Some("status") - { - let profile = argv - .get(2) - .map(String::as_str) - .filter(|value| !value.trim().is_empty()) - .unwrap_or("rescue"); - let result = match target_location { - TargetLocation::LocalOpenclaw => { - manage_rescue_bot("status".into(), Some(profile.to_string()), None).await - } - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id); - match host_id { - Ok(host_id) => { - remote_manage_rescue_bot( - app.state::(), - host_id, - "status".into(), - Some(profile.to_string()), - None, - ) - .await - } - Err(error) => Err(error), - } - } - }; - return match result { - Ok(result) => { - let mut command_result = rescue_bot_manage_command_result(&result); - command_result.duration_ms = started.elapsed().as_millis() as u64; - command_result - } - Err(error) => CommandResult { - argv: argv.to_vec(), - exit_code: Some(1), - stdout: String::new(), - stderr: error, - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - }, - }; - } - - match execute_command( - &app.state::(), - target_location, - instance_id, - argv, - ) - .await - { - Ok(result) => result, - Err(error) => CommandResult { - argv: argv.to_vec(), - exit_code: Some(1), - stdout: String::new(), - stderr: error, - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - }, - } -} - -async fn collect_rescue_activation_failure_diagnostics( - app: &AppHandle, - target_location: 
TargetLocation, - instance_id: &str, - profile: &str, -) -> Vec { - let mut results = Vec::new(); - for argv in rescue_activation_diagnostic_commands(profile) { - results.push( - execute_rescue_activation_diagnostic_command(app, target_location, instance_id, &argv) - .await, - ); - } - results -} - -struct RescueActivationFailure { - message: String, - activation_result: CommandResult, - diagnostics: Vec, -} - -async fn ensure_rescue_profile_ready( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, -) -> Result { - let started = Instant::now(); - let result = match target_location { - TargetLocation::LocalOpenclaw => { - manage_rescue_bot("activate".into(), Some("rescue".into()), None) - .await - .map_err(|error| RescueActivationFailure { - message: error, - activation_result: rescue_setup_command_result( - "activate", - "rescue", - false, - false, - "activation_failed", - ), - diagnostics: Vec::new(), - })? - } - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id).map_err(|error| { - RescueActivationFailure { - message: error, - activation_result: rescue_setup_command_result( - "activate", - "rescue", - false, - false, - "activation_failed", - ), - diagnostics: Vec::new(), - } - })?; - remote_manage_rescue_bot( - app.state::(), - host_id, - "activate".into(), - Some("rescue".into()), - None, - ) - .await - .map_err(|error| RescueActivationFailure { - message: error, - activation_result: rescue_setup_command_result( - "activate", - "rescue", - false, - false, - "activation_failed", - ), - diagnostics: Vec::new(), - })? 
- } - }; - let mut command_result = rescue_setup_command_result( - &result.action, - &result.profile, - result.configured, - result.active, - &result.runtime_state, - ); - command_result.duration_ms = started.elapsed().as_millis() as u64; - if !result.active { - let diagnostics = collect_rescue_activation_failure_diagnostics( - app, - target_location, - instance_id, - &result.profile, - ) - .await; - let suggested_checks = diagnostics - .iter() - .map(|result| result.argv.join(" ")) - .collect::>(); - return Err(RescueActivationFailure { - message: rescue_activation_error_message( - &result.profile, - result.configured, - &result.runtime_state, - &suggested_checks, - ), - activation_result: command_result, - diagnostics, - }); - } - Ok(command_result) -} - -async fn repair_rescue_gateway_if_needed( - app: &AppHandle, - session_id: &str, - round: usize, - target_location: TargetLocation, - instance_id: &str, - diagnosis: &mut RescuePrimaryDiagnosisResult, -) -> Result<(), String> { - if !(diagnosis_missing_rescue_profile(diagnosis) - || diagnosis_unhealthy_rescue_gateway(diagnosis)) - { - return Ok(()); - } - - emit_progress( - Some(app), - session_id, - round, - "preparing_rescue", - "Activating rescue profile before requesting remote repair plan", - Some(PlanKind::Repair), - None, - ); - let setup_result = match ensure_rescue_profile_ready(app, target_location, instance_id).await { - Ok(setup_result) => setup_result, - Err(failure) => { - append_remote_doctor_log( - session_id, - json!({ - "event": "rescue_profile_activation", - "round": round, - "result": failure.activation_result, - "status": "failed", - }), - ); - append_remote_doctor_log( - session_id, - json!({ - "event": "rescue_activation_diagnosis", - "round": round, - "checks": failure.diagnostics, - }), - ); - return Err(failure.message); - } - }; - append_remote_doctor_log( - session_id, - json!({ - "event": "rescue_profile_activation", - "round": round, - "result": setup_result, - }), - ); - *diagnosis 
= run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "after_rescue_activation", round, diagnosis); - Ok(()) -} - -fn append_diagnosis_log( - session_id: &str, - stage: &str, - round: usize, - diagnosis: &RescuePrimaryDiagnosisResult, -) { - append_remote_doctor_log( - session_id, - json!({ - "event": "diagnosis_result", - "stage": stage, - "round": round, - "status": diagnosis.status, - "summaryStatus": diagnosis.summary.status, - "headline": diagnosis.summary.headline, - "recommendedAction": diagnosis.summary.recommended_action, - "issueCount": diagnosis.issues.len(), - "issues": diagnosis_issue_summaries(diagnosis), - }), - ); -} - -fn clawpal_server_step_type_summary(steps: &[ClawpalServerPlanStep]) -> Value { - let mut counts = serde_json::Map::new(); - for step in steps { - let entry = counts - .entry(step.step_type.clone()) - .or_insert_with(|| Value::from(0_u64)); - let next = entry.as_u64().unwrap_or(0) + 1; - *entry = Value::from(next); - } - Value::Object(counts) -} - -fn repair_plan_stalled(observations: &[RepairRoundObservation], threshold: usize) -> bool { - if observations.len() < threshold { - return false; - } - let recent = &observations[observations.len() - threshold..]; - let Some(first) = recent.first() else { - return false; - }; - !first.issue_summaries.is_empty() - && recent.iter().all(|entry| { - entry.step_types.len() == 1 - && entry.step_types[0] == "doctorRediagnose" - && entry.diagnosis_signature == first.diagnosis_signature - }) -} - -fn round_limit_error_message( - diagnosis: &RescuePrimaryDiagnosisResult, - last_step_types: &[String], -) -> String { - let issue_summary = serde_json::to_string(&diagnosis_issue_summaries(diagnosis)) - .unwrap_or_else(|_| "[]".to_string()); - let step_summary = if last_step_types.is_empty() { - "[]".to_string() - } else { - serde_json::to_string(last_step_types).unwrap_or_else(|_| "[]".to_string()) - }; - format!( - "Remote Doctor repair exceeded 
{MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean rescue diagnosis result. Last issues: {issue_summary}. Last repair step types: {step_summary}." - ) -} - -fn stalled_plan_error_message(observation: &RepairRoundObservation) -> String { - let issue_summary = - serde_json::to_string(&observation.issue_summaries).unwrap_or_else(|_| "[]".to_string()); - let step_summary = - serde_json::to_string(&observation.step_types).unwrap_or_else(|_| "[]".to_string()); - format!( - "Remote Doctor did not return actionable repair steps by round {} after {} repeated rounds. Last issues: {}. Last repair step types: {}.", - observation.round, - REPAIR_PLAN_STALL_THRESHOLD, - issue_summary, - step_summary - ) -} - -fn ensure_object(value: &mut Value) -> Result<&mut serde_json::Map, String> { - if !value.is_object() { - *value = json!({}); - } - value - .as_object_mut() - .ok_or_else(|| "Expected object while applying remote doctor config step".to_string()) -} - -fn apply_config_set(root: &mut Value, path: &str, value: Value) -> Result<(), String> { - let segments = path - .split('.') - .filter(|segment| !segment.trim().is_empty()) - .collect::>(); - if segments.is_empty() { - return Err("Config set path cannot be empty".into()); - } - let mut cursor = root; - for segment in &segments[..segments.len() - 1] { - let object = ensure_object(cursor)?; - cursor = object - .entry((*segment).to_string()) - .or_insert_with(|| json!({})); - } - let object = ensure_object(cursor)?; - object.insert(segments[segments.len() - 1].to_string(), value); - Ok(()) -} - -fn apply_config_unset(root: &mut Value, path: &str) -> Result<(), String> { - let segments = path - .split('.') - .filter(|segment| !segment.trim().is_empty()) - .collect::>(); - if segments.is_empty() { - return Err("Config unset path cannot be empty".into()); - } - let mut cursor = root; - for segment in &segments[..segments.len() - 1] { - let Some(next) = cursor - .as_object_mut() - .and_then(|object| object.get_mut(*segment)) - else { - 
return Ok(()); - }; - cursor = next; - } - if let Some(object) = cursor.as_object_mut() { - object.remove(segments[segments.len() - 1]); - } - Ok(()) -} - -fn extract_json_block(text: &str) -> Option<&str> { - clawpal_core::doctor::extract_json_from_output(text) -} - -fn build_agent_plan_prompt( - kind: PlanKind, - session_id: &str, - round: usize, - target_location: TargetLocation, - instance_id: &str, - diagnosis: &RescuePrimaryDiagnosisResult, - config_context: &ConfigExcerptContext, - previous_results: &[CommandResult], -) -> String { - let kind_label = match kind { - PlanKind::Detect => "detection", - PlanKind::Investigate => "investigation", - PlanKind::Repair => "repair", - }; - let target_label = match target_location { - TargetLocation::LocalOpenclaw => "local_openclaw", - TargetLocation::RemoteOpenclaw => "remote_openclaw", - }; - let diagnosis_json = - serde_json::to_string_pretty(&diagnosis_context(diagnosis)).unwrap_or_else(|_| "{}".into()); - let config_context_json = serde_json::to_string_pretty(&json!({ - "configExcerpt": config_context.config_excerpt, - "configExcerptRaw": config_context.config_excerpt_raw, - "configParseError": config_context.config_parse_error, - })) - .unwrap_or_else(|_| "{}".into()); - let previous_results_json = - serde_json::to_string_pretty(previous_results).unwrap_or_else(|_| "[]".into()); - let phase_rules = match kind { - PlanKind::Detect => "For detection plans, gather only the commands needed to confirm current state. Set healthy=true and done=true only when no issue remains.", - PlanKind::Investigate => "For investigation plans, return read-only diagnosis steps only. Do not modify files, delete files, overwrite config, or restart services. Prefer commands that inspect, validate, backup, or print evidence for why the config is unreadable. Do not run follow/tail commands, streaming log readers, or any unbounded command; every investigation command must be bounded and return promptly. 
Do not use heredocs, multiline scripts, or commands that wait on stdin. Prefer single-line commands over shell scripting.", - PlanKind::Repair => "For repair plans, return the minimal safe repair commands. Reference prior investigation evidence when config is unreadable. Back up the file before changing it and include validation/rediagnosis steps as needed. Do not invent OpenClaw subcommands. Use only the verified OpenClaw commands listed below or the `clawpal doctor ...` tools. Do not use `openclaw auth ...` commands. Do not use `openclaw doctor --json`; use `clawpal doctor probe-openclaw` or `clawpal doctor exec --tool doctor` instead. Do not use heredocs, multiline scripts, or commands that wait on stdin.", - }; - format!( - "Identity bootstrap for this session:\n\ -- Your name: ClawPal Remote Doctor\n\ -- Your creature: maintenance daemon\n\ -- Your vibe: direct, terse, operational\n\ -- Your emoji: none\n\ -- The user is: ClawPal desktop app\n\ -- The user timezone is: Asia/Shanghai\n\ -- Do not ask identity/bootstrap questions.\n\ -- Do not ask who you are or who the user is.\n\ -- Do not modify IDENTITY.md, USER.md, or workspace bootstrap files.\n\ -\n\ -You are ClawPal Remote Doctor planner.\n\ -Return ONLY one JSON object and no markdown.\n\ -Task: produce the next {kind_label} plan for OpenClaw.\n\ -Session: {session_id}\n\ -Round: {round}\n\ -Target location: {target_label}\n\ -Instance id: {instance_id}\n\ -Diagnosis JSON:\n{diagnosis_json}\n\n\ -Config context JSON:\n{config_context_json}\n\n\ -Previous command results JSON:\n{previous_results_json}\n\n\ -Available gateway tools:\n\ -- `clawpal doctor probe-openclaw`\n\ -- `clawpal doctor config-read [path]`\n\ -- `clawpal doctor config-read-raw`\n\ -- `clawpal doctor config-upsert `\n\ -- `clawpal doctor config-delete `\n\ -- `clawpal doctor config-write-raw-base64 `\n\ -- `clawpal doctor exec --tool [--args ]`\n\ -- Verified direct OpenClaw commands only:\n\ - - `openclaw --version`\n\ - - `openclaw 
gateway status`\n\ -You may invoke these tools before answering when you need fresh diagnostics or config state.\n\ -If you already have enough information, return the JSON plan directly.\n\n\ -Return this exact JSON schema:\n\ -{{\n \"planId\": \"string\",\n \"planKind\": \"{kind}\",\n \"summary\": \"string\",\n \"commands\": [{{\"argv\": [\"cmd\"], \"timeoutSec\": 60, \"purpose\": \"why\", \"continueOnFailure\": false}}],\n \"healthy\": false,\n \"done\": false,\n \"success\": false\n}}\n\ -Rules:\n\ -- {phase_rules}\n\ -- For repair plans, return shell/openclaw commands in commands.\n\ -- Keep commands empty when no command is needed.\n\ -- Output valid JSON only.", - kind = match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - } - ) -} - -fn parse_agent_plan_response(kind: PlanKind, text: &str) -> Result { - let json_block = extract_json_block(text) - .ok_or_else(|| format!("Remote doctor agent did not return JSON: {text}"))?; - let value: Value = serde_json::from_str(json_block) - .map_err(|error| format!("Failed to parse remote doctor agent JSON: {error}"))?; - parse_plan_response(kind, value) -} - -fn parse_invoke_argv(command: &str, args: &Value) -> Result, String> { - if let Some(argv) = args.get("argv").and_then(Value::as_array) { - let parsed = argv - .iter() - .map(|value| { - value - .as_str() - .map(str::to_string) - .ok_or_else(|| "invoke argv entries must be strings".to_string()) - }) - .collect::, _>>()?; - if parsed.is_empty() { - return Err("invoke argv cannot be empty".into()); - } - return Ok(parsed); - } - - let arg_string = args - .get("args") - .and_then(Value::as_str) - .or_else(|| args.get("command").and_then(Value::as_str)) - .unwrap_or(""); - let mut parsed = if arg_string.trim().is_empty() { - Vec::new() - } else { - shell_words::split(arg_string) - .map_err(|error| format!("Failed to parse invoke args: {error}"))? 
- }; - if parsed.first().map(String::as_str) != Some(command) { - parsed.insert(0, command.to_string()); - } - Ok(parsed) -} - -async fn execute_clawpal_command( - app: &AppHandle, - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - argv: &[String], -) -> Result { - match argv.get(1).map(String::as_str) { - Some("doctor") => { - execute_clawpal_doctor_command(app, pool, target_location, instance_id, argv).await - } - other => Err(format!( - "Unsupported clawpal command in remote doctor agent session: {:?}", - other - )), - } -} - -async fn execute_clawpal_doctor_command( - app: &AppHandle, - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - argv: &[String], -) -> Result { - match argv.get(2).map(String::as_str) { - Some("probe-openclaw") => { - let version_result = execute_command( - pool, - target_location, - instance_id, - &["openclaw".into(), "--version".into()], - ) - .await?; - let which_result = match target_location { - TargetLocation::LocalOpenclaw => { - execute_command( - pool, - target_location, - instance_id, - &[ - "sh".into(), - "-lc".into(), - "command -v openclaw || true".into(), - ], - ) - .await? - } - TargetLocation::RemoteOpenclaw => { - execute_command( - pool, - target_location, - instance_id, - &[ - "sh".into(), - "-lc".into(), - "command -v openclaw || true".into(), - ], - ) - .await? 
- } - }; - Ok(json!({ - "ok": version_result.exit_code == Some(0), - "version": version_result.stdout.trim(), - "openclawPath": which_result.stdout.trim(), - })) - } - Some("config-read") => { - let maybe_path = argv - .get(3) - .map(String::as_str) - .filter(|value| !value.starts_with("--")); - let raw = read_target_config_raw(app, target_location, instance_id).await?; - config_read_response(&raw, maybe_path) - } - Some("config-read-raw") => { - let raw = read_target_config_raw(app, target_location, instance_id).await?; - Ok(json!({ - "raw": raw, - })) - } - Some("config-delete") => { - let path = argv - .get(3) - .ok_or("clawpal doctor config-delete requires a path")?; - let mut config = read_target_config(app, target_location, instance_id).await?; - apply_config_unset(&mut config, path)?; - write_target_config(app, target_location, instance_id, &config).await?; - restart_target_gateway(app, target_location, instance_id).await?; - Ok(json!({ "deleted": true, "path": path })) - } - Some("config-write-raw-base64") => { - let encoded = argv - .get(3) - .ok_or("clawpal doctor config-write-raw-base64 requires a base64 payload")?; - let decoded = decode_base64_config_payload(encoded)?; - write_target_config_raw(app, target_location, instance_id, &decoded).await?; - restart_target_gateway(app, target_location, instance_id).await?; - Ok(json!({ - "written": true, - "bytes": decoded.len(), - })) - } - Some("config-upsert") => { - let path = argv - .get(3) - .ok_or("clawpal doctor config-upsert requires a path")?; - let value_raw = argv - .get(4) - .ok_or("clawpal doctor config-upsert requires a value")?; - let value: Value = serde_json::from_str(value_raw) - .map_err(|error| format!("Invalid JSON value for config-upsert: {error}"))?; - let mut config = read_target_config(app, target_location, instance_id).await?; - apply_config_set(&mut config, path, value)?; - write_target_config(app, target_location, instance_id, &config).await?; - restart_target_gateway(app, 
target_location, instance_id).await?; - Ok(json!({ "upserted": true, "path": path })) - } - Some("exec") => { - let tool_idx = argv - .iter() - .position(|part| part == "--tool") - .ok_or("clawpal doctor exec requires --tool")?; - let tool = argv - .get(tool_idx + 1) - .ok_or("clawpal doctor exec missing tool name")?; - let args_idx = argv.iter().position(|part| part == "--args"); - let mut exec_argv = vec![tool.clone()]; - if let Some(index) = args_idx { - if let Some(arg_string) = argv.get(index + 1) { - exec_argv.extend(shell_words::split(arg_string).map_err(|error| { - format!("Failed to parse clawpal doctor exec args: {error}") - })?); - } - } - let result = execute_command(pool, target_location, instance_id, &exec_argv).await?; - Ok(json!({ - "argv": result.argv, - "exitCode": result.exit_code, - "stdout": result.stdout, - "stderr": result.stderr, - })) - } - other => Err(format!( - "Unsupported clawpal doctor subcommand in remote doctor agent session: {:?}", - other - )), - } -} - -fn config_read_response(raw: &str, path: Option<&str>) -> Result { - let context = build_config_excerpt_context(raw); - if let Some(parse_error) = context.config_parse_error { - return Ok(json!({ - "value": Value::Null, - "path": path, - "raw": context.config_excerpt_raw.unwrap_or_else(|| raw.to_string()), - "parseError": parse_error, - })); - } - - let value = if let Some(path) = path { - clawpal_core::doctor::select_json_value_from_str( - &serde_json::to_string_pretty(&context.config_excerpt).unwrap_or_else(|_| "{}".into()), - Some(path), - "remote doctor config", - )? 
- } else { - context.config_excerpt - }; - - Ok(json!({ - "value": value, - "path": path, - })) -} - -fn decode_base64_config_payload(encoded: &str) -> Result { - let bytes = base64::engine::general_purpose::STANDARD - .decode(encoded.trim()) - .map_err(|error| format!("Failed to decode base64 config payload: {error}"))?; - String::from_utf8(bytes) - .map_err(|error| format!("Base64 config payload is not valid UTF-8: {error}")) -} - -async fn execute_invoke_payload( - app: &AppHandle, - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - payload: &Value, -) -> Result { - let command = payload - .get("command") - .and_then(Value::as_str) - .ok_or("invoke payload missing command")?; - let args = payload.get("args").cloned().unwrap_or(Value::Null); - let argv = parse_invoke_argv(command, &args)?; - match command { - "openclaw" => { - let result = execute_command(pool, target_location, instance_id, &argv).await?; - Ok(json!({ - "argv": result.argv, - "exitCode": result.exit_code, - "stdout": result.stdout, - "stderr": result.stderr, - })) - } - "clawpal" => execute_clawpal_command(app, pool, target_location, instance_id, &argv).await, - other => Err(format!( - "Unsupported invoke command in remote doctor agent session: {other}" - )), - } -} - -async fn run_agent_request_with_bridge( - app: &AppHandle, - client: &NodeClient, - bridge: &BridgeClient, - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - agent_id: &str, - session_key: &str, - message: &str, -) -> Result { - let final_rx = client - .start_agent_request(agent_id, session_key, message) - .await?; - let mut invokes = bridge.subscribe_invokes(); - let final_future = async move { - final_rx.await.map_err(|_| { - "Agent request ended before a final chat response was received".to_string() - }) - }; - tokio::pin!(final_future); - - loop { - tokio::select! 
{ - result = &mut final_future => { - return result; - } - event = invokes.recv() => { - let payload = match event { - Ok(payload) => payload, - Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => { - continue; - } - Err(tokio::sync::broadcast::error::RecvError::Closed) => { - return Err("Bridge invoke stream closed during agent request".into()); - } - }; - let invoke_id = payload.get("id").and_then(Value::as_str).unwrap_or("").to_string(); - let node_id = payload.get("nodeId").and_then(Value::as_str).unwrap_or("").to_string(); - let result = execute_invoke_payload(app, pool, target_location, instance_id, &payload).await; - match result { - Ok(value) => { - bridge.send_invoke_result(&invoke_id, &node_id, value).await?; - } - Err(error) => { - bridge.send_invoke_error(&invoke_id, &node_id, "EXEC_ERROR", &error).await?; - } - } - let _ = bridge.take_invoke(&invoke_id).await; - } - } - } -} - -fn shell_escape(value: &str) -> String { - format!("'{}'", value.replace('\'', "'\\''")) -} - -fn build_shell_command(argv: &[String]) -> String { - argv.iter() - .map(|part| shell_escape(part)) - .collect::>() - .join(" ") -} - -async fn execute_command( - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - argv: &[String], -) -> Result { - let started = Instant::now(); - if argv.is_empty() { - return Err("Plan command argv cannot be empty".into()); - } - let result = match target_location { - TargetLocation::LocalOpenclaw => { - if argv[0] == "openclaw" { - let arg_refs = argv - .iter() - .skip(1) - .map(String::as_str) - .collect::>(); - let output = run_openclaw(&arg_refs)?; - CommandResult { - argv: argv.to_vec(), - exit_code: Some(output.exit_code), - stdout: output.stdout, - stderr: output.stderr, - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - } - } else { - let mut command = std::process::Command::new(&argv[0]); - command.args(argv.iter().skip(1)); - if let Some(openclaw_home) = 
get_active_openclaw_home_override() { - command.env("OPENCLAW_HOME", openclaw_home); - } - let output = command.output().map_err(|error| { - format!("Failed to execute local command {:?}: {error}", argv) - })?; - CommandResult { - argv: argv.to_vec(), - exit_code: output.status.code(), - stdout: String::from_utf8_lossy(&output.stdout).to_string(), - stderr: String::from_utf8_lossy(&output.stderr).to_string(), - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - } - } - } - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - if argv[0] == "openclaw" { - let arg_refs = argv - .iter() - .skip(1) - .map(String::as_str) - .collect::>(); - let output = run_openclaw_remote(pool, &host_id, &arg_refs).await?; - CommandResult { - argv: argv.to_vec(), - exit_code: Some(output.exit_code), - stdout: output.stdout, - stderr: output.stderr, - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - } - } else { - let output = pool - .exec_login(&host_id, &build_shell_command(argv)) - .await?; - CommandResult { - argv: argv.to_vec(), - exit_code: Some(output.exit_code as i32), - stdout: output.stdout, - stderr: output.stderr, - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - } - } - } - }; - Ok(result) -} - -fn plan_command_uses_internal_clawpal_tool(argv: &[String]) -> bool { - argv.first().map(String::as_str) == Some("clawpal") -} - -fn validate_clawpal_exec_args(argv: &[String]) -> Result<(), String> { - if argv.get(0).map(String::as_str) != Some("clawpal") - || argv.get(1).map(String::as_str) != Some("doctor") - || argv.get(2).map(String::as_str) != Some("exec") - { - return Ok(()); - } - - let args_idx = argv.iter().position(|part| part == "--args"); - let Some(index) = args_idx else { - return Ok(()); - }; - let Some(arg_string) = argv.get(index + 1) else { - return Ok(()); - }; - if arg_string.contains('\n') || arg_string.contains("<<") { - return Err(format!( - 
"Unsupported clawpal doctor exec args: {}. Use bounded single-line commands without heredocs or stdin-driven scripts.", - argv.join(" ") - )); - } - Ok(()) -} - -fn validate_plan_command_argv(argv: &[String]) -> Result<(), String> { - if argv.is_empty() { - return Err("Plan command argv cannot be empty".into()); - } - validate_clawpal_exec_args(argv)?; - if argv[0] != "openclaw" { - return Ok(()); - } - - let supported = argv == ["openclaw", "--version"] || argv == ["openclaw", "gateway", "status"]; - if supported { - Ok(()) - } else { - Err(format!( - "Unsupported openclaw plan command: {}", - argv.join(" ") - )) - } -} - -fn plan_command_failure_message( - kind: PlanKind, - round: usize, - argv: &[String], - error: &str, -) -> String { - let kind_label = match kind { - PlanKind::Detect => "Detect", - PlanKind::Investigate => "Investigate", - PlanKind::Repair => "Repair", - }; - format!( - "{kind_label} command failed in round {round}: {}: {error}", - argv.join(" ") - ) -} - -fn command_result_stdout(value: &Value) -> String { - value - .get("stdout") - .and_then(Value::as_str) - .map(str::to_string) - .unwrap_or_else(|| { - serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string()) - }) -} - -async fn execute_plan_command( - app: &AppHandle, - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - argv: &[String], -) -> Result { - let started = Instant::now(); - validate_plan_command_argv(argv)?; - if plan_command_uses_internal_clawpal_tool(argv) { - let value = execute_clawpal_command(app, pool, target_location, instance_id, argv).await?; - let exit_code = value - .get("exitCode") - .and_then(Value::as_i64) - .map(|code| code as i32) - .unwrap_or(0); - let stderr = value - .get("stderr") - .and_then(Value::as_str) - .unwrap_or("") - .to_string(); - return Ok(CommandResult { - argv: argv.to_vec(), - exit_code: Some(exit_code), - stdout: command_result_stdout(&value), - stderr, - duration_ms: 
started.elapsed().as_millis() as u64, - timed_out: false, - }); - } - - execute_command(pool, target_location, instance_id, argv).await -} - -fn parse_plan_response(kind: PlanKind, value: Value) -> Result { - let mut response: PlanResponse = serde_json::from_value(value) - .map_err(|error| format!("Failed to parse remote doctor plan response: {error}"))?; - response.plan_kind = kind; - if response.plan_id.trim().is_empty() { - response.plan_id = format!("plan-{}", Uuid::new_v4()); - } - Ok(response) -} - -async fn request_plan( - client: &NodeClient, - method: &str, - kind: PlanKind, - session_id: &str, - round: usize, - target_location: TargetLocation, - instance_id: &str, - previous_results: &[CommandResult], -) -> Result { - let response = client - .send_request( - method, - json!({ - "sessionId": session_id, - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "targetLocation": match target_location { - TargetLocation::LocalOpenclaw => "local_openclaw", - TargetLocation::RemoteOpenclaw => "remote_openclaw", - }, - "instanceId": instance_id, - "hostId": instance_id.strip_prefix("ssh:"), - "previousResults": previous_results, - }), - ) - .await?; - parse_plan_response(kind, response) -} - -async fn request_agent_plan( - app: &AppHandle, - client: &NodeClient, - bridge_client: &BridgeClient, - pool: &SshConnectionPool, - session_id: &str, - round: usize, - kind: PlanKind, - target_location: TargetLocation, - instance_id: &str, - diagnosis: &RescuePrimaryDiagnosisResult, - config_context: &ConfigExcerptContext, - previous_results: &[CommandResult], -) -> Result { - let agent_session_key = remote_doctor_agent_session_key(session_id); - let prompt = build_agent_plan_prompt( - kind, - session_id, - round, - target_location, - instance_id, - diagnosis, - config_context, - previous_results, - ); - let text = if bridge_client.is_connected().await { - 
run_agent_request_with_bridge( - app, - client, - bridge_client, - pool, - target_location, - instance_id, - remote_doctor_agent_id(), - &agent_session_key, - &prompt, - ) - .await? - } else { - client - .run_agent_request(remote_doctor_agent_id(), &agent_session_key, &prompt) - .await? - }; - parse_agent_plan_response(kind, &text) -} - -fn agent_plan_step_types(plan: &PlanResponse) -> Vec { - if plan.commands.is_empty() { - return vec![format!( - "plan:{}", - match plan.plan_kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - } - )]; - } - plan.commands - .iter() - .map(|command| { - command - .argv - .first() - .cloned() - .unwrap_or_else(|| "empty-command".to_string()) - }) - .collect() -} - -async fn request_clawpal_server_plan( - client: &NodeClient, - session_id: &str, - round: usize, - instance_id: &str, - target_location: TargetLocation, - diagnosis: &RescuePrimaryDiagnosisResult, - config_context: &ConfigExcerptContext, -) -> Result { - let response = client - .send_request( - "remote_repair_plan.request", - json!({ - "requestId": format!("{session_id}-round-{round}"), - "targetId": instance_id, - "targetLocation": match target_location { - TargetLocation::LocalOpenclaw => "local_openclaw", - TargetLocation::RemoteOpenclaw => "remote_openclaw", - }, - "context": { - "configExcerpt": config_context.config_excerpt, - "configExcerptRaw": config_context.config_excerpt_raw, - "configParseError": config_context.config_parse_error, - "diagnosis": diagnosis_context(diagnosis), - } - }), - ) - .await?; - serde_json::from_value::(response) - .map_err(|error| format!("Failed to parse clawpal-server plan response: {error}")) -} - -async fn report_clawpal_server_step_result( - client: &NodeClient, - plan_id: &str, - step_index: usize, - step: &ClawpalServerPlanStep, - result: &CommandResult, -) { - let _ = client - .send_request( - "remote_repair_plan.step_result", - json!({ - "planId": plan_id, - 
"stepIndex": step_index, - "step": step, - "result": result, - }), - ) - .await; -} - -async fn report_clawpal_server_final_result( - client: &NodeClient, - plan_id: &str, - healthy: bool, - diagnosis: &RescuePrimaryDiagnosisResult, -) { - let _ = client - .send_request( - "remote_repair_plan.final_result", - json!({ - "planId": plan_id, - "healthy": healthy, - "diagnosis": diagnosis_context(diagnosis), - }), - ) - .await; -} - -async fn run_remote_doctor_repair_loop( - app: Option<&AppHandle>, - pool: &SshConnectionPool, - session_id: &str, - instance_id: &str, - target_location: TargetLocation, - mut request_plan_fn: F, -) -> Result -where - F: FnMut(PlanKind, usize, Vec) -> Fut, - Fut: std::future::Future>, -{ - let mut previous_results: Vec = Vec::new(); - let mut last_command: Option> = None; - let mut last_plan_kind = PlanKind::Detect; - - for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { - emit_progress( - app, - session_id, - round, - "planning_detect", - format!("Requesting detection plan for round {round}"), - Some(PlanKind::Detect), - None, - ); - let detect_plan = - request_plan_fn(PlanKind::Detect, round, previous_results.clone()).await?; - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_received", - "round": round, - "planKind": "detect", - "planId": detect_plan.plan_id, - "summary": detect_plan.summary, - "commandCount": detect_plan.commands.len(), - "healthy": detect_plan.healthy, - "done": detect_plan.done, - }), - ); - if detect_plan.healthy || (detect_plan.done && detect_plan.commands.is_empty()) { - return Ok(RemoteDoctorRepairResult { - mode: "remoteDoctor".into(), - status: "completed".into(), - round, - phase: "completed".into(), - last_plan_kind: match last_plan_kind { - PlanKind::Detect => "detect".into(), - PlanKind::Investigate => "investigate".into(), - PlanKind::Repair => "repair".into(), - }, - latest_diagnosis_healthy: true, - last_command, - session_id: session_id.to_string(), - message: "Remote Doctor repair completed 
with a healthy detection result.".into(), - }); - } - previous_results.clear(); - for command in &detect_plan.commands { - last_command = Some(command.argv.clone()); - emit_progress( - app, - session_id, - round, - "executing_detect", - format!("Running detect command: {}", command.argv.join(" ")), - Some(PlanKind::Detect), - Some(command.argv.clone()), - ); - let command_result = - execute_command(pool, target_location, instance_id, &command.argv).await?; - append_remote_doctor_log( - session_id, - json!({ - "event": "command_result", - "round": round, - "planKind": "detect", - "result": command_result, - }), - ); - if command_result.exit_code.unwrap_or(1) != 0 - && !command.continue_on_failure.unwrap_or(false) - { - previous_results.push(command_result); - return Err(format!( - "Detect command failed in round {round}: {}", - command.argv.join(" ") - )); - } - previous_results.push(command_result); - } - - emit_progress( - app, - session_id, - round, - "planning_repair", - format!("Requesting repair plan for round {round}"), - Some(PlanKind::Repair), - None, - ); - let repair_plan = - request_plan_fn(PlanKind::Repair, round, previous_results.clone()).await?; - last_plan_kind = PlanKind::Repair; - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_received", - "round": round, - "planKind": "repair", - "planId": repair_plan.plan_id, - "summary": repair_plan.summary, - "commandCount": repair_plan.commands.len(), - "success": repair_plan.success, - "done": repair_plan.done, - }), - ); - previous_results.clear(); - for command in &repair_plan.commands { - last_command = Some(command.argv.clone()); - emit_progress( - app, - session_id, - round, - "executing_repair", - format!("Running repair command: {}", command.argv.join(" ")), - Some(PlanKind::Repair), - Some(command.argv.clone()), - ); - let command_result = - execute_command(pool, target_location, instance_id, &command.argv).await?; - append_remote_doctor_log( - session_id, - json!({ - "event": 
"command_result", - "round": round, - "planKind": "repair", - "result": command_result, - }), - ); - if command_result.exit_code.unwrap_or(1) != 0 - && !command.continue_on_failure.unwrap_or(false) - { - previous_results.push(command_result); - return Err(format!( - "Repair command failed in round {round}: {}", - command.argv.join(" ") - )); - } - previous_results.push(command_result); - } - } - - append_remote_doctor_log( - session_id, - json!({ - "event": "session_complete", - "status": "failed", - "reason": "round_limit_exceeded", - }), - ); - Err(format!( - "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean detection result" - )) -} - -async fn run_clawpal_server_repair_loop( - app: &AppHandle, - client: &NodeClient, - session_id: &str, - instance_id: &str, - target_location: TargetLocation, -) -> Result { - let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "initial", 0, &diagnosis); - if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { - repair_rescue_gateway_if_needed( - app, - session_id, - 0, - target_location, - instance_id, - &mut diagnosis, - ) - .await?; - } - if diagnosis_is_healthy(&diagnosis) { - return Ok(result_for_completion( - session_id, - 0, - PlanKind::Detect, - None, - "Remote Doctor repair skipped because diagnosis is already healthy.", - )); - } - - let mut last_command = None; - let mut round_observations: Vec = Vec::new(); - let mut last_step_types: Vec = Vec::new(); - for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { - emit_progress( - Some(app), - session_id, - round, - "planning_repair", - format!("Requesting remote repair plan for round {round}"), - Some(PlanKind::Repair), - None, - ); - let config_context = build_config_excerpt_context( - &read_target_config_raw(app, target_location, instance_id).await?, - ); - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_request_context", - "protocol": 
"clawpal_server", - "round": round, - "planKind": "repair", - "instanceId": instance_id, - "targetLocation": target_location, - "configContext": config_excerpt_log_summary(&config_context), - "diagnosisIssueCount": diagnosis.issues.len(), - "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), - }), - ); - if config_context.config_parse_error.is_some() { - append_remote_doctor_log( - session_id, - json!({ - "event": "config_recovery_context", - "round": round, - "context": config_excerpt_log_summary(&config_context), - }), - ); - } - let plan = request_clawpal_server_plan( - client, - session_id, - round, - instance_id, - target_location, - &diagnosis, - &config_context, - ) - .await?; - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_received", - "protocol": "clawpal_server", - "round": round, - "planKind": "repair", - "planId": plan.plan_id, - "summary": plan.summary, - "stepCount": plan.steps.len(), - "stepTypeCounts": clawpal_server_step_type_summary(&plan.steps), - }), - ); - - let mut current_config = config_context.config_excerpt.clone(); - let mut rediagnosed = false; - let mut round_step_types = Vec::new(); - for (step_index, step) in plan.steps.iter().enumerate() { - round_step_types.push(step.step_type.clone()); - let mut result = CommandResult { - argv: Vec::new(), - exit_code: Some(0), - stdout: String::new(), - stderr: String::new(), - duration_ms: 0, - timed_out: false, - }; - let started = Instant::now(); - match step.step_type.as_str() { - "configSet" => { - let path = step.path.as_deref().ok_or("configSet step missing path")?; - let value = step.value.clone().ok_or("configSet step missing value")?; - emit_progress( - Some(app), - session_id, - round, - "executing_repair", - format!("Applying config set: {path}"), - Some(PlanKind::Repair), - None, - ); - apply_config_set(&mut current_config, path, value)?; - write_target_config(app, target_location, instance_id, ¤t_config).await?; - restart_target_gateway(app, 
target_location, instance_id).await?; - result.argv = vec!["configSet".into(), path.into()]; - result.stdout = format!("Updated {path}"); - } - "configUnset" => { - let path = step - .path - .as_deref() - .ok_or("configUnset step missing path")?; - emit_progress( - Some(app), - session_id, - round, - "executing_repair", - format!("Applying config unset: {path}"), - Some(PlanKind::Repair), - None, - ); - apply_config_unset(&mut current_config, path)?; - write_target_config(app, target_location, instance_id, ¤t_config).await?; - restart_target_gateway(app, target_location, instance_id).await?; - result.argv = vec!["configUnset".into(), path.into()]; - result.stdout = format!("Removed {path}"); - } - "doctorRediagnose" => { - emit_progress( - Some(app), - session_id, - round, - "planning_detect", - format!("Running rescue diagnosis after repair plan round {round}"), - Some(PlanKind::Detect), - None, - ); - diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "post_step_rediagnose", round, &diagnosis); - rediagnosed = true; - result.argv = vec!["doctorRediagnose".into()]; - result.stdout = format!( - "Diagnosis status={} issues={}", - diagnosis.status, - diagnosis.issues.len() - ); - } - other => { - result.exit_code = Some(1); - result.stderr = format!("Unsupported clawpal-server step type: {other}"); - } - } - result.duration_ms = started.elapsed().as_millis() as u64; - last_command = Some(result.argv.clone()); - append_remote_doctor_log( - session_id, - json!({ - "event": "command_result", - "protocol": "clawpal_server", - "round": round, - "planKind": "repair", - "stepIndex": step_index, - "step": step, - "result": result, - }), - ); - report_clawpal_server_step_result(client, &plan.plan_id, step_index, step, &result) - .await; - if result.exit_code.unwrap_or(1) != 0 { - return Err(result.stderr); - } - } - - if !rediagnosed { - diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - 
append_diagnosis_log(session_id, "post_round", round, &diagnosis); - } - if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { - repair_rescue_gateway_if_needed( - app, - session_id, - round, - target_location, - instance_id, - &mut diagnosis, - ) - .await?; - } - last_step_types = round_step_types.clone(); - round_observations.push(RepairRoundObservation::new( - round, - &round_step_types, - &diagnosis, - )); - if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { - let observation = round_observations - .last() - .expect("stalled observations should contain current round"); - append_remote_doctor_log( - session_id, - json!({ - "event": "repair_plan_stalled", - "protocol": "clawpal_server", - "round": round, - "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, - "latestStepTypes": observation.step_types, - "issues": observation.issue_summaries, - }), - ); - return Err(stalled_plan_error_message(observation)); - } - let healthy = diagnosis_is_healthy(&diagnosis); - report_clawpal_server_final_result(client, &plan.plan_id, healthy, &diagnosis).await; - if healthy { - return Ok(result_for_completion( - session_id, - round, - PlanKind::Repair, - last_command, - "Remote Doctor repair completed with a healthy rescue diagnosis.", - )); - } - } - - Err(round_limit_error_message(&diagnosis, &last_step_types)) -} - -async fn run_agent_planner_repair_loop( - app: &AppHandle, - client: &NodeClient, - bridge_client: &BridgeClient, - pool: &SshConnectionPool, - session_id: &str, - instance_id: &str, - target_location: TargetLocation, -) -> Result { - let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "initial", 0, &diagnosis); - if diagnosis_is_healthy(&diagnosis) { - return Ok(result_for_completion( - session_id, - 0, - PlanKind::Detect, - None, - "Remote Doctor repair skipped because diagnosis is already healthy.", - )); - } - - let mut previous_results: Vec = Vec::new(); - 
let mut last_command = None; - let mut last_step_types: Vec = Vec::new(); - let mut round_observations: Vec = Vec::new(); - - for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { - let kind = next_agent_plan_kind_for_round(&diagnosis, &previous_results); - let config_context = build_config_excerpt_context( - &read_target_config_raw(app, target_location, instance_id).await?, - ); - let phase = match kind { - PlanKind::Detect => "planning_detect", - PlanKind::Investigate => "planning_investigate", - PlanKind::Repair => "planning_repair", - }; - let line = match kind { - PlanKind::Detect => format!("Requesting detection plan for round {round}"), - PlanKind::Investigate => format!("Requesting investigation plan for round {round}"), - PlanKind::Repair => format!("Requesting repair plan for round {round}"), - }; - emit_progress(Some(app), session_id, round, phase, line, Some(kind), None); - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_request_context", - "protocol": "agent", - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "instanceId": instance_id, - "targetLocation": target_location, - "configContext": config_excerpt_log_summary(&config_context), - "diagnosisIssueCount": diagnosis.issues.len(), - "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), - }), - ); - let plan = request_agent_plan( - app, - client, - bridge_client, - pool, - session_id, - round, - kind, - target_location, - instance_id, - &diagnosis, - &config_context, - &previous_results, - ) - .await?; - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_received", - "protocol": "agent", - "round": round, - "planKind": match plan.plan_kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "planId": plan.plan_id, - "summary": plan.summary, - "commandCount": plan.commands.len(), - "healthy": plan.healthy, 
- "done": plan.done, - "success": plan.success, - }), - ); - previous_results.clear(); - last_step_types = agent_plan_step_types(&plan); - for command in &plan.commands { - last_command = Some(command.argv.clone()); - emit_progress( - Some(app), - session_id, - round, - match kind { - PlanKind::Detect => "executing_detect", - PlanKind::Investigate => "executing_investigate", - PlanKind::Repair => "executing_repair", - }, - format!( - "Running {} command: {}", - match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - command.argv.join(" ") - ), - Some(kind), - Some(command.argv.clone()), - ); - append_remote_doctor_log( - session_id, - json!({ - "event": "command_start", - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "argv": command.argv, - "timeoutSec": command.timeout_sec, - "purpose": command.purpose, - }), - ); - let command_result = - match execute_plan_command(app, pool, target_location, instance_id, &command.argv) - .await - { - Ok(result) => result, - Err(error) => { - return Err(plan_command_failure_message( - kind, - round, - &command.argv, - &error, - )); - } - }; - append_remote_doctor_log( - session_id, - json!({ - "event": "command_result", - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "result": command_result, - }), - ); - if command_result.exit_code.unwrap_or(1) != 0 - && !command.continue_on_failure.unwrap_or(false) - { - return Err(format!( - "{} command failed in round {round}: {}", - match kind { - PlanKind::Detect => "Detect", - PlanKind::Investigate => "Investigate", - PlanKind::Repair => "Repair", - }, - command.argv.join(" ") - )); - } - previous_results.push(command_result); - } - - diagnosis = run_rescue_diagnosis(app, target_location, 
instance_id).await?; - append_diagnosis_log(session_id, "post_round", round, &diagnosis); - if diagnosis_is_healthy(&diagnosis) { - return Ok(result_for_completion( - session_id, - round, - kind, - last_command, - "Remote Doctor repair completed with a healthy rescue diagnosis.", - )); - } - if matches!(kind, PlanKind::Repair) - && plan.done - && plan.commands.is_empty() - && diagnosis_has_only_non_auto_fixable_issues(&diagnosis) - { - return Ok(result_for_completion_with_warnings( - session_id, - round, - kind, - last_command, - "Remote Doctor completed all safe automatic repairs. Remaining issues are non-auto-fixable warnings.", - )); - } - - round_observations.push(RepairRoundObservation::new( - round, - &last_step_types, - &diagnosis, - )); - if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { - let observation = round_observations - .last() - .expect("stalled observations should contain current round"); - append_remote_doctor_log( - session_id, - json!({ - "event": "repair_plan_stalled", - "protocol": "agent", - "round": round, - "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, - "latestStepTypes": observation.step_types, - "issues": observation.issue_summaries, - }), - ); - return Err(stalled_plan_error_message(observation)); - } - } - - Err(round_limit_error_message(&diagnosis, &last_step_types)) -} - -async fn start_remote_doctor_repair_impl( - app: AppHandle, - pool: &SshConnectionPool, - instance_id: String, - target_location: String, -) -> Result { - let target_location = parse_target_location(&target_location)?; - if matches!(target_location, TargetLocation::RemoteOpenclaw) { - ensure_remote_target_connected(pool, &instance_id).await?; - } - let session_id = Uuid::new_v4().to_string(); - let gateway = remote_doctor_gateway_config()?; - let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref())?; - log_dev(format!( - "[remote_doctor] start session={} instance_id={} target_location={:?} gateway_url={} 
auth_token_override={}", - session_id, - instance_id, - target_location, - gateway.url, - gateway.auth_token_override.is_some() - )); - append_remote_doctor_log( - &session_id, - json!({ - "event": "session_start", - "instanceId": instance_id, - "targetLocation": target_location, - "gatewayUrl": gateway.url, - "gatewayAuthTokenOverride": gateway.auth_token_override.is_some(), - }), - ); - - let client = NodeClient::new(); - client.connect(&gateway.url, app.clone(), creds).await?; - let bridge = BridgeClient::new(); - - let forced_protocol = configured_remote_doctor_protocol(); - let active_protocol = forced_protocol.unwrap_or(default_remote_doctor_protocol()); - let pool_ref: &SshConnectionPool = pool; - let app_handle = app.clone(); - let bridge_client = bridge.clone(); - let gateway_url = gateway.url.clone(); - let gateway_auth_override = gateway.auth_token_override.clone(); - if matches!(active_protocol, RemoteDoctorProtocol::AgentPlanner) - && gateway_url_is_local(&gateway_url) - { - ensure_local_remote_doctor_agent_ready()?; - } - if protocol_requires_bridge(active_protocol) { - ensure_agent_bridge_connected( - &app, - &bridge, - &gateway_url, - gateway_auth_override.as_deref(), - &session_id, - ) - .await; - } - let result = match active_protocol { - RemoteDoctorProtocol::AgentPlanner => { - let agent = run_agent_planner_repair_loop( - &app, - &client, - &bridge_client, - pool_ref, - &session_id, - &instance_id, - target_location, - ) - .await; - - if forced_protocol.is_none() - && matches!(&agent, Err(error) if is_unknown_method_error(error)) - { - append_remote_doctor_log( - &session_id, - json!({ - "event": "protocol_fallback", - "from": "agent", - "to": "legacy_doctor", - "reason": agent.as_ref().err(), - }), - ); - run_remote_doctor_repair_loop( - Some(&app), - pool_ref, - &session_id, - &instance_id, - target_location, - |kind, round, previous_results| { - let method = match kind { - PlanKind::Detect => detect_method_name(), - PlanKind::Investigate => 
repair_method_name(), - PlanKind::Repair => repair_method_name(), - }; - let client = &client; - let session_id = &session_id; - let instance_id = &instance_id; - async move { - request_plan( - client, - &method, - kind, - session_id, - round, - target_location, - instance_id, - &previous_results, - ) - .await - } - }, - ) - .await - } else { - agent - } - } - RemoteDoctorProtocol::LegacyDoctor => { - let legacy = run_remote_doctor_repair_loop( - Some(&app), - pool_ref, - &session_id, - &instance_id, - target_location, - |kind, round, previous_results| { - let method = match kind { - PlanKind::Detect => detect_method_name(), - PlanKind::Investigate => repair_method_name(), - PlanKind::Repair => repair_method_name(), - }; - let client = &client; - let session_id = &session_id; - let instance_id = &instance_id; - async move { - request_plan( - client, - &method, - kind, - session_id, - round, - target_location, - instance_id, - &previous_results, - ) - .await - } - }, - ) - .await; - - if forced_protocol.is_none() - && matches!(&legacy, Err(error) if is_unknown_method_error(error)) - { - append_remote_doctor_log( - &session_id, - json!({ - "event": "protocol_fallback", - "from": "legacy_doctor", - "to": "clawpal_server", - "reason": legacy.as_ref().err(), - }), - ); - log_dev(format!( - "[remote_doctor] session={} protocol fallback legacy_doctor -> clawpal_server", - session_id - )); - run_clawpal_server_repair_loop( - &app, - &client, - &session_id, - &instance_id, - target_location, - ) - .await - } else { - legacy - } - } - RemoteDoctorProtocol::ClawpalServer => { - let clawpal_server = run_clawpal_server_repair_loop( - &app, - &client, - &session_id, - &instance_id, - target_location, - ) - .await; - if forced_protocol.is_none() - && matches!(&clawpal_server, Err(error) if is_unknown_method_error(error)) - { - append_remote_doctor_log( - &session_id, - json!({ - "event": "protocol_fallback", - "from": "clawpal_server", - "to": "agent", - "reason": 
clawpal_server.as_ref().err(), - }), - ); - let agent = run_remote_doctor_repair_loop( - Some(&app), - pool_ref, - &session_id, - &instance_id, - target_location, - |kind, round, previous_results| { - let client = &client; - let session_id = &session_id; - let instance_id = &instance_id; - let app_handle = app_handle.clone(); - let bridge_client = bridge_client.clone(); - let gateway_url = gateway_url.clone(); - let gateway_auth_override = gateway_auth_override.clone(); - let empty_diagnosis = empty_diagnosis(); - let empty_config = empty_config_excerpt_context(); - async move { - ensure_agent_bridge_connected( - &app_handle, - &bridge_client, - &gateway_url, - gateway_auth_override.as_deref(), - session_id, - ) - .await; - let text = if bridge_client.is_connected().await { - run_agent_request_with_bridge( - &app_handle, - client, - &bridge_client, - pool_ref, - target_location, - instance_id, - remote_doctor_agent_id(), - &remote_doctor_agent_session_key(session_id), - &build_agent_plan_prompt( - kind, - session_id, - round, - target_location, - instance_id, - &empty_diagnosis, - &empty_config, - &previous_results, - ), - ) - .await? - } else { - client - .run_agent_request( - remote_doctor_agent_id(), - &remote_doctor_agent_session_key(session_id), - &build_agent_plan_prompt( - kind, - session_id, - round, - target_location, - instance_id, - &empty_diagnosis, - &empty_config, - &previous_results, - ), - ) - .await? 
- }; - parse_agent_plan_response(kind, &text) - } - }, - ) - .await; - if matches!(&agent, Err(error) if is_unknown_method_error(error)) { - append_remote_doctor_log( - &session_id, - json!({ - "event": "protocol_fallback", - "from": "agent", - "to": "legacy_doctor", - "reason": agent.as_ref().err(), - }), - ); - run_remote_doctor_repair_loop( - Some(&app), - pool_ref, - &session_id, - &instance_id, - target_location, - |kind, round, previous_results| { - let method = match kind { - PlanKind::Detect => detect_method_name(), - PlanKind::Investigate => repair_method_name(), - PlanKind::Repair => repair_method_name(), - }; - let client = &client; - let session_id = &session_id; - let instance_id = &instance_id; - async move { - request_plan( - client, - &method, - kind, - session_id, - round, - target_location, - instance_id, - &previous_results, - ) - .await - } - }, - ) - .await - } else { - agent - } - } else { - clawpal_server - } - } - }; - - let _ = client.disconnect().await; - let _ = bridge.disconnect().await; - - match result { - Ok(done) => { - append_remote_doctor_log( - &session_id, - json!({ - "event": "session_complete", - "status": "completed", - "latestDiagnosisHealthy": done.latest_diagnosis_healthy, - }), - ); - Ok(done) - } - Err(error) => { - append_remote_doctor_log( - &session_id, - json!({ - "event": "session_complete", - "status": "failed", - "reason": error, - }), - ); - Err(error) - } - } -} - -#[tauri::command] -pub async fn start_remote_doctor_repair( - app: AppHandle, - pool: State<'_, SshConnectionPool>, - instance_id: String, - target_location: String, -) -> Result { - start_remote_doctor_repair_impl(app, &pool, instance_id, target_location).await -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; - use crate::ssh::SshHostConfig; - use std::net::TcpStream; - use tauri::test::mock_app; - - #[test] - fn build_shell_command_escapes_single_quotes() { 
- let command = build_shell_command(&["echo".into(), "a'b".into()]); - assert_eq!(command, "'echo' 'a'\\''b'"); - } - - #[test] - fn parse_target_location_rejects_unknown_values() { - let error = parse_target_location("elsewhere").unwrap_err(); - assert!(error.contains("Unsupported target location")); - } - - #[test] - fn apply_config_set_creates_missing_object_path() { - let mut value = json!({}); - apply_config_set( - &mut value, - "models.providers.openai.baseUrl", - json!("http://127.0.0.1:3000/v1"), - ) - .expect("config set"); - assert_eq!( - value - .pointer("/models/providers/openai/baseUrl") - .and_then(Value::as_str), - Some("http://127.0.0.1:3000/v1") - ); - } - - #[test] - fn apply_config_unset_removes_existing_leaf() { - let mut value = json!({ - "models": { - "providers": { - "openai": { - "baseUrl": "http://127.0.0.1:3000/v1", - "models": [{"id": "gpt-4.1"}] - } - } - } - }); - apply_config_unset(&mut value, "models.providers.openai.baseUrl").expect("config unset"); - assert!(value.pointer("/models/providers/openai/baseUrl").is_none()); - assert!(value.pointer("/models/providers/openai/models").is_some()); - } - - #[test] - fn parse_agent_plan_response_reads_json_payload() { - let text = r#"preface -{"planId":"detect-1","planKind":"detect","summary":"ok","commands":[{"argv":["openclaw","doctor","--json"]}],"healthy":false,"done":false,"success":false} -"#; - let plan = parse_agent_plan_response(PlanKind::Detect, text).expect("parse plan"); - assert_eq!(plan.plan_id, "detect-1"); - assert_eq!(plan.commands[0].argv, vec!["openclaw", "doctor", "--json"]); - } - - #[test] - fn build_agent_plan_prompt_mentions_target_and_schema() { - let prompt = build_agent_plan_prompt( - PlanKind::Repair, - "sess-1", - 3, - TargetLocation::RemoteOpenclaw, - "ssh:vm1", - &sample_diagnosis(Vec::new()), - &ConfigExcerptContext { - config_excerpt: json!({"ok": true}), - config_excerpt_raw: None, - config_parse_error: None, - }, - &[], - ); - assert!(prompt.contains("Task: 
produce the next repair plan")); - assert!(prompt.contains("Target location: remote_openclaw")); - assert!(prompt.contains("\"planKind\": \"repair\"")); - assert!(prompt.contains("\"configExcerpt\"")); - assert!(prompt.contains("clawpal doctor probe-openclaw")); - assert!(prompt.contains("openclaw gateway status")); - assert!(prompt.contains("Output valid JSON only.")); - } - - #[test] - fn default_remote_doctor_protocol_prefers_agent() { - assert_eq!( - default_remote_doctor_protocol(), - RemoteDoctorProtocol::AgentPlanner - ); - } - - #[test] - fn unreadable_config_requires_investigate_plan_kind() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "primary.config.unreadable", - "code": "primary.config.unreadable", - "severity": "error", - "message": "Primary configuration could not be read", - "autoFixable": false, - "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", - "source": "primary" - })]); - assert_eq!(next_agent_plan_kind(&diagnosis), PlanKind::Investigate); - } - - #[test] - fn unreadable_config_switches_to_repair_after_investigation_results_exist() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "primary.config.unreadable", - "code": "primary.config.unreadable", - "severity": "error", - "message": "Primary configuration could not be read", - "autoFixable": false, - "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", - "source": "primary" - })]); - let previous_results = vec![CommandResult { - argv: vec!["clawpal".into(), "doctor".into(), "config-read-raw".into()], - exit_code: Some(0), - stdout: "{\"raw\":\"{\\n ddd\\n}\"}".into(), - stderr: String::new(), - duration_ms: 1, - timed_out: false, - }]; - assert_eq!( - next_agent_plan_kind_for_round(&diagnosis, &previous_results), - PlanKind::Repair - ); - } - - #[test] - fn non_auto_fixable_warning_only_diagnosis_is_terminal() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "rescue.gateway.unhealthy", - 
"code": "rescue.gateway.unhealthy", - "severity": "warn", - "message": "Rescue gateway is not healthy", - "autoFixable": false, - "fixHint": "Inspect rescue gateway logs before using failover", - "source": "rescue" - })]); - assert!(diagnosis_has_only_non_auto_fixable_issues(&diagnosis)); - } - - #[test] - fn investigate_prompt_requires_read_only_diagnosis_steps() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "primary.config.unreadable", - "code": "primary.config.unreadable", - "severity": "error", - "message": "Primary configuration could not be read", - "autoFixable": false, - "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", - "source": "primary" - })]); - let prompt = build_agent_plan_prompt( - PlanKind::Investigate, - "sess-1", - 1, - TargetLocation::RemoteOpenclaw, - "ssh:vm1", - &diagnosis, - &build_config_excerpt_context("{\n ddd\n}"), - &[], - ); - assert!(prompt.contains("read-only")); - assert!(prompt.contains("Do not modify files")); - assert!(prompt.contains("\"planKind\": \"investigate\"")); - assert!(prompt.contains("configParseError")); - } - - #[test] - fn investigate_prompt_discourages_long_running_log_commands() { - let prompt = build_agent_plan_prompt( - PlanKind::Investigate, - "sess-1", - 1, - TargetLocation::RemoteOpenclaw, - "ssh:vm1", - &sample_diagnosis(Vec::new()), - &empty_config_excerpt_context(), - &[], - ); - assert!(prompt.contains("Do not run follow/tail commands")); - assert!(prompt.contains("bounded")); - assert!(prompt.contains("Do not use heredocs")); - } - - #[test] - fn repair_prompt_discourages_unverified_openclaw_subcommands() { - let prompt = build_agent_plan_prompt( - PlanKind::Repair, - "sess-1", - 2, - TargetLocation::RemoteOpenclaw, - "ssh:vm1", - &sample_diagnosis(Vec::new()), - &empty_config_excerpt_context(), - &[], - ); - assert!(prompt.contains("Do not invent OpenClaw subcommands")); - assert!(prompt.contains("Do not use `openclaw auth")); - 
assert!(prompt.contains("Do not use `openclaw doctor --json`")); - assert!(!prompt.contains("- `openclaw doctor --json`")); - } - - #[test] - fn remote_doctor_agent_id_is_dedicated() { - assert_eq!(remote_doctor_agent_id(), "clawpal-remote-doctor"); - assert!(!remote_doctor_agent_session_key("sess-1").contains("main")); - assert!( - remote_doctor_agent_session_key("sess-1").starts_with("agent:clawpal-remote-doctor:") - ); - } - - #[test] - fn ensure_local_remote_doctor_agent_creates_workspace_bootstrap_files() { - let temp_root = std::env::temp_dir().join(format!( - "clawpal-remote-doctor-agent-test-{}", - Uuid::new_v4() - )); - let home_dir = temp_root.join("home"); - let clawpal_dir = temp_root.join("clawpal"); - let openclaw_dir = home_dir.join(".openclaw"); - std::fs::create_dir_all(&openclaw_dir).expect("create openclaw dir"); - std::fs::create_dir_all(&clawpal_dir).expect("create clawpal dir"); - std::fs::write( - openclaw_dir.join("openclaw.json"), - r#"{ - "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } }, - "agents": { - "defaults": { "model": "openai/gpt-4o-mini" }, - "list": [{ "id": "main", "workspace": "~/.openclaw/workspaces/main" }] - } -} -"#, - ) - .expect("write config"); - - set_active_openclaw_home_override(Some(home_dir.to_string_lossy().to_string())) - .expect("set openclaw override"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal override"); - - let result = ensure_local_remote_doctor_agent_ready(); - - set_active_openclaw_home_override(None).expect("clear openclaw override"); - set_active_clawpal_data_override(None).expect("clear clawpal override"); - - if let Err(error) = &result { - let _ = std::fs::remove_dir_all(&temp_root); - panic!("ensure agent ready: {error}"); - } - - let cfg: Value = serde_json::from_str( - &std::fs::read_to_string(openclaw_dir.join("openclaw.json")).expect("read config"), - ) - .expect("parse config"); - let agent = cfg["agents"]["list"] 
- .as_array() - .and_then(|agents| { - agents.iter().find(|agent| { - agent.get("id").and_then(Value::as_str) == Some(remote_doctor_agent_id()) - }) - }) - .expect("dedicated agent entry"); - let workspace = agent["workspace"] - .as_str() - .expect("agent workspace") - .replace("~/", &format!("{}/", home_dir.to_string_lossy())); - for file_name in ["IDENTITY.md", "USER.md", "BOOTSTRAP.md", "AGENTS.md"] { - let content = std::fs::read_to_string(std::path::Path::new(&workspace).join(file_name)) - .unwrap_or_else(|error| panic!("read {file_name}: {error}")); - assert!( - !content.trim().is_empty(), - "{file_name} should not be empty" - ); - } - - let _ = std::fs::remove_dir_all(&temp_root); - } - - #[test] - fn only_agent_planner_protocol_requires_bridge() { - assert!(protocol_requires_bridge(RemoteDoctorProtocol::AgentPlanner)); - assert!(!protocol_requires_bridge( - RemoteDoctorProtocol::ClawpalServer - )); - assert!(!protocol_requires_bridge( - RemoteDoctorProtocol::LegacyDoctor - )); - } - - #[test] - fn clawpal_server_protocol_skips_local_rescue_preflight() { - assert!(!protocol_runs_rescue_preflight( - RemoteDoctorProtocol::ClawpalServer - )); - assert!(!protocol_runs_rescue_preflight( - RemoteDoctorProtocol::AgentPlanner - )); - } - - #[test] - fn remote_target_host_id_candidates_include_exact_and_stripped_ids() { - assert_eq!( - remote_target_host_id_candidates("ssh:15-235-214-81"), - vec!["ssh:15-235-214-81".to_string(), "15-235-214-81".to_string()] - ); - assert_eq!( - remote_target_host_id_candidates("e2e-remote-doctor"), - vec!["e2e-remote-doctor".to_string()] - ); - } - - #[test] - fn primary_remote_target_host_id_prefers_exact_instance_id() { - assert_eq!( - primary_remote_target_host_id("ssh:15-235-214-81").unwrap(), - "ssh:15-235-214-81" - ); - } - - #[test] - fn parse_invoke_argv_supports_command_string_payloads() { - let argv = parse_invoke_argv( - "clawpal", - &json!({ - "command": "doctor config-read models.providers.openai" - }), - ) - 
.expect("parse invoke argv"); - assert_eq!( - argv, - vec![ - "clawpal", - "doctor", - "config-read", - "models.providers.openai" - ] - ); - } - - #[test] - fn plan_commands_treat_clawpal_as_internal_tool() { - assert!(plan_command_uses_internal_clawpal_tool(&[ - "clawpal".to_string(), - "doctor".to_string(), - "config-read".to_string(), - ])); - assert!(!plan_command_uses_internal_clawpal_tool(&[ - "openclaw".to_string(), - "doctor".to_string(), - ])); - } - - #[test] - fn unsupported_openclaw_subcommand_is_rejected_early() { - let error = validate_plan_command_argv(&[ - "openclaw".to_string(), - "auth".to_string(), - "list".to_string(), - ]) - .unwrap_err(); - assert!(error.contains("Unsupported openclaw plan command")); - assert!(error.contains("openclaw auth list")); - } - - #[test] - fn openclaw_doctor_json_is_rejected_early() { - let error = validate_plan_command_argv(&[ - "openclaw".to_string(), - "doctor".to_string(), - "--json".to_string(), - ]) - .unwrap_err(); - assert!(error.contains("Unsupported openclaw plan command")); - assert!(error.contains("openclaw doctor --json")); - } - - #[test] - fn multiline_clawpal_exec_is_rejected_early() { - let error = validate_plan_command_argv(&[ - "clawpal".to_string(), - "doctor".to_string(), - "exec".to_string(), - "--tool".to_string(), - "python3".to_string(), - "--args".to_string(), - "- <<'PY'\nprint('hi')\nPY".to_string(), - ]) - .unwrap_err(); - assert!(error.contains("Unsupported clawpal doctor exec args")); - assert!(error.contains("heredocs")); - } - - #[test] - fn plan_command_failure_message_mentions_command_and_error() { - let error = plan_command_failure_message( - PlanKind::Investigate, - 2, - &[ - "openclaw".to_string(), - "gateway".to_string(), - "logs".to_string(), - ], - "ssh command failed: russh exec timed out after 25s", - ); - assert!(error.contains("Investigate command failed in round 2")); - assert!(error.contains("openclaw gateway logs")); - assert!(error.contains("timed out after 25s")); - 
} - - fn sample_diagnosis(issues: Vec) -> RescuePrimaryDiagnosisResult { - serde_json::from_value(json!({ - "status": if issues.is_empty() { "healthy" } else { "broken" }, - "checkedAt": "2026-03-18T00:00:00Z", - "targetProfile": "primary", - "rescueProfile": "rescue", - "rescueConfigured": true, - "rescuePort": 18789, - "summary": { - "status": if issues.is_empty() { "healthy" } else { "broken" }, - "headline": if issues.is_empty() { "Healthy" } else { "Broken" }, - "recommendedAction": if issues.is_empty() { "No action needed" } else { "Repair issues" }, - "fixableIssueCount": issues.len(), - "selectedFixIssueIds": issues.iter().filter_map(|issue| issue.get("id").and_then(Value::as_str)).collect::>(), - "rootCauseHypotheses": [], - "fixSteps": [], - "confidence": 0.8, - "citations": [], - "versionAwareness": null - }, - "sections": [], - "checks": [], - "issues": issues - })) - .expect("sample diagnosis") - } - - #[test] - fn diagnosis_issue_summaries_capture_code_severity_and_message() { - let diagnosis = sample_diagnosis(vec![ - json!({ - "id": "gateway.unhealthy", - "code": "gateway.unhealthy", - "severity": "high", - "message": "Gateway is unhealthy", - "autoFixable": true, - "fixHint": "Restart gateway", - "source": "gateway" - }), - json!({ - "id": "providers.base_url", - "code": "invalid.base_url", - "severity": "medium", - "message": "Provider base URL is invalid", - "autoFixable": true, - "fixHint": "Reset baseUrl", - "source": "config" - }), - ]); - - let summary = diagnosis_issue_summaries(&diagnosis); - assert_eq!(summary.len(), 2); - assert_eq!(summary[0]["code"], "gateway.unhealthy"); - assert_eq!(summary[0]["severity"], "high"); - assert_eq!(summary[0]["title"], "Gateway is unhealthy"); - assert_eq!(summary[0]["target"], "gateway"); - assert_eq!(summary[1]["code"], "invalid.base_url"); - } - - #[test] - fn repeated_rediagnose_only_rounds_are_detected_as_stalled() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "providers.base_url", - 
"code": "invalid.base_url", - "severity": "medium", - "message": "Provider base URL is invalid", - "autoFixable": true, - "fixHint": "Reset baseUrl", - "source": "config" - })]); - let step_types = vec!["doctorRediagnose".to_string()]; - - assert!(!repair_plan_stalled( - &[ - RepairRoundObservation::new(1, &step_types, &diagnosis), - RepairRoundObservation::new(2, &step_types, &diagnosis), - ], - 3, - )); - assert!(repair_plan_stalled( - &[ - RepairRoundObservation::new(1, &step_types, &diagnosis), - RepairRoundObservation::new(2, &step_types, &diagnosis), - RepairRoundObservation::new(3, &step_types, &diagnosis), - ], - 3, - )); - } - - #[test] - fn round_limit_error_message_includes_latest_issues_and_step_types() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "providers.base_url", - "code": "invalid.base_url", - "severity": "medium", - "message": "Provider base URL is invalid", - "autoFixable": true, - "fixHint": "Reset baseUrl", - "source": "config" - })]); - let error = round_limit_error_message(&diagnosis, &["doctorRediagnose".to_string()]); - assert!(error.contains("invalid.base_url")); - assert!(error.contains("doctorRediagnose")); - assert!(error.contains("Provider base URL is invalid")); - } - - #[test] - fn unreadable_config_context_uses_raw_excerpt_and_parse_error() { - let context = build_config_excerpt_context("{\n ddd\n}"); - assert!(context.config_excerpt.is_null()); - assert!(context - .config_excerpt_raw - .as_deref() - .unwrap_or_default() - .contains("ddd")); - assert!(context - .config_parse_error - .as_deref() - .unwrap_or_default() - .contains("key must be a string")); - } - - #[test] - fn unreadable_config_context_summary_marks_excerpt_missing() { - let context = build_config_excerpt_context("{\n ddd\n}"); - let summary = config_excerpt_log_summary(&context); - assert_eq!(summary["configExcerptPresent"], json!(false)); - assert_eq!(summary["configExcerptRawPresent"], json!(true)); - assert!(summary["configParseError"] - .as_str() - 
.unwrap_or_default() - .contains("key must be a string")); - } - - #[test] - fn config_read_response_returns_raw_context_for_unreadable_json() { - let value = config_read_response("{\n ddd\n}", None).expect("config read response"); - assert!(value["value"].is_null()); - assert!(value["raw"].as_str().unwrap_or_default().contains("ddd")); - assert!(value["parseError"] - .as_str() - .unwrap_or_default() - .contains("key must be a string")); - } - - #[test] - fn decode_base64_config_payload_reads_utf8_text() { - use base64::Engine as _; - let encoded = base64::engine::general_purpose::STANDARD.encode("{\"ok\":true}"); - let decoded = decode_base64_config_payload(&encoded).expect("decode payload"); - assert_eq!(decoded, "{\"ok\":true}"); - } - - #[test] - fn diagnosis_missing_rescue_profile_is_detected() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "rescue.profile.missing", - "code": "rescue.profile.missing", - "severity": "error", - "message": "Rescue profile \"rescue\" is not configured", - "autoFixable": false, - "fixHint": "Activate Rescue Bot first", - "source": "rescue" - })]); - assert!(diagnosis_missing_rescue_profile(&diagnosis)); - } - - #[test] - fn diagnosis_unhealthy_rescue_gateway_is_detected() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "rescue.gateway.unhealthy", - "code": "rescue.gateway.unhealthy", - "severity": "warn", - "message": "Rescue gateway is not healthy", - "autoFixable": false, - "fixHint": "Inspect rescue gateway logs before using failover", - "source": "rescue" - })]); - assert!(diagnosis_unhealthy_rescue_gateway(&diagnosis)); - } - - #[test] - fn rescue_setup_command_result_reports_activation() { - let result = rescue_setup_command_result("activate", "rescue", true, true, "active"); - assert_eq!(result.argv, vec!["manage_rescue_bot", "activate", "rescue"]); - assert_eq!(result.exit_code, Some(0)); - assert!(result.stdout.contains("configured=true")); - assert!(result.stdout.contains("active=true")); - } - - 
#[test] - fn rescue_setup_activation_error_mentions_runtime_state() { - let error = rescue_activation_error_message( - "rescue", - false, - "configured_inactive", - &[ - "manage_rescue_bot status rescue".to_string(), - "openclaw --profile rescue gateway status".to_string(), - ], - ); - assert!(error.contains("rescue")); - assert!(error.contains("configured_inactive")); - assert!(error.contains("did not become active")); - assert!(error.contains("manage_rescue_bot status rescue")); - assert!(error.contains("openclaw --profile rescue gateway status")); - } - - #[test] - fn rescue_activation_diagnostic_commands_include_status_and_gateway_checks() { - let commands = rescue_activation_diagnostic_commands("rescue"); - let rendered = commands - .iter() - .map(|command| command.join(" ")) - .collect::>(); - assert!(rendered.contains(&"manage_rescue_bot status rescue".to_string())); - assert!(rendered.contains(&"openclaw --profile rescue gateway status".to_string())); - assert!(rendered - .contains(&"openclaw --profile rescue config get gateway.port --json".to_string())); - } - - const E2E_CONTAINER_NAME: &str = "clawpal-e2e-remote-doctor"; - const E2E_SSH_PORT: u16 = 2399; - const E2E_ROOT_PASSWORD: &str = "clawpal-remote-doctor-pass"; - const E2E_DOCKERFILE: &str = r#" -FROM ubuntu:22.04 -ENV DEBIAN_FRONTEND=noninteractive -RUN apt-get update && apt-get install -y openssh-server && rm -rf /var/lib/apt/lists/* && mkdir /var/run/sshd -RUN echo "root:ROOTPASS" | chpasswd && \ - sed -i 's/#PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config && \ - sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config && \ - echo "PasswordAuthentication yes" >> /etc/ssh/sshd_config -RUN mkdir -p /root/.openclaw -RUN cat > /root/.openclaw/openclaw.json <<'EOF' -{ - "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } }, - "auth": { - "profiles": { - "openai-default": { - "provider": "openai", - "apiKey": "sk-test" - } - } - }, - "models": { 
- "providers": { - "openai": { - "baseUrl": "http://127.0.0.1:9/v1", - "models": [{ "id": "gpt-4o-mini", "name": "gpt-4o-mini" }] - } - } - }, - "agents": { - "defaults": { "model": "openai/gpt-4o-mini" }, - "list": [ { "id": "main", "model": "anthropic/claude-sonnet-4-20250514" } ] - }, - "channels": { - "discord": { - "guilds": { - "guild-1": { - "channels": { - "general": { "model": "openai/gpt-4o-mini" } - } - } - } - } - } -} -EOF -RUN cat > /usr/local/bin/openclaw <<'EOF' && chmod +x /usr/local/bin/openclaw -#!/bin/sh -STATE_DIR="${OPENCLAW_STATE_DIR:-${OPENCLAW_HOME:-$HOME/.openclaw}}" -CONFIG_PATH="$STATE_DIR/openclaw.json" -PROFILE="primary" -if [ "$1" = "--profile" ]; then - PROFILE="$2" - shift 2 -fi -case "$1" in - --version) - echo "openclaw 2026.3.2-test" - ;; - doctor) - if grep -q '127.0.0.1:9/v1' "$CONFIG_PATH"; then - echo '{"ok":false,"score":40,"issues":[{"id":"primary.models.base_url","code":"invalid.base_url","severity":"error","message":"provider baseUrl points to test blackhole","autoFixable":true,"fixHint":"Remove the bad baseUrl override"}]}' - else - echo '{"ok":true,"score":100,"issues":[],"checks":[{"id":"test","status":"ok"}]}' - fi - ;; - agents) - if [ "$2" = "list" ] && [ "$3" = "--json" ]; then - echo '[{"id":"main"}]' - else - echo "unsupported openclaw agents command" >&2 - exit 1 - fi - ;; - models) - if [ "$2" = "list" ] && [ "$3" = "--all" ] && [ "$4" = "--json" ] && [ "$5" = "--no-color" ]; then - echo '{"models":[{"key":"openai/gpt-4o-mini","provider":"openai","id":"gpt-4o-mini","name":"gpt-4o-mini","baseUrl":"https://api.openai.com/v1"}],"providers":{"openai":{"baseUrl":"https://api.openai.com/v1"}}}' - else - echo "unsupported openclaw models command" >&2 - exit 1 - fi - ;; - config) - if [ "$2" = "get" ] && [ "$3" = "gateway.port" ] && [ "$4" = "--json" ]; then - if [ "$PROFILE" = "rescue" ]; then - echo '19789' - else - echo '18789' - fi - else - echo "unsupported openclaw config command: $*" >&2 - exit 1 - fi - ;; - 
gateway) - case "$2" in - status) - if [ "$PROFILE" = "rescue" ] && [ "${OPENCLAW_RESCUE_GATEWAY_ACTIVE:-1}" != "1" ]; then - echo '{"running":false,"healthy":false,"gateway":{"running":false},"health":{"ok":false}}' - else - echo '{"running":true,"healthy":true,"gateway":{"running":true},"health":{"ok":true}}' - fi - ;; - restart|start|stop) - echo '{"ok":true}' - ;; - *) - echo "unsupported openclaw gateway command: $*" >&2 - exit 1 - ;; - esac - ;; - *) - echo "unsupported openclaw command: $*" >&2 - exit 1 - ;; -esac -EOF -EXPOSE 22 -CMD ["/usr/sbin/sshd", "-D"] -"#; - - fn should_run_docker_e2e() -> bool { - std::env::var("CLAWPAL_RUN_REMOTE_DOCTOR_E2E") - .ok() - .as_deref() - == Some("1") - } - - fn live_gateway_url() -> Option { - std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL") - .ok() - .map(|value| value.trim().to_string()) - .filter(|value| !value.is_empty()) - } - - fn live_gateway_token() -> Option { - std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN") - .ok() - .map(|value| value.trim().to_string()) - .filter(|value| !value.is_empty()) - } - - fn live_gateway_instance_id() -> String { - std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_INSTANCE_ID") - .ok() - .map(|value| value.trim().to_string()) - .filter(|value| !value.is_empty()) - .unwrap_or_else(|| "local".to_string()) - } - - fn live_gateway_target_location() -> TargetLocation { - match std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TARGET_LOCATION") - .ok() - .as_deref() - { - Some("remote_openclaw") => TargetLocation::RemoteOpenclaw, - _ => TargetLocation::LocalOpenclaw, - } - } - - fn live_gateway_protocol() -> String { - std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_PROTOCOL") - .ok() - .map(|value| value.trim().to_string()) - .filter(|value| !value.is_empty()) - .unwrap_or_else(|| "clawpal_server".to_string()) - } - - fn docker_available() -> bool { - Command::new("docker") - .args(["info"]) - .stdout(std::process::Stdio::null()) - .stderr(std::process::Stdio::null()) - .status() - 
.map(|status| status.success()) - .unwrap_or(false) - } - - fn cleanup_e2e_container() { - let _ = Command::new("docker") - .args(["rm", "-f", E2E_CONTAINER_NAME]) - .stdout(std::process::Stdio::null()) - .stderr(std::process::Stdio::null()) - .status(); - let _ = Command::new("docker") - .args(["rmi", "-f", &format!("{E2E_CONTAINER_NAME}:latest")]) - .stdout(std::process::Stdio::null()) - .stderr(std::process::Stdio::null()) - .status(); - } - - fn build_e2e_image() -> Result<(), String> { - let dockerfile = E2E_DOCKERFILE.replace("ROOTPASS", E2E_ROOT_PASSWORD); - let output = Command::new("docker") - .args([ - "build", - "-t", - &format!("{E2E_CONTAINER_NAME}:latest"), - "-f", - "-", - ".", - ]) - .stdin(std::process::Stdio::piped()) - .stdout(std::process::Stdio::piped()) - .stderr(std::process::Stdio::piped()) - .current_dir(std::env::temp_dir()) - .spawn() - .and_then(|mut child| { - if let Some(ref mut stdin) = child.stdin { - stdin.write_all(dockerfile.as_bytes())?; - } - child.wait_with_output() - }) - .map_err(|error| format!("docker build failed: {error}"))?; - if !output.status.success() { - return Err(String::from_utf8_lossy(&output.stderr).to_string()); - } - Ok(()) - } - - fn start_e2e_container() -> Result<(), String> { - start_e2e_container_with_env(&[]) - } - - fn start_e2e_container_with_env(env: &[(&str, &str)]) -> Result<(), String> { - let mut args = vec![ - "run".to_string(), - "-d".to_string(), - "--name".to_string(), - E2E_CONTAINER_NAME.to_string(), - ]; - for (key, value) in env { - args.push("-e".to_string()); - args.push(format!("{key}={value}")); - } - args.extend([ - "-p".to_string(), - format!("{E2E_SSH_PORT}:22"), - format!("{E2E_CONTAINER_NAME}:latest"), - ]); - let output = Command::new("docker") - .args(&args) - .output() - .map_err(|error| format!("docker run failed: {error}"))?; - if !output.status.success() { - return Err(String::from_utf8_lossy(&output.stderr).to_string()); - } - Ok(()) - } - - fn wait_for_ssh(timeout_secs: 
u64) -> Result<(), String> { - let start = Instant::now(); - while start.elapsed().as_secs() < timeout_secs { - if TcpStream::connect(format!("127.0.0.1:{E2E_SSH_PORT}")).is_ok() { - std::thread::sleep(std::time::Duration::from_millis(500)); - return Ok(()); - } - std::thread::sleep(std::time::Duration::from_millis(300)); - } - Err("timeout waiting for ssh".into()) - } - - fn e2e_host_config() -> SshHostConfig { - SshHostConfig { - id: "e2e-remote-doctor".into(), - label: "E2E Remote Doctor".into(), - host: "127.0.0.1".into(), - port: E2E_SSH_PORT, - username: "root".into(), - auth_method: "password".into(), - key_path: None, - password: Some(E2E_ROOT_PASSWORD.into()), - passphrase: None, - } - } - - #[tokio::test] - async fn remote_doctor_docker_e2e_loop_completes() { - if !should_run_docker_e2e() { - eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); - return; - } - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container().expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - wait_for_ssh(30).expect("ssh should become available"); - - let temp_root = - std::env::temp_dir().join(format!("clawpal-remote-doctor-e2e-{}", Uuid::new_v4())); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - set_active_openclaw_home_override(None).expect("clear openclaw home override"); - - let pool = SshConnectionPool::new(); - let cfg = e2e_host_config(); - pool.connect(&cfg).await.expect("ssh connect"); - - let session_id = Uuid::new_v4().to_string(); - let marker = "/tmp/clawpal-remote-doctor-fixed"; - let result = run_remote_doctor_repair_loop( - Option::<&AppHandle>::None, - 
&pool, - &session_id, - &format!("ssh:{}", cfg.id), - TargetLocation::RemoteOpenclaw, - |kind, round, previous_results| async move { - match (kind, round) { - (PlanKind::Detect, 1) => Ok(PlanResponse { - plan_id: "detect-1".into(), - plan_kind: PlanKind::Detect, - summary: "Initial detect".into(), - commands: vec![PlanCommand { - argv: vec!["openclaw".into(), "--version".into()], - timeout_sec: Some(10), - purpose: Some("collect version".into()), - continue_on_failure: Some(false), - }], - healthy: false, - done: false, - success: false, - }), - (PlanKind::Repair, 1) => { - assert_eq!(previous_results.len(), 1); - Ok(PlanResponse { - plan_id: "repair-1".into(), - plan_kind: PlanKind::Repair, - summary: "Write marker".into(), - commands: vec![PlanCommand { - argv: vec![ - "sh".into(), - "-lc".into(), - format!("printf 'fixed' > {marker}"), - ], - timeout_sec: Some(10), - purpose: Some("mark repaired".into()), - continue_on_failure: Some(false), - }], - healthy: false, - done: false, - success: false, - }) - } - (PlanKind::Detect, 2) => { - assert_eq!(previous_results.len(), 1); - assert_eq!( - previous_results[0].stdout.trim(), - "", - "repair command should not print to stdout" - ); - Ok(PlanResponse { - plan_id: "detect-2".into(), - plan_kind: PlanKind::Detect, - summary: "Marker exists".into(), - commands: Vec::new(), - healthy: true, - done: true, - success: true, - }) - } - _ => Err(format!( - "unexpected planner request: {:?} round {}", - kind, round - )), - } - }, - ) - .await - .expect("remote doctor loop should complete"); - - assert_eq!(result.status, "completed"); - assert!(result.latest_diagnosis_healthy); - assert_eq!(result.round, 2); - - let marker_result = pool - .exec(&cfg.id, &format!("test -f {marker}")) - .await - .expect("marker check"); - assert_eq!(marker_result.exit_code, 0); - - let log_path = clawpal_dir - .join("doctor") - .join("remote") - .join(format!("{session_id}.jsonl")); - let log_text = 
std::fs::read_to_string(&log_path).expect("read remote doctor log"); - assert!(log_text.contains("\"planKind\":\"detect\"")); - assert!(log_text.contains("\"planKind\":\"repair\"")); - let _ = std::fs::remove_dir_all(temp_root); - set_active_clawpal_data_override(None).expect("clear clawpal data"); - } - - #[tokio::test] - async fn remote_doctor_docker_e2e_rescue_activation_fails_when_gateway_stays_inactive() { - if !should_run_docker_e2e() { - eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); - return; - } - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container_with_env(&[("OPENCLAW_RESCUE_GATEWAY_ACTIVE", "0")]) - .expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - wait_for_ssh(30).expect("ssh should become available"); - - let app = mock_app(); - let app_handle = app.handle().clone(); - app_handle.manage(SshConnectionPool::new()); - let pool = app_handle.state::(); - let cfg = e2e_host_config(); - pool.connect(&cfg).await.expect("ssh connect"); - - let error = ensure_rescue_profile_ready( - &app_handle, - TargetLocation::RemoteOpenclaw, - &format!("ssh:{}", cfg.id), - ) - .await - .expect_err("rescue activation should fail when gateway remains inactive"); - - assert!(error.message.contains("did not become active")); - assert!(error.message.contains("configured_inactive")); - assert!(error - .diagnostics - .iter() - .any(|result| result.argv.join(" ") == "manage_rescue_bot status rescue")); - } - - #[tokio::test] - async fn remote_doctor_live_gateway_uses_configured_url_and_token() { - let Some(url) = live_gateway_url() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); - return; - }; - let Some(token) = live_gateway_token() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to 
enable"); - return; - }; - - let app = mock_app(); - let app_handle = app.handle().clone(); - app_handle.manage(SshConnectionPool::new()); - let temp_root = - std::env::temp_dir().join(format!("clawpal-remote-doctor-live-{}", Uuid::new_v4())); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - - std::fs::write( - clawpal_dir.join("app-preferences.json"), - serde_json::to_string(&json!({ - "remoteDoctorGatewayUrl": url, - "remoteDoctorGatewayAuthToken": token, - })) - .expect("serialize prefs"), - ) - .expect("write app preferences"); - - let gateway = remote_doctor_gateway_config().expect("gateway config"); - assert_eq!(gateway.url, url); - assert_eq!(gateway.auth_token_override.as_deref(), Some(token.as_str())); - - let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) - .expect("gateway credentials"); - assert!(creds.is_some(), "expected token override credentials"); - - let client = NodeClient::new(); - client - .connect(&gateway.url, app.handle().clone(), creds) - .await - .expect("connect live remote doctor gateway"); - assert!(client.is_connected().await); - match live_gateway_protocol().as_str() { - "clawpal_server" => { - let response = client - .send_request( - "remote_repair_plan.request", - json!({ - "requestId": format!("live-e2e-{}", Uuid::new_v4()), - "targetId": live_gateway_instance_id(), - "context": { - "configExcerpt": { - "models": { - "providers": { - "openai-codex": { - "baseUrl": "http://127.0.0.1:9/v1" - } - } - } - } - } - }), - ) - .await - .expect("request clawpal-server remote repair plan"); - let plan_id = response - .get("planId") - .and_then(|value| value.as_str()) - .unwrap_or_default(); - assert!( - !plan_id.trim().is_empty(), - "clawpal-server response should include a plan id" - ); - let steps = response - .get("steps") - 
.and_then(|value| value.as_array()) - .cloned() - .unwrap_or_default(); - assert!( - !steps.is_empty(), - "clawpal-server response should include repair steps" - ); - } - _ => { - let detect_plan = request_plan( - &client, - &detect_method_name(), - PlanKind::Detect, - &format!("live-e2e-{}", Uuid::new_v4()), - 1, - live_gateway_target_location(), - &live_gateway_instance_id(), - &[], - ) - .await - .expect("request live detection plan"); - assert!( - !detect_plan.plan_id.trim().is_empty(), - "live detection plan should include a plan id" - ); - } - } - client.disconnect().await.expect("disconnect"); - - set_active_clawpal_data_override(None).expect("clear clawpal data"); - let _ = std::fs::remove_dir_all(temp_root); - } - - #[tokio::test] - async fn remote_doctor_live_gateway_full_repair_loop_completes() { - let Some(url) = live_gateway_url() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); - return; - }; - let Some(token) = live_gateway_token() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); - return; - }; - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container().expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - wait_for_ssh(30).expect("ssh should become available"); - - let app = mock_app(); - let app_handle = app.handle().clone(); - app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir().join(format!( - "clawpal-remote-doctor-live-loop-{}", - Uuid::new_v4() - )); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - set_active_openclaw_home_override(None).expect("clear openclaw home 
override"); - - std::fs::write( - clawpal_dir.join("app-preferences.json"), - serde_json::to_string(&json!({ - "remoteDoctorGatewayUrl": url, - "remoteDoctorGatewayAuthToken": token, - })) - .expect("serialize prefs"), - ) - .expect("write app preferences"); - - let cfg = e2e_host_config(); - let pool = app_handle.state::(); - pool.connect(&cfg).await.expect("ssh connect"); - - let gateway = remote_doctor_gateway_config().expect("gateway config"); - let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) - .expect("gateway credentials"); - let client = NodeClient::new(); - client - .connect(&gateway.url, app_handle.clone(), creds) - .await - .expect("connect live remote doctor gateway"); - - let session_id = Uuid::new_v4().to_string(); - let result = run_clawpal_server_repair_loop( - &app_handle, - &client, - &session_id, - &format!("ssh:{}", cfg.id), - TargetLocation::RemoteOpenclaw, - ) - .await - .expect("full live remote doctor repair loop should complete"); - - assert_eq!(result.status, "completed"); - assert!(result.latest_diagnosis_healthy); - - client.disconnect().await.expect("disconnect"); - set_active_clawpal_data_override(None).expect("clear clawpal data"); - let _ = std::fs::remove_dir_all(temp_root); - } - - #[tokio::test] - async fn remote_doctor_live_start_command_remote_target_completes_without_bridge_pairing() { - let Some(url) = live_gateway_url() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); - return; - }; - let Some(token) = live_gateway_token() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); - return; - }; - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container().expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - 
wait_for_ssh(30).expect("ssh should become available"); - - let app = mock_app(); - let app_handle = app.handle().clone(); - app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir().join(format!( - "clawpal-remote-doctor-live-start-{}", - Uuid::new_v4() - )); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - set_active_openclaw_home_override(None).expect("clear openclaw home override"); - - std::fs::write( - clawpal_dir.join("app-preferences.json"), - serde_json::to_string(&json!({ - "remoteDoctorGatewayUrl": url, - "remoteDoctorGatewayAuthToken": token, - })) - .expect("serialize prefs"), - ) - .expect("write app preferences"); - - let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); - let pool = app_handle.state::(); - - let result = start_remote_doctor_repair_impl( - app_handle.clone(), - &pool, - format!("ssh:{}", cfg.id), - "remote_openclaw".to_string(), - ) - .await - .expect("start command should complete remote repair"); - - assert_eq!(result.status, "completed"); - assert!(result.latest_diagnosis_healthy); - - let log_path = clawpal_dir - .join("doctor") - .join("remote") - .join(format!("{}.jsonl", result.session_id)); - let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor session log"); - assert!( - !log_text.contains("\"event\":\"bridge_connect_failed\""), - "clawpal_server path should not attempt bridge pairing: {log_text}" - ); - - set_active_clawpal_data_override(None).expect("clear clawpal data"); - let _ = std::fs::remove_dir_all(temp_root); - } - - #[tokio::test] - async fn remote_doctor_live_gateway_repairs_unreadable_remote_config() { - let Some(url) = live_gateway_url() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); - return; - }; - let Some(token) = 
live_gateway_token() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); - return; - }; - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container().expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - wait_for_ssh(30).expect("ssh should become available"); - - let app = mock_app(); - let app_handle = app.handle().clone(); - app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir().join(format!( - "clawpal-remote-doctor-live-raw-config-{}", - Uuid::new_v4() - )); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - set_active_openclaw_home_override(None).expect("clear openclaw home override"); - - std::fs::write( - clawpal_dir.join("app-preferences.json"), - serde_json::to_string(&json!({ - "remoteDoctorGatewayUrl": url, - "remoteDoctorGatewayAuthToken": token, - })) - .expect("serialize prefs"), - ) - .expect("write app preferences"); - - let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); - let pool = app_handle.state::(); - pool.connect(&cfg).await.expect("ssh connect"); - pool.exec_login( - &cfg.id, - "cat > ~/.openclaw/openclaw.json <<'EOF'\n{\n ddd\n}\nEOF", - ) - .await - .expect("corrupt remote config"); - - let result = start_remote_doctor_repair_impl( - app_handle.clone(), - &pool, - cfg.id.clone(), - "remote_openclaw".to_string(), - ) - .await - .expect("start command should repair unreadable config"); - - assert_eq!(result.status, "completed"); - assert!(result.latest_diagnosis_healthy); - - let repaired = pool - .exec_login(&cfg.id, "python3 - <<'PY'\nimport json, 
pathlib\njson.load(open(pathlib.Path.home()/'.openclaw'/'openclaw.json'))\nprint('ok')\nPY") - .await - .expect("read repaired config"); - assert_eq!( - repaired.exit_code, 0, - "repaired config should be valid JSON: {}", - repaired.stderr - ); - assert_eq!(repaired.stdout.trim(), "ok"); - - set_active_clawpal_data_override(None).expect("clear clawpal data"); - let _ = std::fs::remove_dir_all(temp_root); - } -} diff --git a/src-tauri/src/remote_doctor/config.rs b/src-tauri/src/remote_doctor/config.rs index b2b7170b..a244dda7 100644 --- a/src-tauri/src/remote_doctor/config.rs +++ b/src-tauri/src/remote_doctor/config.rs @@ -1 +1,323 @@ -// Placeholder for remote doctor configuration and target I/O helpers. +use std::fs::create_dir_all; +use std::path::PathBuf; + +use ed25519_dalek::pkcs8::EncodePrivateKey; +use ed25519_dalek::SigningKey; +use serde_json::{json, Value}; +use sha2::{Digest, Sha256}; + +use super::session::append_session_log; +use super::types::{ + diagnosis_issue_summaries, ConfigExcerptContext, PlanKind, StoredRemoteDoctorIdentity, +}; +use crate::commands::preferences::load_app_preferences_from_paths; +use crate::commands::RescuePrimaryDiagnosisResult; +use crate::models::resolve_paths; +use crate::node_client::GatewayCredentials; + +const DEFAULT_GATEWAY_HOST: &str = "127.0.0.1"; +const DEFAULT_GATEWAY_PORT: u16 = 18789; + +#[derive(Debug, Clone)] +pub(crate) struct RemoteDoctorGatewayConfig { + pub(crate) url: String, + pub(crate) auth_token_override: Option, +} + +pub(crate) fn load_gateway_config() -> Result { + let paths = resolve_paths(); + let app_preferences = load_app_preferences_from_paths(&paths); + if let Some(url) = app_preferences.remote_doctor_gateway_url { + return Ok(RemoteDoctorGatewayConfig { + url, + auth_token_override: app_preferences.remote_doctor_gateway_auth_token, + }); + } + let configured_port = std::fs::read_to_string(&paths.config_path) + .ok() + .and_then(|text| serde_json::from_str::(&text).ok()) + .and_then(|config| { 
+ config + .get("gateway") + .and_then(|gateway| gateway.get("port")) + .and_then(|value| value.as_u64()) + }) + .map(|value| value as u16) + .unwrap_or(DEFAULT_GATEWAY_PORT); + Ok(RemoteDoctorGatewayConfig { + url: format!("ws://{DEFAULT_GATEWAY_HOST}:{configured_port}"), + auth_token_override: app_preferences.remote_doctor_gateway_auth_token, + }) +} + +pub(crate) fn build_gateway_credentials( + auth_token_override: Option<&str>, +) -> Result, String> { + let Some(token) = auth_token_override.filter(|value| !value.trim().is_empty()) else { + return Ok(None); + }; + let identity = load_or_create_remote_doctor_identity()?; + Ok(Some(GatewayCredentials { + token: token.to_string(), + device_id: identity.device_id, + private_key_pem: identity.private_key_pem, + })) +} + +pub(crate) fn remote_doctor_identity_path() -> PathBuf { + resolve_paths() + .clawpal_dir + .join("remote-doctor") + .join("device-identity.json") +} + +pub(crate) fn load_or_create_remote_doctor_identity() -> Result { + let path = remote_doctor_identity_path(); + if let Ok(text) = std::fs::read_to_string(&path) { + if let Ok(identity) = serde_json::from_str::(&text) { + if identity.version == 1 + && !identity.device_id.trim().is_empty() + && !identity.private_key_pem.trim().is_empty() + { + return Ok(identity); + } + } + } + + let parent = path + .parent() + .ok_or("Failed to resolve remote doctor identity directory")?; + create_dir_all(parent) + .map_err(|e| format!("Failed to create remote doctor identity dir: {e}"))?; + + let mut secret = [0u8; 32]; + getrandom::getrandom(&mut secret) + .map_err(|e| format!("Failed to generate remote doctor device secret: {e}"))?; + let signing_key = SigningKey::from_bytes(&secret); + let raw_public = signing_key.verifying_key().to_bytes(); + let device_id = Sha256::digest(raw_public) + .iter() + .map(|b| format!("{b:02x}")) + .collect::(); + let private_key_pem = signing_key + .to_pkcs8_pem(Default::default()) + .map_err(|e| format!("Failed to encode remote 
doctor private key: {e}"))? + .to_string(); + let created_at_ms = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map_err(|e| format!("Failed to get system time: {e}"))? + .as_millis() as u64; + let identity = StoredRemoteDoctorIdentity { + version: 1, + created_at_ms, + device_id, + private_key_pem, + }; + let text = serde_json::to_string_pretty(&identity) + .map_err(|e| format!("Failed to serialize remote doctor identity: {e}"))?; + std::fs::write(&path, format!("{text}\n")) + .map_err(|e| format!("Failed to persist remote doctor identity: {e}"))?; + Ok(identity) +} + +pub(crate) fn build_config_excerpt_context(raw: &str) -> ConfigExcerptContext { + match serde_json::from_str::(raw) { + Ok(config_excerpt) => ConfigExcerptContext { + config_excerpt, + config_excerpt_raw: None, + config_parse_error: None, + }, + Err(error) => ConfigExcerptContext { + config_excerpt: Value::Null, + config_excerpt_raw: Some(raw.to_string()), + config_parse_error: Some(format!("Failed to parse target config: {error}")), + }, + } +} + +pub(crate) fn config_excerpt_log_summary(context: &ConfigExcerptContext) -> Value { + json!({ + "configExcerptPresent": !context.config_excerpt.is_null(), + "configExcerptBytes": serde_json::to_string(&context.config_excerpt).ok().map(|text| text.len()).unwrap_or(0), + "configExcerptRawPresent": context.config_excerpt_raw.as_ref().map(|text| !text.trim().is_empty()).unwrap_or(false), + "configExcerptRawBytes": context.config_excerpt_raw.as_ref().map(|text| text.len()).unwrap_or(0), + "configParseError": context.config_parse_error, + }) +} + +pub(crate) fn empty_config_excerpt_context() -> ConfigExcerptContext { + ConfigExcerptContext { + config_excerpt: Value::Null, + config_excerpt_raw: None, + config_parse_error: None, + } +} + +pub(crate) fn empty_diagnosis() -> RescuePrimaryDiagnosisResult { + serde_json::from_value(json!({ + "status": "healthy", + "checkedAt": "2026-03-18T00:00:00Z", + "targetProfile": "primary", + 
"rescueProfile": "rescue", + "summary": { + "status": "healthy", + "headline": "Healthy", + "recommendedAction": null, + "fixableIssueCount": 0, + "selectedFixIssueIds": [] + }, + "issues": [], + "sections": [] + })) + .expect("empty diagnosis should deserialize") +} + +pub(crate) fn diagnosis_has_only_non_auto_fixable_issues( + diagnosis: &RescuePrimaryDiagnosisResult, +) -> bool { + !diagnosis.issues.is_empty() && diagnosis.issues.iter().all(|issue| !issue.auto_fixable) +} + +pub(crate) fn diagnosis_is_healthy(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { + diagnosis.status == "healthy" + && diagnosis.summary.status == "healthy" + && diagnosis.issues.is_empty() +} + +pub(crate) fn diagnosis_context(diagnosis: &RescuePrimaryDiagnosisResult) -> Value { + json!({ + "status": diagnosis.status, + "summary": { + "status": diagnosis.summary.status, + "headline": diagnosis.summary.headline, + "recommendedAction": diagnosis.summary.recommended_action, + "fixableIssueCount": diagnosis.summary.fixable_issue_count, + "selectedFixIssueIds": diagnosis.summary.selected_fix_issue_ids, + }, + "issues": diagnosis.issues, + "sections": diagnosis.sections, + }) +} + +pub(crate) fn diagnosis_missing_rescue_profile(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { + diagnosis + .issues + .iter() + .any(|issue| issue.code == "rescue.profile.missing") +} + +pub(crate) fn diagnosis_unhealthy_rescue_gateway( + diagnosis: &RescuePrimaryDiagnosisResult, +) -> bool { + diagnosis + .issues + .iter() + .any(|issue| issue.code == "rescue.gateway.unhealthy") +} + +pub(crate) fn append_diagnosis_log( + session_id: &str, + stage: &str, + round: usize, + diagnosis: &RescuePrimaryDiagnosisResult, +) { + append_session_log( + session_id, + json!({ + "event": "diagnosis_result", + "stage": stage, + "round": round, + "status": diagnosis.status, + "summaryStatus": diagnosis.summary.status, + "headline": diagnosis.summary.headline, + "recommendedAction": diagnosis.summary.recommended_action, + 
"issueCount": diagnosis.issues.len(), + "issues": diagnosis_issue_summaries(diagnosis), + }), + ); +} + +#[cfg(test)] +mod tests { + use std::sync::{Mutex, OnceLock}; + + use super::*; + use crate::cli_runner::set_active_clawpal_data_override; + + fn override_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + #[test] + fn load_gateway_config_prefers_app_preferences() { + let _guard = override_lock().lock().expect("lock override state"); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-config-pref-test-{}", + uuid::Uuid::new_v4() + )); + let clawpal_dir = temp_root.join(".clawpal"); + let openclaw_dir = temp_root.join(".openclaw"); + std::fs::create_dir_all(&clawpal_dir).expect("create clawpal dir"); + std::fs::create_dir_all(&openclaw_dir).expect("create openclaw dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": "ws://example.test:9999", + "remoteDoctorGatewayAuthToken": "abc", + })) + .expect("serialize prefs"), + ) + .expect("write prefs"); + + let config = load_gateway_config().expect("load gateway config"); + assert_eq!(config.url, "ws://example.test:9999"); + assert_eq!(config.auth_token_override.as_deref(), Some("abc")); + + set_active_clawpal_data_override(None).expect("clear clawpal override"); + let _ = std::fs::remove_dir_all(&temp_root); + } + + #[test] + fn build_gateway_credentials_returns_none_for_empty_override() { + let result = build_gateway_credentials(Some(" ")).expect("build credentials"); + assert!(result.is_none()); + } + + #[test] + fn load_or_create_remote_doctor_identity_persists_usable_identity() { + let _guard = override_lock().lock().expect("lock override state"); + let temp_root = std::env::temp_dir().join(format!( + 
"clawpal-remote-doctor-identity-test-{}", + uuid::Uuid::new_v4() + )); + let clawpal_dir = temp_root.join(".clawpal"); + std::fs::create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal override"); + + let identity = load_or_create_remote_doctor_identity().expect("create identity"); + assert_eq!(identity.version, 1); + assert!(!identity.device_id.is_empty()); + assert!(!identity.private_key_pem.is_empty()); + assert!(remote_doctor_identity_path().exists()); + + set_active_clawpal_data_override(None).expect("clear clawpal override"); + let _ = std::fs::remove_dir_all(&temp_root); + } + + #[test] + fn build_config_excerpt_context_records_parse_errors() { + let context = build_config_excerpt_context("{\n ddd\n}"); + assert!(context.config_excerpt.is_null()); + assert!(context.config_excerpt_raw.is_some()); + assert!(context + .config_parse_error + .as_deref() + .unwrap_or_default() + .contains("Failed to parse target config")); + } +} diff --git a/src-tauri/src/remote_doctor/legacy.rs b/src-tauri/src/remote_doctor/legacy.rs index e4b689b3..2b2bfd56 100644 --- a/src-tauri/src/remote_doctor/legacy.rs +++ b/src-tauri/src/remote_doctor/legacy.rs @@ -5,13 +5,20 @@ use std::process::Command; use std::time::Instant; use base64::Engine; -use ed25519_dalek::pkcs8::EncodePrivateKey; -use ed25519_dalek::SigningKey; use serde_json::{json, Value}; -use sha2::{Digest, Sha256}; use tauri::{AppHandle, Manager, Runtime, State}; use uuid::Uuid; +use super::config::{ + append_diagnosis_log, build_config_excerpt_context, + build_gateway_credentials as remote_doctor_gateway_credentials, + config_excerpt_log_summary, diagnosis_context, + diagnosis_has_only_non_auto_fixable_issues, diagnosis_is_healthy, + diagnosis_missing_rescue_profile, diagnosis_unhealthy_rescue_gateway, empty_config_excerpt_context, + empty_diagnosis, load_gateway_config as remote_doctor_gateway_config, + 
load_or_create_remote_doctor_identity, remote_doctor_identity_path, + RemoteDoctorGatewayConfig, +}; use super::session::{ append_session_log as append_remote_doctor_log, emit_session_progress as emit_progress, result_for_completion, @@ -26,7 +33,6 @@ use super::types::{ use crate::bridge_client::BridgeClient; use crate::cli_runner::{get_active_openclaw_home_override, run_openclaw, run_openclaw_remote}; use crate::commands::logs::log_dev; -use crate::commands::preferences::load_app_preferences_from_paths; use crate::commands::{agent::create_agent, agent::setup_agent_identity}; use crate::commands::{ diagnose_primary_via_rescue, manage_rescue_bot, read_raw_config, @@ -35,119 +41,15 @@ use crate::commands::{ }; use crate::config_io::read_openclaw_config; use crate::models::resolve_paths; -use crate::node_client::{GatewayCredentials, NodeClient}; +use crate::node_client::NodeClient; use crate::ssh::SshConnectionPool; -const DEFAULT_GATEWAY_HOST: &str = "127.0.0.1"; -const DEFAULT_GATEWAY_PORT: u16 = 18789; const DEFAULT_DETECT_METHOD: &str = "doctor.get_detection_plan"; const DEFAULT_REPAIR_METHOD: &str = "doctor.get_repair_plan"; const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; const REMOTE_DOCTOR_AGENT_ID: &str = "clawpal-remote-doctor"; -#[derive(Debug, Clone)] -struct RemoteDoctorGatewayConfig { - url: String, - auth_token_override: Option, -} - -fn remote_doctor_gateway_config() -> Result { - let paths = resolve_paths(); - let app_preferences = load_app_preferences_from_paths(&paths); - if let Some(url) = app_preferences.remote_doctor_gateway_url { - return Ok(RemoteDoctorGatewayConfig { - url, - auth_token_override: app_preferences.remote_doctor_gateway_auth_token, - }); - } - let configured_port = std::fs::read_to_string(&paths.config_path) - .ok() - .and_then(|text| serde_json::from_str::(&text).ok()) - .and_then(|config| { - config - .get("gateway") - .and_then(|gateway| gateway.get("port")) - .and_then(|value| 
value.as_u64()) - }) - .map(|value| value as u16) - .unwrap_or(DEFAULT_GATEWAY_PORT); - Ok(RemoteDoctorGatewayConfig { - url: format!("ws://{DEFAULT_GATEWAY_HOST}:{configured_port}"), - auth_token_override: app_preferences.remote_doctor_gateway_auth_token, - }) -} - -fn remote_doctor_gateway_credentials( - auth_token_override: Option<&str>, -) -> Result, String> { - let Some(token) = auth_token_override.filter(|value| !value.trim().is_empty()) else { - return Ok(None); - }; - let identity = load_or_create_remote_doctor_identity()?; - Ok(Some(GatewayCredentials { - token: token.to_string(), - device_id: identity.device_id, - private_key_pem: identity.private_key_pem, - })) -} - -fn remote_doctor_identity_path() -> PathBuf { - resolve_paths() - .clawpal_dir - .join("remote-doctor") - .join("device-identity.json") -} - -fn load_or_create_remote_doctor_identity() -> Result { - let path = remote_doctor_identity_path(); - if let Ok(text) = std::fs::read_to_string(&path) { - if let Ok(identity) = serde_json::from_str::(&text) { - if identity.version == 1 - && !identity.device_id.trim().is_empty() - && !identity.private_key_pem.trim().is_empty() - { - return Ok(identity); - } - } - } - - let parent = path - .parent() - .ok_or("Failed to resolve remote doctor identity directory")?; - create_dir_all(parent) - .map_err(|e| format!("Failed to create remote doctor identity dir: {e}"))?; - - let mut secret = [0u8; 32]; - getrandom::getrandom(&mut secret) - .map_err(|e| format!("Failed to generate remote doctor device secret: {e}"))?; - let signing_key = SigningKey::from_bytes(&secret); - let raw_public = signing_key.verifying_key().to_bytes(); - let device_id = Sha256::digest(raw_public) - .iter() - .map(|b| format!("{b:02x}")) - .collect::(); - let private_key_pem = signing_key - .to_pkcs8_pem(Default::default()) - .map_err(|e| format!("Failed to encode remote doctor private key: {e}"))? 
- .to_string(); - let created_at_ms = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .map_err(|e| format!("Failed to get system time: {e}"))? - .as_millis() as u64; - let identity = StoredRemoteDoctorIdentity { - version: 1, - created_at_ms, - device_id, - private_key_pem, - }; - let text = serde_json::to_string_pretty(&identity) - .map_err(|e| format!("Failed to serialize remote doctor identity: {e}"))?; - std::fs::write(&path, format!("{text}\n")) - .map_err(|e| format!("Failed to persist remote doctor identity: {e}"))?; - Ok(identity) -} - fn detect_method_name() -> String { std::env::var("CLAWPAL_REMOTE_DOCTOR_DETECT_METHOD") .unwrap_or_else(|_| DEFAULT_DETECT_METHOD.to_string()) @@ -357,10 +259,6 @@ fn is_unknown_method_error(error: &str) -> bool { || error.contains("\"code\": \"INVALID_REQUEST\"") } -fn diagnosis_has_only_non_auto_fixable_issues(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { - !diagnosis.issues.is_empty() && diagnosis.issues.iter().all(|issue| !issue.auto_fixable) -} - async fn run_rescue_diagnosis( app: &AppHandle, target_location: TargetLocation, @@ -411,58 +309,6 @@ async fn read_target_config_raw( } } -fn build_config_excerpt_context(raw: &str) -> ConfigExcerptContext { - match serde_json::from_str::(raw) { - Ok(config_excerpt) => ConfigExcerptContext { - config_excerpt, - config_excerpt_raw: None, - config_parse_error: None, - }, - Err(error) => ConfigExcerptContext { - config_excerpt: Value::Null, - config_excerpt_raw: Some(raw.to_string()), - config_parse_error: Some(format!("Failed to parse target config: {error}")), - }, - } -} - -fn config_excerpt_log_summary(context: &ConfigExcerptContext) -> Value { - json!({ - "configExcerptPresent": !context.config_excerpt.is_null(), - "configExcerptBytes": serde_json::to_string(&context.config_excerpt).ok().map(|text| text.len()).unwrap_or(0), - "configExcerptRawPresent": context.config_excerpt_raw.as_ref().map(|text| !text.trim().is_empty()).unwrap_or(false), - 
"configExcerptRawBytes": context.config_excerpt_raw.as_ref().map(|text| text.len()).unwrap_or(0), - "configParseError": context.config_parse_error, - }) -} - -fn empty_config_excerpt_context() -> ConfigExcerptContext { - ConfigExcerptContext { - config_excerpt: Value::Null, - config_excerpt_raw: None, - config_parse_error: None, - } -} - -fn empty_diagnosis() -> RescuePrimaryDiagnosisResult { - serde_json::from_value(json!({ - "status": "healthy", - "checkedAt": "2026-03-18T00:00:00Z", - "targetProfile": "primary", - "rescueProfile": "rescue", - "summary": { - "status": "healthy", - "headline": "Healthy", - "recommendedAction": null, - "fixableIssueCount": 0, - "selectedFixIssueIds": [] - }, - "issues": [], - "sections": [] - })) - .expect("empty diagnosis should deserialize") -} - async fn write_target_config( app: &AppHandle, target_location: TargetLocation, @@ -529,41 +375,6 @@ async fn restart_target_gateway( Ok(()) } -fn diagnosis_is_healthy(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { - diagnosis.status == "healthy" - && diagnosis.summary.status == "healthy" - && diagnosis.issues.is_empty() -} - -fn diagnosis_context(diagnosis: &RescuePrimaryDiagnosisResult) -> Value { - json!({ - "status": diagnosis.status, - "summary": { - "status": diagnosis.summary.status, - "headline": diagnosis.summary.headline, - "recommendedAction": diagnosis.summary.recommended_action, - "fixableIssueCount": diagnosis.summary.fixable_issue_count, - "selectedFixIssueIds": diagnosis.summary.selected_fix_issue_ids, - }, - "issues": diagnosis.issues, - "sections": diagnosis.sections, - }) -} - -fn diagnosis_missing_rescue_profile(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { - diagnosis - .issues - .iter() - .any(|issue| issue.code == "rescue.profile.missing") -} - -fn diagnosis_unhealthy_rescue_gateway(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { - diagnosis - .issues - .iter() - .any(|issue| issue.code == "rescue.gateway.unhealthy") -} - fn 
rescue_setup_command_result( action: &str, profile: &str, @@ -903,28 +714,6 @@ async fn repair_rescue_gateway_if_needed( Ok(()) } -fn append_diagnosis_log( - session_id: &str, - stage: &str, - round: usize, - diagnosis: &RescuePrimaryDiagnosisResult, -) { - append_remote_doctor_log( - session_id, - json!({ - "event": "diagnosis_result", - "stage": stage, - "round": round, - "status": diagnosis.status, - "summaryStatus": diagnosis.summary.status, - "headline": diagnosis.summary.headline, - "recommendedAction": diagnosis.summary.recommended_action, - "issueCount": diagnosis.issues.len(), - "issues": diagnosis_issue_summaries(diagnosis), - }), - ); -} - fn clawpal_server_step_type_summary(steps: &[ClawpalServerPlanStep]) -> Value { let mut counts = serde_json::Map::new(); for step in steps { From 413bbf5a77fe065ca3def2822462ea3883cc8366 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 15:12:17 +0800 Subject: [PATCH 09/20] refactor: extract remote doctor config and agent helpers --- src-tauri/src/remote_doctor/agent.rs | 357 +++++++++++++++++++++++- src-tauri/src/remote_doctor/config.rs | 197 +++++++++++++- src-tauri/src/remote_doctor/legacy.rs | 375 +------------------------- 3 files changed, 564 insertions(+), 365 deletions(-) diff --git a/src-tauri/src/remote_doctor/agent.rs b/src-tauri/src/remote_doctor/agent.rs index 540f4356..e5fa1464 100644 --- a/src-tauri/src/remote_doctor/agent.rs +++ b/src-tauri/src/remote_doctor/agent.rs @@ -1 +1,356 @@ -// Placeholder for agent-planner-specific helpers. 
+use std::fs::create_dir_all; +use std::path::PathBuf; + +use serde_json::{json, Value}; + +use super::config::diagnosis_context; +use super::types::{CommandResult, ConfigExcerptContext, PlanKind, RemoteDoctorProtocol, TargetLocation}; +use crate::commands::{agent::create_agent, agent::setup_agent_identity, RescuePrimaryDiagnosisResult}; +use crate::config_io::read_openclaw_config; +use crate::models::resolve_paths; + +const DEFAULT_DETECT_METHOD: &str = "doctor.get_detection_plan"; +const DEFAULT_REPAIR_METHOD: &str = "doctor.get_repair_plan"; +const REMOTE_DOCTOR_AGENT_ID: &str = "clawpal-remote-doctor"; + +pub(crate) fn detect_method_name() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_DETECT_METHOD") + .unwrap_or_else(|_| DEFAULT_DETECT_METHOD.to_string()) +} + +pub(crate) fn repair_method_name() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_REPAIR_METHOD") + .unwrap_or_else(|_| DEFAULT_REPAIR_METHOD.to_string()) +} + +pub(crate) fn configured_remote_doctor_protocol() -> Option { + match std::env::var("CLAWPAL_REMOTE_DOCTOR_PROTOCOL") + .ok() + .as_deref() + .map(str::trim) + { + Some("agent") => Some(RemoteDoctorProtocol::AgentPlanner), + Some("legacy") | Some("legacy_doctor") => Some(RemoteDoctorProtocol::LegacyDoctor), + Some("clawpal_server") => Some(RemoteDoctorProtocol::ClawpalServer), + _ => None, + } +} + +pub(crate) fn default_remote_doctor_protocol() -> RemoteDoctorProtocol { + RemoteDoctorProtocol::AgentPlanner +} + +pub(crate) fn protocol_requires_bridge(protocol: RemoteDoctorProtocol) -> bool { + matches!(protocol, RemoteDoctorProtocol::AgentPlanner) +} + +pub(crate) fn protocol_runs_rescue_preflight(protocol: RemoteDoctorProtocol) -> bool { + matches!(protocol, RemoteDoctorProtocol::LegacyDoctor) +} + +pub(crate) fn next_agent_plan_kind(diagnosis: &RescuePrimaryDiagnosisResult) -> PlanKind { + next_agent_plan_kind_for_round(diagnosis, &[]) +} + +pub(crate) fn next_agent_plan_kind_for_round( + diagnosis: &RescuePrimaryDiagnosisResult, + 
previous_results: &[CommandResult], +) -> PlanKind { + if diagnosis + .issues + .iter() + .any(|issue| issue.code == "primary.config.unreadable") + { + if !previous_results.is_empty() { + return PlanKind::Repair; + } + PlanKind::Investigate + } else { + PlanKind::Repair + } +} + +pub(crate) fn remote_doctor_agent_id() -> &'static str { + REMOTE_DOCTOR_AGENT_ID +} + +pub(crate) fn remote_doctor_agent_session_key(session_id: &str) -> String { + format!("agent:{}:{session_id}", remote_doctor_agent_id()) +} + +fn agent_workspace_bootstrap_files() -> [(&'static str, &'static str); 4] { + [ + ( + "AGENTS.md", + "# Remote Doctor\nUse this workspace only for ClawPal remote doctor planning sessions.\nReturn structured, operational answers.\n", + ), + ( + "BOOTSTRAP.md", + "Bootstrap is already complete for this workspace.\nDo not ask who you are or who the user is.\nUse IDENTITY.md and USER.md as the canonical identity context.\n", + ), + ( + "USER.md", + "- Name: ClawPal Desktop\n- Role: desktop repair orchestrator\n- Preferences: concise, operational, no bootstrap chatter\n", + ), + ( + "HEARTBEAT.md", + "Status: active remote-doctor planning workspace.\n", + ), + ] +} + +pub(crate) fn gateway_url_is_local(url: &str) -> bool { + let rest = url + .split_once("://") + .map(|(_, remainder)| remainder) + .unwrap_or(url); + let host_port = rest.split('/').next().unwrap_or(rest); + let host = host_port + .strip_prefix('[') + .and_then(|value| value.split_once(']').map(|(host, _)| host)) + .unwrap_or_else(|| host_port.split(':').next().unwrap_or(host_port)); + matches!(host, "127.0.0.1" | "localhost") +} + +pub(crate) fn ensure_agent_workspace_ready() -> Result<(), String> { + let agent_id = remote_doctor_agent_id().to_string(); + if let Err(error) = create_agent(agent_id.clone(), None, Some(true)) { + if !error.contains("already exists") { + return Err(format!("Failed to create remote doctor agent: {error}")); + } + } + + setup_agent_identity(agent_id.clone(), "ClawPal Remote 
Doctor".to_string(), None)?; + + let paths = resolve_paths(); + let cfg = read_openclaw_config(&paths)?; + let workspace = + clawpal_core::doctor::resolve_agent_workspace_from_config(&cfg, &agent_id, None) + .map(|path| shellexpand::tilde(&path).to_string())?; + create_dir_all(&workspace) + .map_err(|error| format!("Failed to create remote doctor workspace: {error}"))?; + + for (file_name, content) in agent_workspace_bootstrap_files() { + std::fs::write(PathBuf::from(&workspace).join(file_name), content) + .map_err(|error| format!("Failed to write remote doctor {file_name}: {error}"))?; + } + + Ok(()) +} + +pub(crate) fn build_agent_plan_prompt( + kind: PlanKind, + session_id: &str, + round: usize, + target_location: TargetLocation, + instance_id: &str, + diagnosis: &RescuePrimaryDiagnosisResult, + config_context: &ConfigExcerptContext, + previous_results: &[CommandResult], +) -> String { + let kind_label = match kind { + PlanKind::Detect => "detection", + PlanKind::Investigate => "investigation", + PlanKind::Repair => "repair", + }; + let target_label = match target_location { + TargetLocation::LocalOpenclaw => "local_openclaw", + TargetLocation::RemoteOpenclaw => "remote_openclaw", + }; + let diagnosis_json = + serde_json::to_string_pretty(&diagnosis_context(diagnosis)).unwrap_or_else(|_| "{}".into()); + let config_context_json = serde_json::to_string_pretty(&json!({ + "configExcerpt": config_context.config_excerpt, + "configExcerptRaw": config_context.config_excerpt_raw, + "configParseError": config_context.config_parse_error, + })) + .unwrap_or_else(|_| "{}".into()); + let previous_results_json = + serde_json::to_string_pretty(previous_results).unwrap_or_else(|_| "[]".into()); + let phase_rules = match kind { + PlanKind::Detect => "For detection plans, gather only the commands needed to confirm current state. Set healthy=true and done=true only when no issue remains.", + PlanKind::Investigate => "For investigation plans, return read-only diagnosis steps only. 
Do not modify files, delete files, overwrite config, or restart services. Prefer commands that inspect, validate, backup, or print evidence for why the config is unreadable. Do not run follow/tail commands, streaming log readers, or any unbounded command; every investigation command must be bounded and return promptly. Do not use heredocs, multiline scripts, or commands that wait on stdin. Prefer single-line commands over shell scripting.", + PlanKind::Repair => "For repair plans, return the minimal safe repair commands. Reference prior investigation evidence when config is unreadable. Back up the file before changing it and include validation/rediagnosis steps as needed. Do not invent OpenClaw subcommands. Use only the verified OpenClaw commands listed below or the `clawpal doctor ...` tools. Do not use `openclaw auth ...` commands. Do not use `openclaw doctor --json`; use `clawpal doctor probe-openclaw` or `clawpal doctor exec --tool doctor` instead. Do not use heredocs, multiline scripts, or commands that wait on stdin.", + }; + format!( + "Identity bootstrap for this session:\n\ +- Your name: ClawPal Remote Doctor\n\ +- Your creature: maintenance daemon\n\ +- Your vibe: direct, terse, operational\n\ +- Your emoji: none\n\ +- The user is: ClawPal desktop app\n\ +- The user timezone is: Asia/Shanghai\n\ +- Do not ask identity/bootstrap questions.\n\ +- Do not ask who you are or who the user is.\n\ +- Do not modify IDENTITY.md, USER.md, or workspace bootstrap files.\n\ +\n\ +You are ClawPal Remote Doctor planner.\n\ +Return ONLY one JSON object and no markdown.\n\ +Task: produce the next {kind_label} plan for OpenClaw.\n\ +Session: {session_id}\n\ +Round: {round}\n\ +Target location: {target_label}\n\ +Instance id: {instance_id}\n\ +Diagnosis JSON:\n{diagnosis_json}\n\n\ +Config context JSON:\n{config_context_json}\n\n\ +Previous command results JSON:\n{previous_results_json}\n\n\ +Available gateway tools:\n\ +- `clawpal doctor probe-openclaw`\n\ +- `clawpal 
doctor config-read [path]`\n\ +- `clawpal doctor config-read-raw`\n\ +- `clawpal doctor config-upsert `\n\ +- `clawpal doctor config-delete `\n\ +- `clawpal doctor config-write-raw-base64 `\n\ +- `clawpal doctor exec --tool [--args ]`\n\ +- Verified direct OpenClaw commands only:\n\ + - `openclaw --version`\n\ + - `openclaw gateway status`\n\ +You may invoke these tools before answering when you need fresh diagnostics or config state.\n\ +If you already have enough information, return the JSON plan directly.\n\n\ +Return this exact JSON schema:\n\ +{{\n \"planId\": \"string\",\n \"planKind\": \"{kind}\",\n \"summary\": \"string\",\n \"commands\": [{{\"argv\": [\"cmd\"], \"timeoutSec\": 60, \"purpose\": \"why\", \"continueOnFailure\": false}}],\n \"healthy\": false,\n \"done\": false,\n \"success\": false\n}}\n\ +Rules:\n\ +- {phase_rules}\n\ +- For repair plans, return shell/openclaw commands in commands.\n\ +- Keep commands empty when no command is needed.\n\ +- Output valid JSON only.", + kind = match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + } + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; + use crate::commands::{RescuePrimaryDiagnosisResult, RescuePrimaryIssue, RescuePrimarySummary}; + + #[test] + fn default_remote_doctor_protocol_prefers_agent() { + assert_eq!( + default_remote_doctor_protocol(), + RemoteDoctorProtocol::AgentPlanner + ); + } + + #[test] + fn unreadable_config_switches_to_repair_after_investigation_results_exist() { + let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { + id: "issue-1".into(), + code: "primary.config.unreadable".into(), + severity: "error".into(), + message: "Primary configuration could not be read".into(), + auto_fixable: false, + fix_hint: Some("Repair".into()), + source: "primary".into(), + }]); + let previous_results = vec![CommandResult { + argv: 
vec!["clawpal".into(), "doctor".into(), "config-read-raw".into()], + exit_code: Some(0), + stdout: "{}".into(), + stderr: String::new(), + duration_ms: 1, + timed_out: false, + }]; + assert_eq!( + next_agent_plan_kind_for_round(&diagnosis, &previous_results), + PlanKind::Repair + ); + } + + #[test] + fn build_agent_plan_prompt_mentions_target_and_schema() { + let prompt = build_agent_plan_prompt( + PlanKind::Repair, + "sess-1", + 3, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &sample_diagnosis(Vec::new()), + &ConfigExcerptContext { + config_excerpt: json!({"ok": true}), + config_excerpt_raw: None, + config_parse_error: None, + }, + &[], + ); + assert!(prompt.contains("Task: produce the next repair plan")); + assert!(prompt.contains("Target location: remote_openclaw")); + assert!(prompt.contains("\"planKind\": \"repair\"")); + assert!(prompt.contains("\"configExcerpt\"")); + } + + #[test] + fn remote_doctor_agent_id_is_dedicated() { + assert_eq!(remote_doctor_agent_id(), "clawpal-remote-doctor"); + assert!(!remote_doctor_agent_session_key("sess-1").contains("main")); + } + + #[test] + fn ensure_agent_workspace_ready_creates_workspace_bootstrap_files() { + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-agent-test-{}", + uuid::Uuid::new_v4() + )); + let home_dir = temp_root.join("home"); + let clawpal_dir = temp_root.join("clawpal"); + let openclaw_dir = home_dir.join(".openclaw"); + std::fs::create_dir_all(&openclaw_dir).expect("create openclaw dir"); + std::fs::create_dir_all(&clawpal_dir).expect("create clawpal dir"); + std::fs::write( + openclaw_dir.join("openclaw.json"), + r#"{ + "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } }, + "agents": { + "defaults": { "model": "openai/gpt-4o-mini" }, + "list": [{ "id": "main", "workspace": "~/.openclaw/workspaces/main" }] + } +} +"#, + ) + .expect("write config"); + set_active_openclaw_home_override(Some(home_dir.to_string_lossy().to_string())) + .expect("set openclaw 
override"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal override"); + + let result = ensure_agent_workspace_ready(); + + set_active_openclaw_home_override(None).expect("clear openclaw override"); + set_active_clawpal_data_override(None).expect("clear clawpal override"); + if let Err(error) = &result { + let _ = std::fs::remove_dir_all(&temp_root); + panic!("ensure agent ready: {error}"); + } + let _ = std::fs::remove_dir_all(&temp_root); + } + + fn sample_diagnosis(issues: Vec) -> RescuePrimaryDiagnosisResult { + RescuePrimaryDiagnosisResult { + status: "degraded".to_string(), + checked_at: "2026-03-19T00:00:00Z".to_string(), + target_profile: "primary".to_string(), + rescue_profile: "rescue".to_string(), + rescue_configured: true, + rescue_port: Some(18789), + summary: RescuePrimarySummary { + status: "degraded".to_string(), + headline: "Issues found".to_string(), + recommended_action: "Repair".to_string(), + fixable_issue_count: 0, + selected_fix_issue_ids: Vec::new(), + root_cause_hypotheses: Vec::new(), + fix_steps: Vec::new(), + confidence: None, + citations: Vec::new(), + version_awareness: None, + }, + sections: Vec::new(), + checks: Vec::new(), + issues, + } + } +} diff --git a/src-tauri/src/remote_doctor/config.rs b/src-tauri/src/remote_doctor/config.rs index a244dda7..dc08becf 100644 --- a/src-tauri/src/remote_doctor/config.rs +++ b/src-tauri/src/remote_doctor/config.rs @@ -5,15 +5,21 @@ use ed25519_dalek::pkcs8::EncodePrivateKey; use ed25519_dalek::SigningKey; use serde_json::{json, Value}; use sha2::{Digest, Sha256}; +use tauri::{AppHandle, Manager, Runtime}; use super::session::append_session_log; use super::types::{ - diagnosis_issue_summaries, ConfigExcerptContext, PlanKind, StoredRemoteDoctorIdentity, + diagnosis_issue_summaries, ConfigExcerptContext, TargetLocation, StoredRemoteDoctorIdentity, }; use crate::commands::preferences::load_app_preferences_from_paths; -use 
crate::commands::RescuePrimaryDiagnosisResult; +use crate::commands::{ + diagnose_primary_via_rescue, read_raw_config, remote_diagnose_primary_via_rescue, + remote_read_raw_config, remote_restart_gateway, remote_write_raw_config, restart_gateway, + RescuePrimaryDiagnosisResult, +}; use crate::models::resolve_paths; use crate::node_client::GatewayCredentials; +use crate::ssh::SshConnectionPool; const DEFAULT_GATEWAY_HOST: &str = "127.0.0.1"; const DEFAULT_GATEWAY_PORT: u16 = 18789; @@ -237,12 +243,150 @@ pub(crate) fn append_diagnosis_log( ); } +pub(crate) fn remote_target_host_id_candidates(instance_id: &str) -> Vec { + let mut candidates = Vec::new(); + let trimmed = instance_id.trim(); + if !trimmed.is_empty() { + candidates.push(trimmed.to_string()); + } + if let Some(stripped) = trimmed.strip_prefix("ssh:").map(str::trim) { + if !stripped.is_empty() && !candidates.iter().any(|value| value == stripped) { + candidates.push(stripped.to_string()); + } + } + candidates +} + +pub(crate) fn primary_remote_target_host_id(instance_id: &str) -> Result { + remote_target_host_id_candidates(instance_id) + .into_iter() + .next() + .ok_or_else(|| "Remote Doctor repair requires an ssh instance id".to_string()) +} + +pub(crate) async fn run_rescue_diagnosis( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + match target_location { + TargetLocation::LocalOpenclaw => diagnose_primary_via_rescue(None, None).await, + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_diagnose_primary_via_rescue( + app.state::(), + host_id, + None, + None, + ) + .await + } + } +} + +pub(crate) async fn read_target_config( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + let raw = match target_location { + TargetLocation::LocalOpenclaw => read_raw_config()?, + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + 
remote_read_raw_config(app.state::(), host_id).await? + } + }; + serde_json::from_str::(&raw) + .map_err(|error| format!("Failed to parse target config: {error}")) +} + +pub(crate) async fn read_target_config_raw( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result { + match target_location { + TargetLocation::LocalOpenclaw => read_raw_config(), + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_read_raw_config(app.state::(), host_id).await + } + } +} + +pub(crate) async fn write_target_config( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, + config: &Value, +) -> Result<(), String> { + let text = serde_json::to_string_pretty(config).map_err(|error| error.to_string())?; + let validated = clawpal_core::config::validate_config_json(&text) + .map_err(|error| format!("Invalid config after remote doctor patch: {error}"))?; + let validated_text = + serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; + match target_location { + TargetLocation::LocalOpenclaw => { + let paths = resolve_paths(); + crate::config_io::write_text(&paths.config_path, &validated_text)?; + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_write_raw_config(app.state::(), host_id, validated_text) + .await?; + } + } + Ok(()) +} + +pub(crate) async fn write_target_config_raw( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, + text: &str, +) -> Result<(), String> { + let validated = clawpal_core::config::validate_config_json(text) + .map_err(|error| format!("Invalid raw config payload: {error}"))?; + let validated_text = + serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; + match target_location { + TargetLocation::LocalOpenclaw => { + let paths = resolve_paths(); + crate::config_io::write_text(&paths.config_path, &validated_text)?; + } + 
TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_write_raw_config(app.state::(), host_id, validated_text) + .await?; + } + } + Ok(()) +} + +pub(crate) async fn restart_target_gateway( + app: &AppHandle, + target_location: TargetLocation, + instance_id: &str, +) -> Result<(), String> { + match target_location { + TargetLocation::LocalOpenclaw => restart_gateway().await.map(|_| ()), + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + remote_restart_gateway(app.state::(), host_id) + .await + .map(|_| ()) + } + } +} + #[cfg(test)] mod tests { use std::sync::{Mutex, OnceLock}; use super::*; - use crate::cli_runner::set_active_clawpal_data_override; + use crate::cli_runner::{ + set_active_clawpal_data_override, set_active_openclaw_home_override, + }; fn override_lock() -> &'static Mutex<()> { static LOCK: OnceLock> = OnceLock::new(); @@ -320,4 +464,51 @@ mod tests { .unwrap_or_default() .contains("Failed to parse target config")); } + + #[test] + fn remote_target_host_id_candidates_include_exact_and_stripped_ids() { + assert_eq!( + remote_target_host_id_candidates("ssh:15-235-214-81"), + vec!["ssh:15-235-214-81".to_string(), "15-235-214-81".to_string()] + ); + assert_eq!( + remote_target_host_id_candidates("e2e-remote-doctor"), + vec!["e2e-remote-doctor".to_string()] + ); + } + + #[test] + fn primary_remote_target_host_id_prefers_exact_instance_id() { + assert_eq!( + primary_remote_target_host_id("ssh:15-235-214-81").unwrap(), + "ssh:15-235-214-81" + ); + } + + #[tokio::test] + async fn read_target_config_raw_returns_current_file_contents() { + let _guard = override_lock().lock().expect("lock override state"); + let app = tauri::test::mock_app(); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-read-config-test-{}", + uuid::Uuid::new_v4() + )); + let openclaw_home = temp_root.join("home"); + let openclaw_dir = 
openclaw_home.join(".openclaw"); + std::fs::create_dir_all(&openclaw_dir).expect("create openclaw dir"); + set_active_openclaw_home_override(Some(openclaw_home.to_string_lossy().to_string())) + .expect("set openclaw override"); + let raw = "{\n \"ok\": true\n}"; + std::fs::write(openclaw_dir.join("openclaw.json"), raw).expect("write config"); + + let result = + read_target_config_raw(&app.handle().clone(), TargetLocation::LocalOpenclaw, "") + .await + .expect("read raw config"); + + set_active_openclaw_home_override(None).expect("clear openclaw override"); + let _ = std::fs::remove_dir_all(&temp_root); + + assert!(result.contains("\"ok\": true")); + } } diff --git a/src-tauri/src/remote_doctor/legacy.rs b/src-tauri/src/remote_doctor/legacy.rs index 2b2bfd56..e39e4fdb 100644 --- a/src-tauri/src/remote_doctor/legacy.rs +++ b/src-tauri/src/remote_doctor/legacy.rs @@ -14,10 +14,19 @@ use super::config::{ build_gateway_credentials as remote_doctor_gateway_credentials, config_excerpt_log_summary, diagnosis_context, diagnosis_has_only_non_auto_fixable_issues, diagnosis_is_healthy, - diagnosis_missing_rescue_profile, diagnosis_unhealthy_rescue_gateway, empty_config_excerpt_context, - empty_diagnosis, load_gateway_config as remote_doctor_gateway_config, - load_or_create_remote_doctor_identity, remote_doctor_identity_path, - RemoteDoctorGatewayConfig, + diagnosis_missing_rescue_profile, diagnosis_unhealthy_rescue_gateway, + empty_config_excerpt_context, empty_diagnosis, + load_gateway_config as remote_doctor_gateway_config, primary_remote_target_host_id, + read_target_config, read_target_config_raw, remote_target_host_id_candidates, + restart_target_gateway, run_rescue_diagnosis, write_target_config, + write_target_config_raw, RemoteDoctorGatewayConfig, +}; +use super::agent::{ + build_agent_plan_prompt, configured_remote_doctor_protocol, default_remote_doctor_protocol, + detect_method_name, ensure_agent_workspace_ready as ensure_local_remote_doctor_agent_ready, + 
gateway_url_is_local, next_agent_plan_kind, next_agent_plan_kind_for_round, + protocol_requires_bridge, protocol_runs_rescue_preflight, remote_doctor_agent_id, + remote_doctor_agent_session_key, repair_method_name, }; use super::session::{ append_session_log as append_remote_doctor_log, @@ -33,147 +42,12 @@ use super::types::{ use crate::bridge_client::BridgeClient; use crate::cli_runner::{get_active_openclaw_home_override, run_openclaw, run_openclaw_remote}; use crate::commands::logs::log_dev; -use crate::commands::{agent::create_agent, agent::setup_agent_identity}; -use crate::commands::{ - diagnose_primary_via_rescue, manage_rescue_bot, read_raw_config, - remote_diagnose_primary_via_rescue, remote_manage_rescue_bot, remote_read_raw_config, - remote_restart_gateway, remote_write_raw_config, restart_gateway, RescuePrimaryDiagnosisResult, -}; -use crate::config_io::read_openclaw_config; -use crate::models::resolve_paths; +use crate::commands::{manage_rescue_bot, remote_manage_rescue_bot, RescuePrimaryDiagnosisResult}; use crate::node_client::NodeClient; use crate::ssh::SshConnectionPool; -const DEFAULT_DETECT_METHOD: &str = "doctor.get_detection_plan"; -const DEFAULT_REPAIR_METHOD: &str = "doctor.get_repair_plan"; const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; -const REMOTE_DOCTOR_AGENT_ID: &str = "clawpal-remote-doctor"; - -fn detect_method_name() -> String { - std::env::var("CLAWPAL_REMOTE_DOCTOR_DETECT_METHOD") - .unwrap_or_else(|_| DEFAULT_DETECT_METHOD.to_string()) -} - -fn repair_method_name() -> String { - std::env::var("CLAWPAL_REMOTE_DOCTOR_REPAIR_METHOD") - .unwrap_or_else(|_| DEFAULT_REPAIR_METHOD.to_string()) -} - -fn configured_remote_doctor_protocol() -> Option { - match std::env::var("CLAWPAL_REMOTE_DOCTOR_PROTOCOL") - .ok() - .as_deref() - .map(str::trim) - { - Some("agent") => Some(RemoteDoctorProtocol::AgentPlanner), - Some("legacy") | Some("legacy_doctor") => Some(RemoteDoctorProtocol::LegacyDoctor), - 
Some("clawpal_server") => Some(RemoteDoctorProtocol::ClawpalServer), - _ => None, - } -} - -fn default_remote_doctor_protocol() -> RemoteDoctorProtocol { - RemoteDoctorProtocol::AgentPlanner -} - -fn protocol_requires_bridge(protocol: RemoteDoctorProtocol) -> bool { - matches!(protocol, RemoteDoctorProtocol::AgentPlanner) -} - -fn protocol_runs_rescue_preflight(protocol: RemoteDoctorProtocol) -> bool { - matches!(protocol, RemoteDoctorProtocol::LegacyDoctor) -} - -fn next_agent_plan_kind(diagnosis: &RescuePrimaryDiagnosisResult) -> PlanKind { - next_agent_plan_kind_for_round(diagnosis, &[]) -} - -fn next_agent_plan_kind_for_round( - diagnosis: &RescuePrimaryDiagnosisResult, - previous_results: &[CommandResult], -) -> PlanKind { - if diagnosis - .issues - .iter() - .any(|issue| issue.code == "primary.config.unreadable") - { - if !previous_results.is_empty() { - return PlanKind::Repair; - } - PlanKind::Investigate - } else { - PlanKind::Repair - } -} - -fn remote_doctor_agent_id() -> &'static str { - REMOTE_DOCTOR_AGENT_ID -} - -fn remote_doctor_agent_session_key(session_id: &str) -> String { - format!("agent:{}:{session_id}", remote_doctor_agent_id()) -} - -fn remote_doctor_agent_workspace_files() -> [(&'static str, &'static str); 4] { - [ - ( - "AGENTS.md", - "# Remote Doctor\nUse this workspace only for ClawPal remote doctor planning sessions.\nReturn structured, operational answers.\n", - ), - ( - "BOOTSTRAP.md", - "Bootstrap is already complete for this workspace.\nDo not ask who you are or who the user is.\nUse IDENTITY.md and USER.md as the canonical identity context.\n", - ), - ( - "USER.md", - "- Name: ClawPal Desktop\n- Role: desktop repair orchestrator\n- Preferences: concise, operational, no bootstrap chatter\n", - ), - ( - "HEARTBEAT.md", - "Status: active remote-doctor planning workspace.\n", - ), - ] -} - -fn gateway_url_is_local(url: &str) -> bool { - let rest = url - .split_once("://") - .map(|(_, remainder)| remainder) - .unwrap_or(url); - let 
host_port = rest.split('/').next().unwrap_or(rest); - let host = host_port - .strip_prefix('[') - .and_then(|value| value.split_once(']').map(|(host, _)| host)) - .unwrap_or_else(|| host_port.split(':').next().unwrap_or(host_port)); - matches!(host, "127.0.0.1" | "localhost") -} - -fn ensure_local_remote_doctor_agent_ready() -> Result<(), String> { - let agent_id = remote_doctor_agent_id().to_string(); - if let Err(error) = create_agent(agent_id.clone(), None, Some(true)) { - if !error.contains("already exists") { - return Err(format!("Failed to create remote doctor agent: {error}")); - } - } - - setup_agent_identity(agent_id.clone(), "ClawPal Remote Doctor".to_string(), None)?; - - let paths = resolve_paths(); - let cfg = read_openclaw_config(&paths)?; - let workspace = - clawpal_core::doctor::resolve_agent_workspace_from_config(&cfg, &agent_id, None) - .map(|path| shellexpand::tilde(&path).to_string())?; - create_dir_all(&workspace) - .map_err(|error| format!("Failed to create remote doctor workspace: {error}"))?; - - for (file_name, content) in remote_doctor_agent_workspace_files() { - std::fs::write(PathBuf::from(&workspace).join(file_name), content) - .map_err(|error| format!("Failed to write remote doctor {file_name}: {error}"))?; - } - - Ok(()) -} async fn ensure_agent_bridge_connected( app: &AppHandle, @@ -232,149 +106,12 @@ async fn ensure_remote_target_connected( } } -fn remote_target_host_id_candidates(instance_id: &str) -> Vec { - let mut candidates = Vec::new(); - let trimmed = instance_id.trim(); - if !trimmed.is_empty() { - candidates.push(trimmed.to_string()); - } - if let Some(stripped) = trimmed.strip_prefix("ssh:").map(str::trim) { - if !stripped.is_empty() && !candidates.iter().any(|value| value == stripped) { - candidates.push(stripped.to_string()); - } - } - candidates -} - -fn primary_remote_target_host_id(instance_id: &str) -> Result { - remote_target_host_id_candidates(instance_id) - .into_iter() - .next() - .ok_or_else(|| "Remote Doctor 
repair requires an ssh instance id".to_string()) -} - fn is_unknown_method_error(error: &str) -> bool { error.contains("unknown method") || error.contains("\"code\":\"INVALID_REQUEST\"") || error.contains("\"code\": \"INVALID_REQUEST\"") } -async fn run_rescue_diagnosis( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, -) -> Result { - match target_location { - TargetLocation::LocalOpenclaw => diagnose_primary_via_rescue(None, None).await, - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_diagnose_primary_via_rescue( - app.state::(), - host_id, - None, - None, - ) - .await - } - } -} - -async fn read_target_config( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, -) -> Result { - let raw = match target_location { - TargetLocation::LocalOpenclaw => read_raw_config()?, - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_read_raw_config(app.state::(), host_id).await? 
- } - }; - serde_json::from_str::(&raw) - .map_err(|error| format!("Failed to parse target config: {error}")) -} - -async fn read_target_config_raw( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, -) -> Result { - match target_location { - TargetLocation::LocalOpenclaw => read_raw_config(), - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_read_raw_config(app.state::(), host_id).await - } - } -} - -async fn write_target_config( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, - config: &Value, -) -> Result<(), String> { - let text = serde_json::to_string_pretty(config).map_err(|error| error.to_string())?; - let validated = clawpal_core::config::validate_config_json(&text) - .map_err(|error| format!("Invalid config after remote doctor patch: {error}"))?; - let validated_text = - serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; - match target_location { - TargetLocation::LocalOpenclaw => { - let paths = resolve_paths(); - crate::config_io::write_text(&paths.config_path, &validated_text)?; - } - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_write_raw_config(app.state::(), host_id, validated_text) - .await?; - } - } - Ok(()) -} - -async fn write_target_config_raw( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, - text: &str, -) -> Result<(), String> { - let validated = clawpal_core::config::validate_config_json(text) - .map_err(|error| format!("Invalid raw config payload: {error}"))?; - let validated_text = - serde_json::to_string_pretty(&validated).map_err(|error| error.to_string())?; - match target_location { - TargetLocation::LocalOpenclaw => { - let paths = resolve_paths(); - crate::config_io::write_text(&paths.config_path, &validated_text)?; - } - TargetLocation::RemoteOpenclaw => { - let host_id = 
primary_remote_target_host_id(instance_id)?; - remote_write_raw_config(app.state::(), host_id, validated_text) - .await?; - } - } - Ok(()) -} - -async fn restart_target_gateway( - app: &AppHandle, - target_location: TargetLocation, - instance_id: &str, -) -> Result<(), String> { - match target_location { - TargetLocation::LocalOpenclaw => { - restart_gateway().await?; - } - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - remote_restart_gateway(app.state::(), host_id).await?; - } - } - Ok(()) -} - fn rescue_setup_command_result( action: &str, profile: &str, @@ -829,90 +566,6 @@ fn extract_json_block(text: &str) -> Option<&str> { clawpal_core::doctor::extract_json_from_output(text) } -fn build_agent_plan_prompt( - kind: PlanKind, - session_id: &str, - round: usize, - target_location: TargetLocation, - instance_id: &str, - diagnosis: &RescuePrimaryDiagnosisResult, - config_context: &ConfigExcerptContext, - previous_results: &[CommandResult], -) -> String { - let kind_label = match kind { - PlanKind::Detect => "detection", - PlanKind::Investigate => "investigation", - PlanKind::Repair => "repair", - }; - let target_label = match target_location { - TargetLocation::LocalOpenclaw => "local_openclaw", - TargetLocation::RemoteOpenclaw => "remote_openclaw", - }; - let diagnosis_json = - serde_json::to_string_pretty(&diagnosis_context(diagnosis)).unwrap_or_else(|_| "{}".into()); - let config_context_json = serde_json::to_string_pretty(&json!({ - "configExcerpt": config_context.config_excerpt, - "configExcerptRaw": config_context.config_excerpt_raw, - "configParseError": config_context.config_parse_error, - })) - .unwrap_or_else(|_| "{}".into()); - let previous_results_json = - serde_json::to_string_pretty(previous_results).unwrap_or_else(|_| "[]".into()); - let phase_rules = match kind { - PlanKind::Detect => "For detection plans, gather only the commands needed to confirm current state. 
Set healthy=true and done=true only when no issue remains.", - PlanKind::Investigate => "For investigation plans, return read-only diagnosis steps only. Do not modify files, delete files, overwrite config, or restart services. Prefer commands that inspect, validate, backup, or print evidence for why the config is unreadable. Do not run follow/tail commands, streaming log readers, or any unbounded command; every investigation command must be bounded and return promptly. Do not use heredocs, multiline scripts, or commands that wait on stdin. Prefer single-line commands over shell scripting.", - PlanKind::Repair => "For repair plans, return the minimal safe repair commands. Reference prior investigation evidence when config is unreadable. Back up the file before changing it and include validation/rediagnosis steps as needed. Do not invent OpenClaw subcommands. Use only the verified OpenClaw commands listed below or the `clawpal doctor ...` tools. Do not use `openclaw auth ...` commands. Do not use `openclaw doctor --json`; use `clawpal doctor probe-openclaw` or `clawpal doctor exec --tool doctor` instead. 
Do not use heredocs, multiline scripts, or commands that wait on stdin.", - }; - format!( - "Identity bootstrap for this session:\n\ -- Your name: ClawPal Remote Doctor\n\ -- Your creature: maintenance daemon\n\ -- Your vibe: direct, terse, operational\n\ -- Your emoji: none\n\ -- The user is: ClawPal desktop app\n\ -- The user timezone is: Asia/Shanghai\n\ -- Do not ask identity/bootstrap questions.\n\ -- Do not ask who you are or who the user is.\n\ -- Do not modify IDENTITY.md, USER.md, or workspace bootstrap files.\n\ -\n\ -You are ClawPal Remote Doctor planner.\n\ -Return ONLY one JSON object and no markdown.\n\ -Task: produce the next {kind_label} plan for OpenClaw.\n\ -Session: {session_id}\n\ -Round: {round}\n\ -Target location: {target_label}\n\ -Instance id: {instance_id}\n\ -Diagnosis JSON:\n{diagnosis_json}\n\n\ -Config context JSON:\n{config_context_json}\n\n\ -Previous command results JSON:\n{previous_results_json}\n\n\ -Available gateway tools:\n\ -- `clawpal doctor probe-openclaw`\n\ -- `clawpal doctor config-read [path]`\n\ -- `clawpal doctor config-read-raw`\n\ -- `clawpal doctor config-upsert `\n\ -- `clawpal doctor config-delete `\n\ -- `clawpal doctor config-write-raw-base64 `\n\ -- `clawpal doctor exec --tool [--args ]`\n\ -- Verified direct OpenClaw commands only:\n\ - - `openclaw --version`\n\ - - `openclaw gateway status`\n\ -You may invoke these tools before answering when you need fresh diagnostics or config state.\n\ -If you already have enough information, return the JSON plan directly.\n\n\ -Return this exact JSON schema:\n\ -{{\n \"planId\": \"string\",\n \"planKind\": \"{kind}\",\n \"summary\": \"string\",\n \"commands\": [{{\"argv\": [\"cmd\"], \"timeoutSec\": 60, \"purpose\": \"why\", \"continueOnFailure\": false}}],\n \"healthy\": false,\n \"done\": false,\n \"success\": false\n}}\n\ -Rules:\n\ -- {phase_rules}\n\ -- For repair plans, return shell/openclaw commands in commands.\n\ -- Keep commands empty when no command is 
needed.\n\ -- Output valid JSON only.", - kind = match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - } - ) -} - fn parse_agent_plan_response(kind: PlanKind, text: &str) -> Result { let json_block = extract_json_block(text) .ok_or_else(|| format!("Remote doctor agent did not return JSON: {text}"))?; From 1e5cbcf75d503b28eefdfb861d9fc88126046f20 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 15:18:17 +0800 Subject: [PATCH 10/20] refactor: extract remote doctor plan helpers --- src-tauri/src/remote_doctor/legacy.rs | 650 +----------------------- src-tauri/src/remote_doctor/plan.rs | 678 +++++++++++++++++++++++++- 2 files changed, 686 insertions(+), 642 deletions(-) diff --git a/src-tauri/src/remote_doctor/legacy.rs b/src-tauri/src/remote_doctor/legacy.rs index e39e4fdb..27ec68b5 100644 --- a/src-tauri/src/remote_doctor/legacy.rs +++ b/src-tauri/src/remote_doctor/legacy.rs @@ -28,6 +28,15 @@ use super::agent::{ protocol_requires_bridge, protocol_runs_rescue_preflight, remote_doctor_agent_id, remote_doctor_agent_session_key, repair_method_name, }; +use super::plan::{ + agent_plan_step_types, apply_config_set, apply_config_unset, command_result_stdout, + config_read_response, decode_base64_config_payload, execute_clawpal_command, execute_command, + execute_invoke_payload, execute_plan_command, parse_invoke_argv, parse_plan_response, + plan_command_failure_message, plan_command_uses_internal_clawpal_tool, + report_clawpal_server_final_result, report_clawpal_server_step_result, request_clawpal_server_plan, + request_plan, shell_escape, validate_clawpal_exec_args, validate_plan_command_argv, + build_shell_command, +}; use super::session::{ append_session_log as append_remote_doctor_log, emit_session_progress as emit_progress, result_for_completion, @@ -509,59 +518,6 @@ fn stalled_plan_error_message(observation: &RepairRoundObservation) -> String { ) } -fn ensure_object(value: &mut 
Value) -> Result<&mut serde_json::Map, String> { - if !value.is_object() { - *value = json!({}); - } - value - .as_object_mut() - .ok_or_else(|| "Expected object while applying remote doctor config step".to_string()) -} - -fn apply_config_set(root: &mut Value, path: &str, value: Value) -> Result<(), String> { - let segments = path - .split('.') - .filter(|segment| !segment.trim().is_empty()) - .collect::>(); - if segments.is_empty() { - return Err("Config set path cannot be empty".into()); - } - let mut cursor = root; - for segment in &segments[..segments.len() - 1] { - let object = ensure_object(cursor)?; - cursor = object - .entry((*segment).to_string()) - .or_insert_with(|| json!({})); - } - let object = ensure_object(cursor)?; - object.insert(segments[segments.len() - 1].to_string(), value); - Ok(()) -} - -fn apply_config_unset(root: &mut Value, path: &str) -> Result<(), String> { - let segments = path - .split('.') - .filter(|segment| !segment.trim().is_empty()) - .collect::>(); - if segments.is_empty() { - return Err("Config unset path cannot be empty".into()); - } - let mut cursor = root; - for segment in &segments[..segments.len() - 1] { - let Some(next) = cursor - .as_object_mut() - .and_then(|object| object.get_mut(*segment)) - else { - return Ok(()); - }; - cursor = next; - } - if let Some(object) = cursor.as_object_mut() { - object.remove(segments[segments.len() - 1]); - } - Ok(()) -} - fn extract_json_block(text: &str) -> Option<&str> { clawpal_core::doctor::extract_json_from_output(text) } @@ -574,256 +530,6 @@ fn parse_agent_plan_response(kind: PlanKind, text: &str) -> Result Result, String> { - if let Some(argv) = args.get("argv").and_then(Value::as_array) { - let parsed = argv - .iter() - .map(|value| { - value - .as_str() - .map(str::to_string) - .ok_or_else(|| "invoke argv entries must be strings".to_string()) - }) - .collect::, _>>()?; - if parsed.is_empty() { - return Err("invoke argv cannot be empty".into()); - } - return Ok(parsed); - } - - 
let arg_string = args - .get("args") - .and_then(Value::as_str) - .or_else(|| args.get("command").and_then(Value::as_str)) - .unwrap_or(""); - let mut parsed = if arg_string.trim().is_empty() { - Vec::new() - } else { - shell_words::split(arg_string) - .map_err(|error| format!("Failed to parse invoke args: {error}"))? - }; - if parsed.first().map(String::as_str) != Some(command) { - parsed.insert(0, command.to_string()); - } - Ok(parsed) -} - -async fn execute_clawpal_command( - app: &AppHandle, - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - argv: &[String], -) -> Result { - match argv.get(1).map(String::as_str) { - Some("doctor") => { - execute_clawpal_doctor_command(app, pool, target_location, instance_id, argv).await - } - other => Err(format!( - "Unsupported clawpal command in remote doctor agent session: {:?}", - other - )), - } -} - -async fn execute_clawpal_doctor_command( - app: &AppHandle, - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - argv: &[String], -) -> Result { - match argv.get(2).map(String::as_str) { - Some("probe-openclaw") => { - let version_result = execute_command( - pool, - target_location, - instance_id, - &["openclaw".into(), "--version".into()], - ) - .await?; - let which_result = match target_location { - TargetLocation::LocalOpenclaw => { - execute_command( - pool, - target_location, - instance_id, - &[ - "sh".into(), - "-lc".into(), - "command -v openclaw || true".into(), - ], - ) - .await? - } - TargetLocation::RemoteOpenclaw => { - execute_command( - pool, - target_location, - instance_id, - &[ - "sh".into(), - "-lc".into(), - "command -v openclaw || true".into(), - ], - ) - .await? 
- } - }; - Ok(json!({ - "ok": version_result.exit_code == Some(0), - "version": version_result.stdout.trim(), - "openclawPath": which_result.stdout.trim(), - })) - } - Some("config-read") => { - let maybe_path = argv - .get(3) - .map(String::as_str) - .filter(|value| !value.starts_with("--")); - let raw = read_target_config_raw(app, target_location, instance_id).await?; - config_read_response(&raw, maybe_path) - } - Some("config-read-raw") => { - let raw = read_target_config_raw(app, target_location, instance_id).await?; - Ok(json!({ - "raw": raw, - })) - } - Some("config-delete") => { - let path = argv - .get(3) - .ok_or("clawpal doctor config-delete requires a path")?; - let mut config = read_target_config(app, target_location, instance_id).await?; - apply_config_unset(&mut config, path)?; - write_target_config(app, target_location, instance_id, &config).await?; - restart_target_gateway(app, target_location, instance_id).await?; - Ok(json!({ "deleted": true, "path": path })) - } - Some("config-write-raw-base64") => { - let encoded = argv - .get(3) - .ok_or("clawpal doctor config-write-raw-base64 requires a base64 payload")?; - let decoded = decode_base64_config_payload(encoded)?; - write_target_config_raw(app, target_location, instance_id, &decoded).await?; - restart_target_gateway(app, target_location, instance_id).await?; - Ok(json!({ - "written": true, - "bytes": decoded.len(), - })) - } - Some("config-upsert") => { - let path = argv - .get(3) - .ok_or("clawpal doctor config-upsert requires a path")?; - let value_raw = argv - .get(4) - .ok_or("clawpal doctor config-upsert requires a value")?; - let value: Value = serde_json::from_str(value_raw) - .map_err(|error| format!("Invalid JSON value for config-upsert: {error}"))?; - let mut config = read_target_config(app, target_location, instance_id).await?; - apply_config_set(&mut config, path, value)?; - write_target_config(app, target_location, instance_id, &config).await?; - restart_target_gateway(app, 
target_location, instance_id).await?; - Ok(json!({ "upserted": true, "path": path })) - } - Some("exec") => { - let tool_idx = argv - .iter() - .position(|part| part == "--tool") - .ok_or("clawpal doctor exec requires --tool")?; - let tool = argv - .get(tool_idx + 1) - .ok_or("clawpal doctor exec missing tool name")?; - let args_idx = argv.iter().position(|part| part == "--args"); - let mut exec_argv = vec![tool.clone()]; - if let Some(index) = args_idx { - if let Some(arg_string) = argv.get(index + 1) { - exec_argv.extend(shell_words::split(arg_string).map_err(|error| { - format!("Failed to parse clawpal doctor exec args: {error}") - })?); - } - } - let result = execute_command(pool, target_location, instance_id, &exec_argv).await?; - Ok(json!({ - "argv": result.argv, - "exitCode": result.exit_code, - "stdout": result.stdout, - "stderr": result.stderr, - })) - } - other => Err(format!( - "Unsupported clawpal doctor subcommand in remote doctor agent session: {:?}", - other - )), - } -} - -fn config_read_response(raw: &str, path: Option<&str>) -> Result { - let context = build_config_excerpt_context(raw); - if let Some(parse_error) = context.config_parse_error { - return Ok(json!({ - "value": Value::Null, - "path": path, - "raw": context.config_excerpt_raw.unwrap_or_else(|| raw.to_string()), - "parseError": parse_error, - })); - } - - let value = if let Some(path) = path { - clawpal_core::doctor::select_json_value_from_str( - &serde_json::to_string_pretty(&context.config_excerpt).unwrap_or_else(|_| "{}".into()), - Some(path), - "remote doctor config", - )? 
- } else { - context.config_excerpt - }; - - Ok(json!({ - "value": value, - "path": path, - })) -} - -fn decode_base64_config_payload(encoded: &str) -> Result { - let bytes = base64::engine::general_purpose::STANDARD - .decode(encoded.trim()) - .map_err(|error| format!("Failed to decode base64 config payload: {error}"))?; - String::from_utf8(bytes) - .map_err(|error| format!("Base64 config payload is not valid UTF-8: {error}")) -} - -async fn execute_invoke_payload( - app: &AppHandle, - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - payload: &Value, -) -> Result { - let command = payload - .get("command") - .and_then(Value::as_str) - .ok_or("invoke payload missing command")?; - let args = payload.get("args").cloned().unwrap_or(Value::Null); - let argv = parse_invoke_argv(command, &args)?; - match command { - "openclaw" => { - let result = execute_command(pool, target_location, instance_id, &argv).await?; - Ok(json!({ - "argv": result.argv, - "exitCode": result.exit_code, - "stdout": result.stdout, - "stderr": result.stderr, - })) - } - "clawpal" => execute_clawpal_command(app, pool, target_location, instance_id, &argv).await, - other => Err(format!( - "Unsupported invoke command in remote doctor agent session: {other}" - )), - } -} - async fn run_agent_request_with_bridge( app: &AppHandle, client: &NodeClient, @@ -878,251 +584,6 @@ async fn run_agent_request_with_bridge( } } -fn shell_escape(value: &str) -> String { - format!("'{}'", value.replace('\'', "'\\''")) -} - -fn build_shell_command(argv: &[String]) -> String { - argv.iter() - .map(|part| shell_escape(part)) - .collect::>() - .join(" ") -} - -async fn execute_command( - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - argv: &[String], -) -> Result { - let started = Instant::now(); - if argv.is_empty() { - return Err("Plan command argv cannot be empty".into()); - } - let result = match target_location { - TargetLocation::LocalOpenclaw 
=> { - if argv[0] == "openclaw" { - let arg_refs = argv - .iter() - .skip(1) - .map(String::as_str) - .collect::>(); - let output = run_openclaw(&arg_refs)?; - CommandResult { - argv: argv.to_vec(), - exit_code: Some(output.exit_code), - stdout: output.stdout, - stderr: output.stderr, - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - } - } else { - let mut command = std::process::Command::new(&argv[0]); - command.args(argv.iter().skip(1)); - if let Some(openclaw_home) = get_active_openclaw_home_override() { - command.env("OPENCLAW_HOME", openclaw_home); - } - let output = command.output().map_err(|error| { - format!("Failed to execute local command {:?}: {error}", argv) - })?; - CommandResult { - argv: argv.to_vec(), - exit_code: output.status.code(), - stdout: String::from_utf8_lossy(&output.stdout).to_string(), - stderr: String::from_utf8_lossy(&output.stderr).to_string(), - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - } - } - } - TargetLocation::RemoteOpenclaw => { - let host_id = primary_remote_target_host_id(instance_id)?; - if argv[0] == "openclaw" { - let arg_refs = argv - .iter() - .skip(1) - .map(String::as_str) - .collect::>(); - let output = run_openclaw_remote(pool, &host_id, &arg_refs).await?; - CommandResult { - argv: argv.to_vec(), - exit_code: Some(output.exit_code), - stdout: output.stdout, - stderr: output.stderr, - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - } - } else { - let output = pool - .exec_login(&host_id, &build_shell_command(argv)) - .await?; - CommandResult { - argv: argv.to_vec(), - exit_code: Some(output.exit_code as i32), - stdout: output.stdout, - stderr: output.stderr, - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - } - } - } - }; - Ok(result) -} - -fn plan_command_uses_internal_clawpal_tool(argv: &[String]) -> bool { - argv.first().map(String::as_str) == Some("clawpal") -} - -fn validate_clawpal_exec_args(argv: &[String]) 
-> Result<(), String> { - if argv.get(0).map(String::as_str) != Some("clawpal") - || argv.get(1).map(String::as_str) != Some("doctor") - || argv.get(2).map(String::as_str) != Some("exec") - { - return Ok(()); - } - - let args_idx = argv.iter().position(|part| part == "--args"); - let Some(index) = args_idx else { - return Ok(()); - }; - let Some(arg_string) = argv.get(index + 1) else { - return Ok(()); - }; - if arg_string.contains('\n') || arg_string.contains("<<") { - return Err(format!( - "Unsupported clawpal doctor exec args: {}. Use bounded single-line commands without heredocs or stdin-driven scripts.", - argv.join(" ") - )); - } - Ok(()) -} - -fn validate_plan_command_argv(argv: &[String]) -> Result<(), String> { - if argv.is_empty() { - return Err("Plan command argv cannot be empty".into()); - } - validate_clawpal_exec_args(argv)?; - if argv[0] != "openclaw" { - return Ok(()); - } - - let supported = argv == ["openclaw", "--version"] || argv == ["openclaw", "gateway", "status"]; - if supported { - Ok(()) - } else { - Err(format!( - "Unsupported openclaw plan command: {}", - argv.join(" ") - )) - } -} - -fn plan_command_failure_message( - kind: PlanKind, - round: usize, - argv: &[String], - error: &str, -) -> String { - let kind_label = match kind { - PlanKind::Detect => "Detect", - PlanKind::Investigate => "Investigate", - PlanKind::Repair => "Repair", - }; - format!( - "{kind_label} command failed in round {round}: {}: {error}", - argv.join(" ") - ) -} - -fn command_result_stdout(value: &Value) -> String { - value - .get("stdout") - .and_then(Value::as_str) - .map(str::to_string) - .unwrap_or_else(|| { - serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string()) - }) -} - -async fn execute_plan_command( - app: &AppHandle, - pool: &SshConnectionPool, - target_location: TargetLocation, - instance_id: &str, - argv: &[String], -) -> Result { - let started = Instant::now(); - validate_plan_command_argv(argv)?; - if 
plan_command_uses_internal_clawpal_tool(argv) { - let value = execute_clawpal_command(app, pool, target_location, instance_id, argv).await?; - let exit_code = value - .get("exitCode") - .and_then(Value::as_i64) - .map(|code| code as i32) - .unwrap_or(0); - let stderr = value - .get("stderr") - .and_then(Value::as_str) - .unwrap_or("") - .to_string(); - return Ok(CommandResult { - argv: argv.to_vec(), - exit_code: Some(exit_code), - stdout: command_result_stdout(&value), - stderr, - duration_ms: started.elapsed().as_millis() as u64, - timed_out: false, - }); - } - - execute_command(pool, target_location, instance_id, argv).await -} - -fn parse_plan_response(kind: PlanKind, value: Value) -> Result { - let mut response: PlanResponse = serde_json::from_value(value) - .map_err(|error| format!("Failed to parse remote doctor plan response: {error}"))?; - response.plan_kind = kind; - if response.plan_id.trim().is_empty() { - response.plan_id = format!("plan-{}", Uuid::new_v4()); - } - Ok(response) -} - -async fn request_plan( - client: &NodeClient, - method: &str, - kind: PlanKind, - session_id: &str, - round: usize, - target_location: TargetLocation, - instance_id: &str, - previous_results: &[CommandResult], -) -> Result { - let response = client - .send_request( - method, - json!({ - "sessionId": session_id, - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "targetLocation": match target_location { - TargetLocation::LocalOpenclaw => "local_openclaw", - TargetLocation::RemoteOpenclaw => "remote_openclaw", - }, - "instanceId": instance_id, - "hostId": instance_id.strip_prefix("ssh:"), - "previousResults": previous_results, - }), - ) - .await?; - parse_plan_response(kind, response) -} - async fn request_agent_plan( app: &AppHandle, client: &NodeClient, @@ -1169,99 +630,6 @@ async fn request_agent_plan( parse_agent_plan_response(kind, &text) } -fn 
agent_plan_step_types(plan: &PlanResponse) -> Vec { - if plan.commands.is_empty() { - return vec![format!( - "plan:{}", - match plan.plan_kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - } - )]; - } - plan.commands - .iter() - .map(|command| { - command - .argv - .first() - .cloned() - .unwrap_or_else(|| "empty-command".to_string()) - }) - .collect() -} - -async fn request_clawpal_server_plan( - client: &NodeClient, - session_id: &str, - round: usize, - instance_id: &str, - target_location: TargetLocation, - diagnosis: &RescuePrimaryDiagnosisResult, - config_context: &ConfigExcerptContext, -) -> Result { - let response = client - .send_request( - "remote_repair_plan.request", - json!({ - "requestId": format!("{session_id}-round-{round}"), - "targetId": instance_id, - "targetLocation": match target_location { - TargetLocation::LocalOpenclaw => "local_openclaw", - TargetLocation::RemoteOpenclaw => "remote_openclaw", - }, - "context": { - "configExcerpt": config_context.config_excerpt, - "configExcerptRaw": config_context.config_excerpt_raw, - "configParseError": config_context.config_parse_error, - "diagnosis": diagnosis_context(diagnosis), - } - }), - ) - .await?; - serde_json::from_value::(response) - .map_err(|error| format!("Failed to parse clawpal-server plan response: {error}")) -} - -async fn report_clawpal_server_step_result( - client: &NodeClient, - plan_id: &str, - step_index: usize, - step: &ClawpalServerPlanStep, - result: &CommandResult, -) { - let _ = client - .send_request( - "remote_repair_plan.step_result", - json!({ - "planId": plan_id, - "stepIndex": step_index, - "step": step, - "result": result, - }), - ) - .await; -} - -async fn report_clawpal_server_final_result( - client: &NodeClient, - plan_id: &str, - healthy: bool, - diagnosis: &RescuePrimaryDiagnosisResult, -) { - let _ = client - .send_request( - "remote_repair_plan.final_result", - json!({ - "planId": plan_id, - "healthy": 
healthy, - "diagnosis": diagnosis_context(diagnosis), - }), - ) - .await; -} - async fn run_remote_doctor_repair_loop( app: Option<&AppHandle>, pool: &SshConnectionPool, diff --git a/src-tauri/src/remote_doctor/plan.rs b/src-tauri/src/remote_doctor/plan.rs index fc8b9e0a..60a0cdd3 100644 --- a/src-tauri/src/remote_doctor/plan.rs +++ b/src-tauri/src/remote_doctor/plan.rs @@ -1 +1,677 @@ -// Placeholder for plan parsing, validation, and execution helpers. +use std::time::Instant; + +use base64::Engine; +use serde_json::{json, Value}; +use tauri::{AppHandle, Runtime}; +use uuid::Uuid; + +use super::config::{ + build_config_excerpt_context, diagnosis_context, primary_remote_target_host_id, + read_target_config, read_target_config_raw, restart_target_gateway, write_target_config, + write_target_config_raw, +}; +use super::types::{ + ClawpalServerPlanResponse, ClawpalServerPlanStep, CommandResult, ConfigExcerptContext, + PlanKind, PlanResponse, TargetLocation, +}; +use crate::cli_runner::{get_active_openclaw_home_override, run_openclaw, run_openclaw_remote}; +use crate::commands::RescuePrimaryDiagnosisResult; +use crate::node_client::NodeClient; +use crate::ssh::SshConnectionPool; + +pub(crate) fn parse_invoke_argv(command: &str, args: &Value) -> Result, String> { + if let Some(argv) = args.get("argv").and_then(Value::as_array) { + let parsed = argv + .iter() + .map(|value| { + value + .as_str() + .map(str::to_string) + .ok_or_else(|| "invoke argv entries must be strings".to_string()) + }) + .collect::, _>>()?; + if parsed.is_empty() { + return Err("invoke argv cannot be empty".into()); + } + return Ok(parsed); + } + + let arg_string = args + .get("args") + .and_then(Value::as_str) + .or_else(|| args.get("command").and_then(Value::as_str)) + .unwrap_or(""); + let mut parsed = if arg_string.trim().is_empty() { + Vec::new() + } else { + shell_words::split(arg_string) + .map_err(|error| format!("Failed to parse invoke args: {error}"))? 
+ }; + if parsed.first().map(String::as_str) != Some(command) { + parsed.insert(0, command.to_string()); + } + Ok(parsed) +} + +pub(crate) async fn execute_clawpal_command( + app: &AppHandle, + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> Result { + match argv.get(1).map(String::as_str) { + Some("doctor") => { + execute_clawpal_doctor_command(app, pool, target_location, instance_id, argv).await + } + other => Err(format!( + "Unsupported clawpal command in remote doctor agent session: {:?}", + other + )), + } +} + +pub(crate) async fn execute_clawpal_doctor_command( + app: &AppHandle, + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> Result { + match argv.get(2).map(String::as_str) { + Some("probe-openclaw") => { + let version_result = execute_command( + pool, + target_location, + instance_id, + &["openclaw".into(), "--version".into()], + ) + .await?; + let which_result = execute_command( + pool, + target_location, + instance_id, + &["sh".into(), "-lc".into(), "command -v openclaw || true".into()], + ) + .await?; + Ok(json!({ + "ok": version_result.exit_code == Some(0), + "version": version_result.stdout.trim(), + "openclawPath": which_result.stdout.trim(), + })) + } + Some("config-read") => { + let maybe_path = argv + .get(3) + .map(String::as_str) + .filter(|value| !value.starts_with("--")); + let raw = read_target_config_raw(app, target_location, instance_id).await?; + config_read_response(&raw, maybe_path) + } + Some("config-read-raw") => { + let raw = read_target_config_raw(app, target_location, instance_id).await?; + Ok(json!({ "raw": raw })) + } + Some("config-delete") => { + let path = argv + .get(3) + .ok_or("clawpal doctor config-delete requires a path")?; + let mut config = read_target_config(app, target_location, instance_id).await?; + apply_config_unset(&mut config, path)?; + write_target_config(app, target_location, instance_id, 
&config).await?; + restart_target_gateway(app, target_location, instance_id).await?; + Ok(json!({ "deleted": true, "path": path })) + } + Some("config-write-raw-base64") => { + let encoded = argv + .get(3) + .ok_or("clawpal doctor config-write-raw-base64 requires a base64 payload")?; + let decoded = decode_base64_config_payload(encoded)?; + write_target_config_raw(app, target_location, instance_id, &decoded).await?; + restart_target_gateway(app, target_location, instance_id).await?; + Ok(json!({ "written": true, "bytes": decoded.len() })) + } + Some("config-upsert") => { + let path = argv + .get(3) + .ok_or("clawpal doctor config-upsert requires a path")?; + let value_raw = argv + .get(4) + .ok_or("clawpal doctor config-upsert requires a value")?; + let value: Value = serde_json::from_str(value_raw) + .map_err(|error| format!("Invalid JSON value for config-upsert: {error}"))?; + let mut config = read_target_config(app, target_location, instance_id).await?; + apply_config_set(&mut config, path, value)?; + write_target_config(app, target_location, instance_id, &config).await?; + restart_target_gateway(app, target_location, instance_id).await?; + Ok(json!({ "upserted": true, "path": path })) + } + Some("exec") => { + let tool_idx = argv + .iter() + .position(|part| part == "--tool") + .ok_or("clawpal doctor exec requires --tool")?; + let tool = argv + .get(tool_idx + 1) + .ok_or("clawpal doctor exec missing tool name")?; + let args_idx = argv.iter().position(|part| part == "--args"); + let mut exec_argv = vec![tool.clone()]; + if let Some(index) = args_idx { + if let Some(arg_string) = argv.get(index + 1) { + exec_argv.extend(shell_words::split(arg_string).map_err(|error| { + format!("Failed to parse clawpal doctor exec args: {error}") + })?); + } + } + let result = execute_command(pool, target_location, instance_id, &exec_argv).await?; + Ok(json!({ + "argv": result.argv, + "exitCode": result.exit_code, + "stdout": result.stdout, + "stderr": result.stderr, + })) + } + 
other => Err(format!( + "Unsupported clawpal doctor subcommand in remote doctor agent session: {:?}", + other + )), + } +} + +pub(crate) fn config_read_response(raw: &str, path: Option<&str>) -> Result { + let context = build_config_excerpt_context(raw); + if let Some(parse_error) = context.config_parse_error { + return Ok(json!({ + "value": Value::Null, + "path": path, + "raw": context.config_excerpt_raw.unwrap_or_else(|| raw.to_string()), + "parseError": parse_error, + })); + } + + let value = if let Some(path) = path { + clawpal_core::doctor::select_json_value_from_str( + &serde_json::to_string_pretty(&context.config_excerpt).unwrap_or_else(|_| "{}".into()), + Some(path), + "remote doctor config", + )? + } else { + context.config_excerpt + }; + + Ok(json!({ + "value": value, + "path": path, + })) +} + +pub(crate) fn decode_base64_config_payload(encoded: &str) -> Result { + let bytes = base64::engine::general_purpose::STANDARD + .decode(encoded.trim()) + .map_err(|error| format!("Failed to decode base64 config payload: {error}"))?; + String::from_utf8(bytes) + .map_err(|error| format!("Base64 config payload is not valid UTF-8: {error}")) +} + +pub(crate) async fn execute_invoke_payload( + app: &AppHandle, + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + payload: &Value, +) -> Result { + let command = payload + .get("command") + .and_then(Value::as_str) + .ok_or("invoke payload missing command")?; + let args = payload.get("args").cloned().unwrap_or(Value::Null); + let argv = parse_invoke_argv(command, &args)?; + match command { + "openclaw" => { + let result = execute_command(pool, target_location, instance_id, &argv).await?; + Ok(json!({ + "argv": result.argv, + "exitCode": result.exit_code, + "stdout": result.stdout, + "stderr": result.stderr, + })) + } + "clawpal" => execute_clawpal_command(app, pool, target_location, instance_id, &argv).await, + other => Err(format!( + "Unsupported invoke command in remote doctor agent 
session: {other}" + )), + } +} + +pub(crate) fn shell_escape(value: &str) -> String { + format!("'{}'", value.replace('\'', "'\\''")) +} + +pub(crate) fn build_shell_command(argv: &[String]) -> String { + argv.iter() + .map(|part| shell_escape(part)) + .collect::>() + .join(" ") +} + +pub(crate) async fn execute_command( + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> Result { + let started = Instant::now(); + if argv.is_empty() { + return Err("Plan command argv cannot be empty".into()); + } + let result = match target_location { + TargetLocation::LocalOpenclaw => { + if argv[0] == "openclaw" { + let arg_refs = argv.iter().skip(1).map(String::as_str).collect::>(); + let output = run_openclaw(&arg_refs)?; + CommandResult { + argv: argv.to_vec(), + exit_code: Some(output.exit_code), + stdout: output.stdout, + stderr: output.stderr, + duration_ms: started.elapsed().as_millis() as u64, + timed_out: false, + } + } else { + let mut command = std::process::Command::new(&argv[0]); + command.args(argv.iter().skip(1)); + if let Some(openclaw_home) = get_active_openclaw_home_override() { + command.env("OPENCLAW_HOME", openclaw_home); + } + let output = command.output().map_err(|error| { + format!("Failed to execute local command {:?}: {error}", argv) + })?; + CommandResult { + argv: argv.to_vec(), + exit_code: output.status.code(), + stdout: String::from_utf8_lossy(&output.stdout).to_string(), + stderr: String::from_utf8_lossy(&output.stderr).to_string(), + duration_ms: started.elapsed().as_millis() as u64, + timed_out: false, + } + } + } + TargetLocation::RemoteOpenclaw => { + let host_id = primary_remote_target_host_id(instance_id)?; + if argv[0] == "openclaw" { + let arg_refs = argv.iter().skip(1).map(String::as_str).collect::>(); + let output = run_openclaw_remote(pool, &host_id, &arg_refs).await?; + CommandResult { + argv: argv.to_vec(), + exit_code: Some(output.exit_code), + stdout: output.stdout, + stderr: 
output.stderr, + duration_ms: started.elapsed().as_millis() as u64, + timed_out: false, + } + } else { + let output = pool.exec_login(&host_id, &build_shell_command(argv)).await?; + CommandResult { + argv: argv.to_vec(), + exit_code: Some(output.exit_code as i32), + stdout: output.stdout, + stderr: output.stderr, + duration_ms: started.elapsed().as_millis() as u64, + timed_out: false, + } + } + } + }; + Ok(result) +} + +pub(crate) fn plan_command_uses_internal_clawpal_tool(argv: &[String]) -> bool { + argv.first().map(String::as_str) == Some("clawpal") +} + +pub(crate) fn validate_clawpal_exec_args(argv: &[String]) -> Result<(), String> { + if argv.get(0).map(String::as_str) != Some("clawpal") + || argv.get(1).map(String::as_str) != Some("doctor") + || argv.get(2).map(String::as_str) != Some("exec") + { + return Ok(()); + } + + let args_idx = argv.iter().position(|part| part == "--args"); + let Some(index) = args_idx else { + return Ok(()); + }; + let Some(arg_string) = argv.get(index + 1) else { + return Ok(()); + }; + if arg_string.contains('\n') || arg_string.contains("<<") { + return Err(format!( + "Unsupported clawpal doctor exec args: {}. 
Use bounded single-line commands without heredocs or stdin-driven scripts.", + argv.join(" ") + )); + } + Ok(()) +} + +pub(crate) fn validate_plan_command_argv(argv: &[String]) -> Result<(), String> { + if argv.is_empty() { + return Err("Plan command argv cannot be empty".into()); + } + validate_clawpal_exec_args(argv)?; + if argv[0] != "openclaw" { + return Ok(()); + } + let supported = argv == ["openclaw", "--version"] || argv == ["openclaw", "gateway", "status"]; + if supported { + Ok(()) + } else { + Err(format!("Unsupported openclaw plan command: {}", argv.join(" "))) + } +} + +pub(crate) fn plan_command_failure_message( + kind: PlanKind, + round: usize, + argv: &[String], + error: &str, +) -> String { + let kind_label = match kind { + PlanKind::Detect => "Detect", + PlanKind::Investigate => "Investigate", + PlanKind::Repair => "Repair", + }; + format!( + "{kind_label} command failed in round {round}: {}: {error}", + argv.join(" ") + ) +} + +pub(crate) fn command_result_stdout(value: &Value) -> String { + value + .get("stdout") + .and_then(Value::as_str) + .map(str::to_string) + .unwrap_or_else(|| serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string())) +} + +pub(crate) async fn execute_plan_command( + app: &AppHandle, + pool: &SshConnectionPool, + target_location: TargetLocation, + instance_id: &str, + argv: &[String], +) -> Result { + let started = Instant::now(); + validate_plan_command_argv(argv)?; + if plan_command_uses_internal_clawpal_tool(argv) { + let value = execute_clawpal_command(app, pool, target_location, instance_id, argv).await?; + let exit_code = value + .get("exitCode") + .and_then(Value::as_i64) + .map(|code| code as i32) + .unwrap_or(0); + let stderr = value + .get("stderr") + .and_then(Value::as_str) + .unwrap_or("") + .to_string(); + return Ok(CommandResult { + argv: argv.to_vec(), + exit_code: Some(exit_code), + stdout: command_result_stdout(&value), + stderr, + duration_ms: started.elapsed().as_millis() as u64, + 
timed_out: false, + }); + } + + execute_command(pool, target_location, instance_id, argv).await +} + +pub(crate) fn parse_plan_response(kind: PlanKind, value: Value) -> Result { + let mut response: PlanResponse = serde_json::from_value(value) + .map_err(|error| format!("Failed to parse remote doctor plan response: {error}"))?; + response.plan_kind = kind; + if response.plan_id.trim().is_empty() { + response.plan_id = format!("plan-{}", Uuid::new_v4()); + } + Ok(response) +} + +pub(crate) async fn request_plan( + client: &NodeClient, + method: &str, + kind: PlanKind, + session_id: &str, + round: usize, + target_location: TargetLocation, + instance_id: &str, + previous_results: &[CommandResult], +) -> Result { + let response = client + .send_request( + method, + json!({ + "sessionId": session_id, + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "targetLocation": match target_location { + TargetLocation::LocalOpenclaw => "local_openclaw", + TargetLocation::RemoteOpenclaw => "remote_openclaw", + }, + "instanceId": instance_id, + "hostId": instance_id.strip_prefix("ssh:"), + "previousResults": previous_results, + }), + ) + .await?; + parse_plan_response(kind, response) +} + +pub(crate) fn agent_plan_step_types(plan: &PlanResponse) -> Vec { + if plan.commands.is_empty() { + return vec![format!( + "plan:{}", + match plan.plan_kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + } + )]; + } + plan.commands + .iter() + .map(|command| { + command + .argv + .first() + .cloned() + .unwrap_or_else(|| "empty-command".to_string()) + }) + .collect() +} + +pub(crate) async fn request_clawpal_server_plan( + client: &NodeClient, + session_id: &str, + round: usize, + instance_id: &str, + target_location: TargetLocation, + diagnosis: &RescuePrimaryDiagnosisResult, + config_context: &ConfigExcerptContext, +) -> Result 
{ + let response = client + .send_request( + "remote_repair_plan.request", + json!({ + "requestId": format!("{session_id}-round-{round}"), + "targetId": instance_id, + "targetLocation": match target_location { + TargetLocation::LocalOpenclaw => "local_openclaw", + TargetLocation::RemoteOpenclaw => "remote_openclaw", + }, + "context": { + "configExcerpt": config_context.config_excerpt, + "configExcerptRaw": config_context.config_excerpt_raw, + "configParseError": config_context.config_parse_error, + "diagnosis": diagnosis_context(diagnosis), + } + }), + ) + .await?; + serde_json::from_value::(response) + .map_err(|error| format!("Failed to parse clawpal-server plan response: {error}")) +} + +pub(crate) async fn report_clawpal_server_step_result( + client: &NodeClient, + plan_id: &str, + step_index: usize, + step: &ClawpalServerPlanStep, + result: &CommandResult, +) { + let _ = client + .send_request( + "remote_repair_plan.step_result", + json!({ + "planId": plan_id, + "stepIndex": step_index, + "step": step, + "result": result, + }), + ) + .await; +} + +pub(crate) async fn report_clawpal_server_final_result( + client: &NodeClient, + plan_id: &str, + healthy: bool, + diagnosis: &RescuePrimaryDiagnosisResult, +) { + let _ = client + .send_request( + "remote_repair_plan.final_result", + json!({ + "planId": plan_id, + "healthy": healthy, + "diagnosis": diagnosis_context(diagnosis), + }), + ) + .await; +} + +fn ensure_object(value: &mut Value) -> Result<&mut serde_json::Map, String> { + if !value.is_object() { + *value = json!({}); + } + value + .as_object_mut() + .ok_or_else(|| "Expected object while applying remote doctor config step".to_string()) +} + +pub(crate) fn apply_config_set(root: &mut Value, path: &str, value: Value) -> Result<(), String> { + let segments = path + .split('.') + .filter(|segment| !segment.trim().is_empty()) + .collect::>(); + if segments.is_empty() { + return Err("Config set path cannot be empty".into()); + } + let mut cursor = root; + for 
segment in &segments[..segments.len() - 1] { + let object = ensure_object(cursor)?; + cursor = object + .entry((*segment).to_string()) + .or_insert_with(|| json!({})); + } + let object = ensure_object(cursor)?; + object.insert(segments[segments.len() - 1].to_string(), value); + Ok(()) +} + +pub(crate) fn apply_config_unset(root: &mut Value, path: &str) -> Result<(), String> { + let segments = path + .split('.') + .filter(|segment| !segment.trim().is_empty()) + .collect::>(); + if segments.is_empty() { + return Err("Config unset path cannot be empty".into()); + } + let mut cursor = root; + for segment in &segments[..segments.len() - 1] { + let Some(next) = cursor + .as_object_mut() + .and_then(|object| object.get_mut(*segment)) + else { + return Ok(()); + }; + cursor = next; + } + if let Some(object) = cursor.as_object_mut() { + object.remove(segments[segments.len() - 1]); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn build_shell_command_escapes_single_quotes() { + let command = build_shell_command(&["echo".into(), "a'b".into()]); + assert_eq!(command, "'echo' 'a'\\''b'"); + } + + #[test] + fn parse_invoke_argv_supports_command_string_payloads() { + let argv = parse_invoke_argv( + "clawpal", + &json!({ + "command": "doctor config-read models.providers.openai" + }), + ) + .expect("parse invoke argv"); + assert_eq!( + argv, + vec![ + "clawpal", + "doctor", + "config-read", + "models.providers.openai" + ] + ); + } + + #[test] + fn unsupported_openclaw_subcommand_is_rejected_early() { + let error = validate_plan_command_argv(&[ + "openclaw".to_string(), + "auth".to_string(), + "list".to_string(), + ]) + .unwrap_err(); + assert!(error.contains("Unsupported openclaw plan command")); + } + + #[test] + fn parse_plan_response_generates_plan_id_when_missing() { + let plan = parse_plan_response( + PlanKind::Detect, + json!({ + "planId": "", + "planKind": "detect", + "summary": "ok", + "commands": [] + }), + ) + .expect("parse plan"); + 
assert!(!plan.plan_id.is_empty()); + assert_eq!(plan.plan_kind, PlanKind::Detect); + } +} From 8d091ddbf68ffe214c165132a18e3bdc1f55db50 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 15:31:03 +0800 Subject: [PATCH 11/20] refactor: route remote doctor entry through repair loops --- src-tauri/src/remote_doctor/agent.rs | 2 +- src-tauri/src/remote_doctor/legacy.rs | 1175 +------------------ src-tauri/src/remote_doctor/mod.rs | 2 +- src-tauri/src/remote_doctor/repair_loops.rs | 1109 ++++++++++++++++- 4 files changed, 1165 insertions(+), 1123 deletions(-) diff --git a/src-tauri/src/remote_doctor/agent.rs b/src-tauri/src/remote_doctor/agent.rs index e5fa1464..b2dac5f0 100644 --- a/src-tauri/src/remote_doctor/agent.rs +++ b/src-tauri/src/remote_doctor/agent.rs @@ -1,7 +1,7 @@ use std::fs::create_dir_all; use std::path::PathBuf; -use serde_json::{json, Value}; +use serde_json::json; use super::config::diagnosis_context; use super::types::{CommandResult, ConfigExcerptContext, PlanKind, RemoteDoctorProtocol, TargetLocation}; diff --git a/src-tauri/src/remote_doctor/legacy.rs b/src-tauri/src/remote_doctor/legacy.rs index 27ec68b5..306c5e22 100644 --- a/src-tauri/src/remote_doctor/legacy.rs +++ b/src-tauri/src/remote_doctor/legacy.rs @@ -1,64 +1,33 @@ -use std::fs::create_dir_all; -use std::io::Write; -use std::path::PathBuf; -use std::process::Command; use std::time::Instant; -use base64::Engine; use serde_json::{json, Value}; -use tauri::{AppHandle, Manager, Runtime, State}; -use uuid::Uuid; +use tauri::{AppHandle, Manager, Runtime}; use super::config::{ - append_diagnosis_log, build_config_excerpt_context, + append_diagnosis_log, build_gateway_credentials as remote_doctor_gateway_credentials, - config_excerpt_log_summary, diagnosis_context, - diagnosis_has_only_non_auto_fixable_issues, diagnosis_is_healthy, diagnosis_missing_rescue_profile, diagnosis_unhealthy_rescue_gateway, - empty_config_excerpt_context, empty_diagnosis, - load_gateway_config as 
remote_doctor_gateway_config, primary_remote_target_host_id, - read_target_config, read_target_config_raw, remote_target_host_id_candidates, - restart_target_gateway, run_rescue_diagnosis, write_target_config, - write_target_config_raw, RemoteDoctorGatewayConfig, + primary_remote_target_host_id, remote_target_host_id_candidates, run_rescue_diagnosis, }; use super::agent::{ - build_agent_plan_prompt, configured_remote_doctor_protocol, default_remote_doctor_protocol, - detect_method_name, ensure_agent_workspace_ready as ensure_local_remote_doctor_agent_ready, - gateway_url_is_local, next_agent_plan_kind, next_agent_plan_kind_for_round, - protocol_requires_bridge, protocol_runs_rescue_preflight, remote_doctor_agent_id, - remote_doctor_agent_session_key, repair_method_name, + build_agent_plan_prompt, remote_doctor_agent_id, remote_doctor_agent_session_key, }; use super::plan::{ - agent_plan_step_types, apply_config_set, apply_config_unset, command_result_stdout, - config_read_response, decode_base64_config_payload, execute_clawpal_command, execute_command, - execute_invoke_payload, execute_plan_command, parse_invoke_argv, parse_plan_response, - plan_command_failure_message, plan_command_uses_internal_clawpal_tool, - report_clawpal_server_final_result, report_clawpal_server_step_result, request_clawpal_server_plan, - request_plan, shell_escape, validate_clawpal_exec_args, validate_plan_command_argv, - build_shell_command, -}; -use super::session::{ - append_session_log as append_remote_doctor_log, - emit_session_progress as emit_progress, result_for_completion, - result_for_completion_with_warnings, + apply_config_set, apply_config_unset, config_read_response, decode_base64_config_payload, + execute_clawpal_command, execute_command, execute_invoke_payload, parse_invoke_argv, + parse_plan_response, plan_command_uses_internal_clawpal_tool, request_plan, build_shell_command, + shell_escape, validate_clawpal_exec_args, validate_plan_command_argv, }; +use 
super::session::{append_session_log as append_remote_doctor_log, emit_session_progress as emit_progress}; use super::types::{ - diagnosis_issue_summaries, parse_target_location, ClawpalServerPlanResponse, - ClawpalServerPlanStep, CommandResult, ConfigExcerptContext, PlanCommand, PlanKind, - PlanResponse, RemoteDoctorProtocol, RemoteDoctorRepairResult, - RepairRoundObservation, StoredRemoteDoctorIdentity, TargetLocation, + CommandResult, ConfigExcerptContext, PlanCommand, PlanKind, PlanResponse, TargetLocation, }; use crate::bridge_client::BridgeClient; -use crate::cli_runner::{get_active_openclaw_home_override, run_openclaw, run_openclaw_remote}; -use crate::commands::logs::log_dev; use crate::commands::{manage_rescue_bot, remote_manage_rescue_bot, RescuePrimaryDiagnosisResult}; use crate::node_client::NodeClient; use crate::ssh::SshConnectionPool; -const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; -const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; - -async fn ensure_agent_bridge_connected( +pub(crate) async fn ensure_agent_bridge_connected( app: &AppHandle, bridge: &BridgeClient, gateway_url: &str, @@ -89,7 +58,7 @@ async fn ensure_agent_bridge_connected( } } -async fn ensure_remote_target_connected( +pub(crate) async fn ensure_remote_target_connected( pool: &SshConnectionPool, instance_id: &str, ) -> Result<(), String> { @@ -115,12 +84,6 @@ async fn ensure_remote_target_connected( } } -fn is_unknown_method_error(error: &str) -> bool { - error.contains("unknown method") - || error.contains("\"code\":\"INVALID_REQUEST\"") - || error.contains("\"code\": \"INVALID_REQUEST\"") -} - fn rescue_setup_command_result( action: &str, profile: &str, @@ -401,7 +364,7 @@ async fn ensure_rescue_profile_ready( Ok(command_result) } -async fn repair_rescue_gateway_if_needed( +pub(crate) async fn repair_rescue_gateway_if_needed( app: &AppHandle, session_id: &str, round: usize, @@ -460,69 +423,11 @@ async fn repair_rescue_gateway_if_needed( Ok(()) } -fn clawpal_server_step_type_summary(steps: 
&[ClawpalServerPlanStep]) -> Value { - let mut counts = serde_json::Map::new(); - for step in steps { - let entry = counts - .entry(step.step_type.clone()) - .or_insert_with(|| Value::from(0_u64)); - let next = entry.as_u64().unwrap_or(0) + 1; - *entry = Value::from(next); - } - Value::Object(counts) -} - -fn repair_plan_stalled(observations: &[RepairRoundObservation], threshold: usize) -> bool { - if observations.len() < threshold { - return false; - } - let recent = &observations[observations.len() - threshold..]; - let Some(first) = recent.first() else { - return false; - }; - !first.issue_summaries.is_empty() - && recent.iter().all(|entry| { - entry.step_types.len() == 1 - && entry.step_types[0] == "doctorRediagnose" - && entry.diagnosis_signature == first.diagnosis_signature - }) -} - -fn round_limit_error_message( - diagnosis: &RescuePrimaryDiagnosisResult, - last_step_types: &[String], -) -> String { - let issue_summary = serde_json::to_string(&diagnosis_issue_summaries(diagnosis)) - .unwrap_or_else(|_| "[]".to_string()); - let step_summary = if last_step_types.is_empty() { - "[]".to_string() - } else { - serde_json::to_string(last_step_types).unwrap_or_else(|_| "[]".to_string()) - }; - format!( - "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean rescue diagnosis result. Last issues: {issue_summary}. Last repair step types: {step_summary}." - ) -} - -fn stalled_plan_error_message(observation: &RepairRoundObservation) -> String { - let issue_summary = - serde_json::to_string(&observation.issue_summaries).unwrap_or_else(|_| "[]".to_string()); - let step_summary = - serde_json::to_string(&observation.step_types).unwrap_or_else(|_| "[]".to_string()); - format!( - "Remote Doctor did not return actionable repair steps by round {} after {} repeated rounds. Last issues: {}. 
Last repair step types: {}.", - observation.round, - REPAIR_PLAN_STALL_THRESHOLD, - issue_summary, - step_summary - ) -} - fn extract_json_block(text: &str) -> Option<&str> { clawpal_core::doctor::extract_json_from_output(text) } -fn parse_agent_plan_response(kind: PlanKind, text: &str) -> Result { +pub(crate) fn parse_agent_plan_response(kind: PlanKind, text: &str) -> Result { let json_block = extract_json_block(text) .ok_or_else(|| format!("Remote doctor agent did not return JSON: {text}"))?; let value: Value = serde_json::from_str(json_block) @@ -530,7 +435,7 @@ fn parse_agent_plan_response(kind: PlanKind, text: &str) -> Result( +pub(crate) async fn run_agent_request_with_bridge( app: &AppHandle, client: &NodeClient, bridge: &BridgeClient, @@ -584,7 +489,7 @@ async fn run_agent_request_with_bridge( } } -async fn request_agent_plan( +pub(crate) async fn request_agent_plan( app: &AppHandle, client: &NodeClient, bridge_client: &BridgeClient, @@ -630,1011 +535,38 @@ async fn request_agent_plan( parse_agent_plan_response(kind, &text) } -async fn run_remote_doctor_repair_loop( - app: Option<&AppHandle>, - pool: &SshConnectionPool, - session_id: &str, - instance_id: &str, - target_location: TargetLocation, - mut request_plan_fn: F, -) -> Result -where - F: FnMut(PlanKind, usize, Vec) -> Fut, - Fut: std::future::Future>, -{ - let mut previous_results: Vec = Vec::new(); - let mut last_command: Option> = None; - let mut last_plan_kind = PlanKind::Detect; - - for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { - emit_progress( - app, - session_id, - round, - "planning_detect", - format!("Requesting detection plan for round {round}"), - Some(PlanKind::Detect), - None, - ); - let detect_plan = - request_plan_fn(PlanKind::Detect, round, previous_results.clone()).await?; - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_received", - "round": round, - "planKind": "detect", - "planId": detect_plan.plan_id, - "summary": detect_plan.summary, - "commandCount": 
detect_plan.commands.len(), - "healthy": detect_plan.healthy, - "done": detect_plan.done, - }), - ); - if detect_plan.healthy || (detect_plan.done && detect_plan.commands.is_empty()) { - return Ok(RemoteDoctorRepairResult { - mode: "remoteDoctor".into(), - status: "completed".into(), - round, - phase: "completed".into(), - last_plan_kind: match last_plan_kind { - PlanKind::Detect => "detect".into(), - PlanKind::Investigate => "investigate".into(), - PlanKind::Repair => "repair".into(), - }, - latest_diagnosis_healthy: true, - last_command, - session_id: session_id.to_string(), - message: "Remote Doctor repair completed with a healthy detection result.".into(), - }); - } - previous_results.clear(); - for command in &detect_plan.commands { - last_command = Some(command.argv.clone()); - emit_progress( - app, - session_id, - round, - "executing_detect", - format!("Running detect command: {}", command.argv.join(" ")), - Some(PlanKind::Detect), - Some(command.argv.clone()), - ); - let command_result = - execute_command(pool, target_location, instance_id, &command.argv).await?; - append_remote_doctor_log( - session_id, - json!({ - "event": "command_result", - "round": round, - "planKind": "detect", - "result": command_result, - }), - ); - if command_result.exit_code.unwrap_or(1) != 0 - && !command.continue_on_failure.unwrap_or(false) - { - previous_results.push(command_result); - return Err(format!( - "Detect command failed in round {round}: {}", - command.argv.join(" ") - )); - } - previous_results.push(command_result); - } - - emit_progress( - app, - session_id, - round, - "planning_repair", - format!("Requesting repair plan for round {round}"), - Some(PlanKind::Repair), - None, - ); - let repair_plan = - request_plan_fn(PlanKind::Repair, round, previous_results.clone()).await?; - last_plan_kind = PlanKind::Repair; - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_received", - "round": round, - "planKind": "repair", - "planId": repair_plan.plan_id, - 
"summary": repair_plan.summary, - "commandCount": repair_plan.commands.len(), - "success": repair_plan.success, - "done": repair_plan.done, - }), - ); - previous_results.clear(); - for command in &repair_plan.commands { - last_command = Some(command.argv.clone()); - emit_progress( - app, - session_id, - round, - "executing_repair", - format!("Running repair command: {}", command.argv.join(" ")), - Some(PlanKind::Repair), - Some(command.argv.clone()), - ); - let command_result = - execute_command(pool, target_location, instance_id, &command.argv).await?; - append_remote_doctor_log( - session_id, - json!({ - "event": "command_result", - "round": round, - "planKind": "repair", - "result": command_result, - }), - ); - if command_result.exit_code.unwrap_or(1) != 0 - && !command.continue_on_failure.unwrap_or(false) - { - previous_results.push(command_result); - return Err(format!( - "Repair command failed in round {round}: {}", - command.argv.join(" ") - )); - } - previous_results.push(command_result); - } - } - - append_remote_doctor_log( - session_id, - json!({ - "event": "session_complete", - "status": "failed", - "reason": "round_limit_exceeded", - }), - ); - Err(format!( - "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean detection result" - )) -} - -async fn run_clawpal_server_repair_loop( - app: &AppHandle, - client: &NodeClient, - session_id: &str, - instance_id: &str, - target_location: TargetLocation, -) -> Result { - let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "initial", 0, &diagnosis); - if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { - repair_rescue_gateway_if_needed( - app, - session_id, - 0, - target_location, - instance_id, - &mut diagnosis, - ) - .await?; - } - if diagnosis_is_healthy(&diagnosis) { - return Ok(result_for_completion( - session_id, - 0, - PlanKind::Detect, - None, - "Remote Doctor repair skipped because 
diagnosis is already healthy.", - )); - } - - let mut last_command = None; - let mut round_observations: Vec = Vec::new(); - let mut last_step_types: Vec = Vec::new(); - for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { - emit_progress( - Some(app), - session_id, - round, - "planning_repair", - format!("Requesting remote repair plan for round {round}"), - Some(PlanKind::Repair), - None, - ); - let config_context = build_config_excerpt_context( - &read_target_config_raw(app, target_location, instance_id).await?, - ); - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_request_context", - "protocol": "clawpal_server", - "round": round, - "planKind": "repair", - "instanceId": instance_id, - "targetLocation": target_location, - "configContext": config_excerpt_log_summary(&config_context), - "diagnosisIssueCount": diagnosis.issues.len(), - "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), - }), - ); - if config_context.config_parse_error.is_some() { - append_remote_doctor_log( - session_id, - json!({ - "event": "config_recovery_context", - "round": round, - "context": config_excerpt_log_summary(&config_context), - }), - ); - } - let plan = request_clawpal_server_plan( - client, - session_id, - round, - instance_id, - target_location, - &diagnosis, - &config_context, - ) - .await?; - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_received", - "protocol": "clawpal_server", - "round": round, - "planKind": "repair", - "planId": plan.plan_id, - "summary": plan.summary, - "stepCount": plan.steps.len(), - "stepTypeCounts": clawpal_server_step_type_summary(&plan.steps), - }), - ); - - let mut current_config = config_context.config_excerpt.clone(); - let mut rediagnosed = false; - let mut round_step_types = Vec::new(); - for (step_index, step) in plan.steps.iter().enumerate() { - round_step_types.push(step.step_type.clone()); - let mut result = CommandResult { - argv: Vec::new(), - exit_code: Some(0), - stdout: String::new(), - stderr: 
String::new(), - duration_ms: 0, - timed_out: false, - }; - let started = Instant::now(); - match step.step_type.as_str() { - "configSet" => { - let path = step.path.as_deref().ok_or("configSet step missing path")?; - let value = step.value.clone().ok_or("configSet step missing value")?; - emit_progress( - Some(app), - session_id, - round, - "executing_repair", - format!("Applying config set: {path}"), - Some(PlanKind::Repair), - None, - ); - apply_config_set(&mut current_config, path, value)?; - write_target_config(app, target_location, instance_id, ¤t_config).await?; - restart_target_gateway(app, target_location, instance_id).await?; - result.argv = vec!["configSet".into(), path.into()]; - result.stdout = format!("Updated {path}"); - } - "configUnset" => { - let path = step - .path - .as_deref() - .ok_or("configUnset step missing path")?; - emit_progress( - Some(app), - session_id, - round, - "executing_repair", - format!("Applying config unset: {path}"), - Some(PlanKind::Repair), - None, - ); - apply_config_unset(&mut current_config, path)?; - write_target_config(app, target_location, instance_id, ¤t_config).await?; - restart_target_gateway(app, target_location, instance_id).await?; - result.argv = vec!["configUnset".into(), path.into()]; - result.stdout = format!("Removed {path}"); - } - "doctorRediagnose" => { - emit_progress( - Some(app), - session_id, - round, - "planning_detect", - format!("Running rescue diagnosis after repair plan round {round}"), - Some(PlanKind::Detect), - None, - ); - diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "post_step_rediagnose", round, &diagnosis); - rediagnosed = true; - result.argv = vec!["doctorRediagnose".into()]; - result.stdout = format!( - "Diagnosis status={} issues={}", - diagnosis.status, - diagnosis.issues.len() - ); - } - other => { - result.exit_code = Some(1); - result.stderr = format!("Unsupported clawpal-server step type: {other}"); - } - } - 
result.duration_ms = started.elapsed().as_millis() as u64; - last_command = Some(result.argv.clone()); - append_remote_doctor_log( - session_id, - json!({ - "event": "command_result", - "protocol": "clawpal_server", - "round": round, - "planKind": "repair", - "stepIndex": step_index, - "step": step, - "result": result, - }), - ); - report_clawpal_server_step_result(client, &plan.plan_id, step_index, step, &result) - .await; - if result.exit_code.unwrap_or(1) != 0 { - return Err(result.stderr); - } - } - - if !rediagnosed { - diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "post_round", round, &diagnosis); - } - if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { - repair_rescue_gateway_if_needed( - app, - session_id, - round, - target_location, - instance_id, - &mut diagnosis, - ) - .await?; - } - last_step_types = round_step_types.clone(); - round_observations.push(RepairRoundObservation::new( - round, - &round_step_types, - &diagnosis, - )); - if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { - let observation = round_observations - .last() - .expect("stalled observations should contain current round"); - append_remote_doctor_log( - session_id, - json!({ - "event": "repair_plan_stalled", - "protocol": "clawpal_server", - "round": round, - "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, - "latestStepTypes": observation.step_types, - "issues": observation.issue_summaries, - }), - ); - return Err(stalled_plan_error_message(observation)); - } - let healthy = diagnosis_is_healthy(&diagnosis); - report_clawpal_server_final_result(client, &plan.plan_id, healthy, &diagnosis).await; - if healthy { - return Ok(result_for_completion( - session_id, - round, - PlanKind::Repair, - last_command, - "Remote Doctor repair completed with a healthy rescue diagnosis.", - )); - } - } - - Err(round_limit_error_message(&diagnosis, &last_step_types)) -} - -async fn 
run_agent_planner_repair_loop( - app: &AppHandle, - client: &NodeClient, - bridge_client: &BridgeClient, - pool: &SshConnectionPool, - session_id: &str, - instance_id: &str, - target_location: TargetLocation, -) -> Result { - let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "initial", 0, &diagnosis); - if diagnosis_is_healthy(&diagnosis) { - return Ok(result_for_completion( - session_id, - 0, - PlanKind::Detect, - None, - "Remote Doctor repair skipped because diagnosis is already healthy.", - )); - } - - let mut previous_results: Vec = Vec::new(); - let mut last_command = None; - let mut last_step_types: Vec = Vec::new(); - let mut round_observations: Vec = Vec::new(); - - for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { - let kind = next_agent_plan_kind_for_round(&diagnosis, &previous_results); - let config_context = build_config_excerpt_context( - &read_target_config_raw(app, target_location, instance_id).await?, - ); - let phase = match kind { - PlanKind::Detect => "planning_detect", - PlanKind::Investigate => "planning_investigate", - PlanKind::Repair => "planning_repair", - }; - let line = match kind { - PlanKind::Detect => format!("Requesting detection plan for round {round}"), - PlanKind::Investigate => format!("Requesting investigation plan for round {round}"), - PlanKind::Repair => format!("Requesting repair plan for round {round}"), - }; - emit_progress(Some(app), session_id, round, phase, line, Some(kind), None); - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_request_context", - "protocol": "agent", - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "instanceId": instance_id, - "targetLocation": target_location, - "configContext": config_excerpt_log_summary(&config_context), - "diagnosisIssueCount": diagnosis.issues.len(), - "diagnosisIssues": 
diagnosis_issue_summaries(&diagnosis), - }), - ); - let plan = request_agent_plan( - app, - client, - bridge_client, - pool, - session_id, - round, - kind, - target_location, - instance_id, - &diagnosis, - &config_context, - &previous_results, - ) - .await?; - append_remote_doctor_log( - session_id, - json!({ - "event": "plan_received", - "protocol": "agent", - "round": round, - "planKind": match plan.plan_kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "planId": plan.plan_id, - "summary": plan.summary, - "commandCount": plan.commands.len(), - "healthy": plan.healthy, - "done": plan.done, - "success": plan.success, - }), - ); - previous_results.clear(); - last_step_types = agent_plan_step_types(&plan); - for command in &plan.commands { - last_command = Some(command.argv.clone()); - emit_progress( - Some(app), - session_id, - round, - match kind { - PlanKind::Detect => "executing_detect", - PlanKind::Investigate => "executing_investigate", - PlanKind::Repair => "executing_repair", - }, - format!( - "Running {} command: {}", - match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - command.argv.join(" ") - ), - Some(kind), - Some(command.argv.clone()), - ); - append_remote_doctor_log( - session_id, - json!({ - "event": "command_start", - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "argv": command.argv, - "timeoutSec": command.timeout_sec, - "purpose": command.purpose, - }), - ); - let command_result = - match execute_plan_command(app, pool, target_location, instance_id, &command.argv) - .await - { - Ok(result) => result, - Err(error) => { - return Err(plan_command_failure_message( - kind, - round, - &command.argv, - &error, - )); - } - }; - append_remote_doctor_log( - session_id, - json!({ - "event": "command_result", 
- "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "result": command_result, - }), - ); - if command_result.exit_code.unwrap_or(1) != 0 - && !command.continue_on_failure.unwrap_or(false) - { - return Err(format!( - "{} command failed in round {round}: {}", - match kind { - PlanKind::Detect => "Detect", - PlanKind::Investigate => "Investigate", - PlanKind::Repair => "Repair", - }, - command.argv.join(" ") - )); - } - previous_results.push(command_result); - } - - diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "post_round", round, &diagnosis); - if diagnosis_is_healthy(&diagnosis) { - return Ok(result_for_completion( - session_id, - round, - kind, - last_command, - "Remote Doctor repair completed with a healthy rescue diagnosis.", - )); - } - if matches!(kind, PlanKind::Repair) - && plan.done - && plan.commands.is_empty() - && diagnosis_has_only_non_auto_fixable_issues(&diagnosis) - { - return Ok(result_for_completion_with_warnings( - session_id, - round, - kind, - last_command, - "Remote Doctor completed all safe automatic repairs. 
Remaining issues are non-auto-fixable warnings.", - )); - } - - round_observations.push(RepairRoundObservation::new( - round, - &last_step_types, - &diagnosis, - )); - if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { - let observation = round_observations - .last() - .expect("stalled observations should contain current round"); - append_remote_doctor_log( - session_id, - json!({ - "event": "repair_plan_stalled", - "protocol": "agent", - "round": round, - "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, - "latestStepTypes": observation.step_types, - "issues": observation.issue_summaries, - }), - ); - return Err(stalled_plan_error_message(observation)); - } - } - - Err(round_limit_error_message(&diagnosis, &last_step_types)) -} - -async fn start_remote_doctor_repair_impl( - app: AppHandle, - pool: &SshConnectionPool, - instance_id: String, - target_location: String, -) -> Result { - let target_location = parse_target_location(&target_location)?; - if matches!(target_location, TargetLocation::RemoteOpenclaw) { - ensure_remote_target_connected(pool, &instance_id).await?; - } - let session_id = Uuid::new_v4().to_string(); - let gateway = remote_doctor_gateway_config()?; - let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref())?; - log_dev(format!( - "[remote_doctor] start session={} instance_id={} target_location={:?} gateway_url={} auth_token_override={}", - session_id, - instance_id, - target_location, - gateway.url, - gateway.auth_token_override.is_some() - )); - append_remote_doctor_log( - &session_id, - json!({ - "event": "session_start", - "instanceId": instance_id, - "targetLocation": target_location, - "gatewayUrl": gateway.url, - "gatewayAuthTokenOverride": gateway.auth_token_override.is_some(), - }), - ); - - let client = NodeClient::new(); - client.connect(&gateway.url, app.clone(), creds).await?; - let bridge = BridgeClient::new(); - - let forced_protocol = configured_remote_doctor_protocol(); - let 
active_protocol = forced_protocol.unwrap_or(default_remote_doctor_protocol()); - let pool_ref: &SshConnectionPool = pool; - let app_handle = app.clone(); - let bridge_client = bridge.clone(); - let gateway_url = gateway.url.clone(); - let gateway_auth_override = gateway.auth_token_override.clone(); - if matches!(active_protocol, RemoteDoctorProtocol::AgentPlanner) - && gateway_url_is_local(&gateway_url) - { - ensure_local_remote_doctor_agent_ready()?; - } - if protocol_requires_bridge(active_protocol) { - ensure_agent_bridge_connected( - &app, - &bridge, - &gateway_url, - gateway_auth_override.as_deref(), - &session_id, - ) - .await; - } - let result = match active_protocol { - RemoteDoctorProtocol::AgentPlanner => { - let agent = run_agent_planner_repair_loop( - &app, - &client, - &bridge_client, - pool_ref, - &session_id, - &instance_id, - target_location, - ) - .await; - - if forced_protocol.is_none() - && matches!(&agent, Err(error) if is_unknown_method_error(error)) - { - append_remote_doctor_log( - &session_id, - json!({ - "event": "protocol_fallback", - "from": "agent", - "to": "legacy_doctor", - "reason": agent.as_ref().err(), - }), - ); - run_remote_doctor_repair_loop( - Some(&app), - pool_ref, - &session_id, - &instance_id, - target_location, - |kind, round, previous_results| { - let method = match kind { - PlanKind::Detect => detect_method_name(), - PlanKind::Investigate => repair_method_name(), - PlanKind::Repair => repair_method_name(), - }; - let client = &client; - let session_id = &session_id; - let instance_id = &instance_id; - async move { - request_plan( - client, - &method, - kind, - session_id, - round, - target_location, - instance_id, - &previous_results, - ) - .await - } - }, - ) - .await - } else { - agent - } - } - RemoteDoctorProtocol::LegacyDoctor => { - let legacy = run_remote_doctor_repair_loop( - Some(&app), - pool_ref, - &session_id, - &instance_id, - target_location, - |kind, round, previous_results| { - let method = match kind { - 
PlanKind::Detect => detect_method_name(), - PlanKind::Investigate => repair_method_name(), - PlanKind::Repair => repair_method_name(), - }; - let client = &client; - let session_id = &session_id; - let instance_id = &instance_id; - async move { - request_plan( - client, - &method, - kind, - session_id, - round, - target_location, - instance_id, - &previous_results, - ) - .await - } - }, - ) - .await; - - if forced_protocol.is_none() - && matches!(&legacy, Err(error) if is_unknown_method_error(error)) - { - append_remote_doctor_log( - &session_id, - json!({ - "event": "protocol_fallback", - "from": "legacy_doctor", - "to": "clawpal_server", - "reason": legacy.as_ref().err(), - }), - ); - log_dev(format!( - "[remote_doctor] session={} protocol fallback legacy_doctor -> clawpal_server", - session_id - )); - run_clawpal_server_repair_loop( - &app, - &client, - &session_id, - &instance_id, - target_location, - ) - .await - } else { - legacy - } - } - RemoteDoctorProtocol::ClawpalServer => { - let clawpal_server = run_clawpal_server_repair_loop( - &app, - &client, - &session_id, - &instance_id, - target_location, - ) - .await; - if forced_protocol.is_none() - && matches!(&clawpal_server, Err(error) if is_unknown_method_error(error)) - { - append_remote_doctor_log( - &session_id, - json!({ - "event": "protocol_fallback", - "from": "clawpal_server", - "to": "agent", - "reason": clawpal_server.as_ref().err(), - }), - ); - let agent = run_remote_doctor_repair_loop( - Some(&app), - pool_ref, - &session_id, - &instance_id, - target_location, - |kind, round, previous_results| { - let client = &client; - let session_id = &session_id; - let instance_id = &instance_id; - let app_handle = app_handle.clone(); - let bridge_client = bridge_client.clone(); - let gateway_url = gateway_url.clone(); - let gateway_auth_override = gateway_auth_override.clone(); - let empty_diagnosis = empty_diagnosis(); - let empty_config = empty_config_excerpt_context(); - async move { - 
ensure_agent_bridge_connected( - &app_handle, - &bridge_client, - &gateway_url, - gateway_auth_override.as_deref(), - session_id, - ) - .await; - let text = if bridge_client.is_connected().await { - run_agent_request_with_bridge( - &app_handle, - client, - &bridge_client, - pool_ref, - target_location, - instance_id, - remote_doctor_agent_id(), - &remote_doctor_agent_session_key(session_id), - &build_agent_plan_prompt( - kind, - session_id, - round, - target_location, - instance_id, - &empty_diagnosis, - &empty_config, - &previous_results, - ), - ) - .await? - } else { - client - .run_agent_request( - remote_doctor_agent_id(), - &remote_doctor_agent_session_key(session_id), - &build_agent_plan_prompt( - kind, - session_id, - round, - target_location, - instance_id, - &empty_diagnosis, - &empty_config, - &previous_results, - ), - ) - .await? - }; - parse_agent_plan_response(kind, &text) - } - }, - ) - .await; - if matches!(&agent, Err(error) if is_unknown_method_error(error)) { - append_remote_doctor_log( - &session_id, - json!({ - "event": "protocol_fallback", - "from": "agent", - "to": "legacy_doctor", - "reason": agent.as_ref().err(), - }), - ); - run_remote_doctor_repair_loop( - Some(&app), - pool_ref, - &session_id, - &instance_id, - target_location, - |kind, round, previous_results| { - let method = match kind { - PlanKind::Detect => detect_method_name(), - PlanKind::Investigate => repair_method_name(), - PlanKind::Repair => repair_method_name(), - }; - let client = &client; - let session_id = &session_id; - let instance_id = &instance_id; - async move { - request_plan( - client, - &method, - kind, - session_id, - round, - target_location, - instance_id, - &previous_results, - ) - .await - } - }, - ) - .await - } else { - agent - } - } else { - clawpal_server - } - } - }; - - let _ = client.disconnect().await; - let _ = bridge.disconnect().await; - - match result { - Ok(done) => { - append_remote_doctor_log( - &session_id, - json!({ - "event": 
"session_complete", - "status": "completed", - "latestDiagnosisHealthy": done.latest_diagnosis_healthy, - }), - ); - Ok(done) - } - Err(error) => { - append_remote_doctor_log( - &session_id, - json!({ - "event": "session_complete", - "status": "failed", - "reason": error, - }), - ); - Err(error) - } - } -} - -#[tauri::command] -pub async fn start_remote_doctor_repair( - app: AppHandle, - pool: State<'_, SshConnectionPool>, - instance_id: String, - target_location: String, -) -> Result { - start_remote_doctor_repair_impl(app, &pool, instance_id, target_location).await -} - #[cfg(test)] mod tests { use super::*; + use std::fs::create_dir_all; + use std::io::Write; + use std::process::Command; + + use uuid::Uuid; + + use crate::remote_doctor::agent::{ + default_remote_doctor_protocol, detect_method_name, + ensure_agent_workspace_ready as ensure_local_remote_doctor_agent_ready, + next_agent_plan_kind, next_agent_plan_kind_for_round, protocol_requires_bridge, + protocol_runs_rescue_preflight, + }; + use crate::remote_doctor::config::{ + build_config_excerpt_context, config_excerpt_log_summary, + diagnosis_has_only_non_auto_fixable_issues, empty_config_excerpt_context, + load_gateway_config as remote_doctor_gateway_config, + }; + use crate::remote_doctor::plan::plan_command_failure_message; + use crate::remote_doctor::repair_loops::{ + round_limit_error_message as repair_loops_round_limit_error_message, + repair_plan_stalled as repair_loops_repair_plan_stalled, + run_clawpal_server_repair_loop as repair_loops_run_clawpal_server_repair_loop, + run_remote_doctor_repair_loop as repair_loops_run_remote_doctor_repair_loop, + start_remote_doctor_repair_impl as repair_loops_start_remote_doctor_repair_impl, + }; + use crate::remote_doctor::types::{ + diagnosis_issue_summaries, parse_target_location, RemoteDoctorProtocol, + RepairRoundObservation, + }; use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; use crate::ssh::SshHostConfig; use 
std::net::TcpStream; @@ -2123,14 +1055,14 @@ mod tests { })]); let step_types = vec!["doctorRediagnose".to_string()]; - assert!(!repair_plan_stalled( + assert!(!repair_loops_repair_plan_stalled( &[ RepairRoundObservation::new(1, &step_types, &diagnosis), RepairRoundObservation::new(2, &step_types, &diagnosis), ], 3, )); - assert!(repair_plan_stalled( + assert!(repair_loops_repair_plan_stalled( &[ RepairRoundObservation::new(1, &step_types, &diagnosis), RepairRoundObservation::new(2, &step_types, &diagnosis), @@ -2151,7 +1083,10 @@ mod tests { "fixHint": "Reset baseUrl", "source": "config" })]); - let error = round_limit_error_message(&diagnosis, &["doctorRediagnose".to_string()]); + let error = repair_loops_round_limit_error_message( + &diagnosis, + &["doctorRediagnose".to_string()], + ); assert!(error.contains("invalid.base_url")); assert!(error.contains("doctorRediagnose")); assert!(error.contains("Provider base URL is invalid")); @@ -2588,7 +1523,7 @@ CMD ["/usr/sbin/sshd", "-D"] let session_id = Uuid::new_v4().to_string(); let marker = "/tmp/clawpal-remote-doctor-fixed"; - let result = run_remote_doctor_repair_loop( + let result = repair_loops_run_remote_doctor_repair_loop( Option::<&AppHandle>::None, &pool, &session_id, @@ -2901,7 +1836,7 @@ CMD ["/usr/sbin/sshd", "-D"] .expect("connect live remote doctor gateway"); let session_id = Uuid::new_v4().to_string(); - let result = run_clawpal_server_repair_loop( + let result = repair_loops_run_clawpal_server_repair_loop( &app_handle, &client, &session_id, @@ -2972,7 +1907,7 @@ CMD ["/usr/sbin/sshd", "-D"] let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); let pool = app_handle.state::(); - let result = start_remote_doctor_repair_impl( + let result = repair_loops_start_remote_doctor_repair_impl( app_handle.clone(), &pool, format!("ssh:{}", cfg.id), @@ -3058,7 +1993,7 @@ CMD ["/usr/sbin/sshd", "-D"] .await .expect("corrupt remote config"); - let result = 
start_remote_doctor_repair_impl( + let result = repair_loops_start_remote_doctor_repair_impl( app_handle.clone(), &pool, cfg.id.clone(), diff --git a/src-tauri/src/remote_doctor/mod.rs b/src-tauri/src/remote_doctor/mod.rs index 5239c1a5..65c363f2 100644 --- a/src-tauri/src/remote_doctor/mod.rs +++ b/src-tauri/src/remote_doctor/mod.rs @@ -6,5 +6,5 @@ mod repair_loops; mod session; mod types; -pub use legacy::start_remote_doctor_repair; +pub use repair_loops::start_remote_doctor_repair; pub use types::RemoteDoctorRepairResult; diff --git a/src-tauri/src/remote_doctor/repair_loops.rs b/src-tauri/src/remote_doctor/repair_loops.rs index c8007366..def45d1a 100644 --- a/src-tauri/src/remote_doctor/repair_loops.rs +++ b/src-tauri/src/remote_doctor/repair_loops.rs @@ -1 +1,1108 @@ -// Placeholder for repair loop orchestration helpers. +use serde_json::{json, Value}; +use tauri::{AppHandle, Runtime, State}; +use uuid::Uuid; + +use super::agent::{ + build_agent_plan_prompt, configured_remote_doctor_protocol, default_remote_doctor_protocol, + detect_method_name, ensure_agent_workspace_ready, gateway_url_is_local, + next_agent_plan_kind_for_round, protocol_requires_bridge, protocol_runs_rescue_preflight, + remote_doctor_agent_id, remote_doctor_agent_session_key, repair_method_name, +}; +use super::config::{ + append_diagnosis_log, build_gateway_credentials, config_excerpt_log_summary, + diagnosis_has_only_non_auto_fixable_issues, diagnosis_is_healthy, + empty_config_excerpt_context, empty_diagnosis, load_gateway_config, read_target_config_raw, + run_rescue_diagnosis, +}; +use super::legacy::{ + ensure_agent_bridge_connected, ensure_remote_target_connected, parse_agent_plan_response, + repair_rescue_gateway_if_needed, request_agent_plan, run_agent_request_with_bridge, +}; +use super::plan::{ + agent_plan_step_types, apply_config_set, apply_config_unset, execute_command, + execute_plan_command, plan_command_failure_message, + report_clawpal_server_final_result, 
report_clawpal_server_step_result, + request_clawpal_server_plan, request_plan, +}; +use super::session::{ + append_session_log, emit_session_progress, result_for_completion, + result_for_completion_with_warnings, +}; +use super::types::{ + diagnosis_issue_summaries, parse_target_location, ClawpalServerPlanStep, CommandResult, + PlanKind, PlanResponse, RemoteDoctorProtocol, RemoteDoctorRepairResult, RepairRoundObservation, + TargetLocation, +}; +use crate::bridge_client::BridgeClient; +use crate::commands::logs::log_dev; +use crate::node_client::NodeClient; +use crate::ssh::SshConnectionPool; + +const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; +const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; + +fn is_unknown_method_error(error: &str) -> bool { + error.contains("unknown method") + || error.contains("\"code\":\"INVALID_REQUEST\"") + || error.contains("\"code\": \"INVALID_REQUEST\"") +} + +fn clawpal_server_step_type_summary(steps: &[ClawpalServerPlanStep]) -> Value { + let mut counts = serde_json::Map::new(); + for step in steps { + let entry = counts + .entry(step.step_type.clone()) + .or_insert_with(|| Value::from(0_u64)); + let next = entry.as_u64().unwrap_or(0) + 1; + *entry = Value::from(next); + } + Value::Object(counts) +} + +pub(crate) fn repair_plan_stalled( + observations: &[RepairRoundObservation], + threshold: usize, +) -> bool { + if observations.len() < threshold { + return false; + } + let recent = &observations[observations.len() - threshold..]; + let Some(first) = recent.first() else { + return false; + }; + !first.issue_summaries.is_empty() + && recent.iter().all(|entry| { + entry.step_types.len() == 1 + && entry.step_types[0] == "doctorRediagnose" + && entry.diagnosis_signature == first.diagnosis_signature + }) +} + +pub(crate) fn round_limit_error_message( + diagnosis: &crate::commands::RescuePrimaryDiagnosisResult, + last_step_types: &[String], +) -> String { + let issue_summary = serde_json::to_string(&diagnosis_issue_summaries(diagnosis)) + 
.unwrap_or_else(|_| "[]".to_string()); + let step_summary = if last_step_types.is_empty() { + "[]".to_string() + } else { + serde_json::to_string(last_step_types).unwrap_or_else(|_| "[]".to_string()) + }; + format!( + "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean rescue diagnosis result. Last issues: {issue_summary}. Last repair step types: {step_summary}." + ) +} + +pub(crate) fn stalled_plan_error_message(observation: &RepairRoundObservation) -> String { + let issue_summary = + serde_json::to_string(&observation.issue_summaries).unwrap_or_else(|_| "[]".to_string()); + let step_summary = + serde_json::to_string(&observation.step_types).unwrap_or_else(|_| "[]".to_string()); + format!( + "Remote Doctor did not return actionable repair steps by round {} after {} repeated rounds. Last issues: {}. Last repair step types: {}.", + observation.round, + REPAIR_PLAN_STALL_THRESHOLD, + issue_summary, + step_summary + ) +} + +pub(crate) async fn run_remote_doctor_repair_loop( + app: Option<&AppHandle>, + pool: &SshConnectionPool, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, + mut request_plan_fn: F, +) -> Result +where + F: FnMut(PlanKind, usize, Vec) -> Fut, + Fut: std::future::Future>, +{ + let mut previous_results: Vec = Vec::new(); + let mut last_command: Option> = None; + let mut last_plan_kind = PlanKind::Detect; + + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + emit_session_progress( + app, + session_id, + round, + "planning_detect", + format!("Requesting detection plan for round {round}"), + Some(PlanKind::Detect), + None, + ); + let detect_plan = + request_plan_fn(PlanKind::Detect, round, previous_results.clone()).await?; + append_session_log( + session_id, + json!({ + "event": "plan_received", + "round": round, + "planKind": "detect", + "planId": detect_plan.plan_id, + "summary": detect_plan.summary, + "commandCount": detect_plan.commands.len(), + "healthy": detect_plan.healthy, + "done": 
detect_plan.done, + }), + ); + if detect_plan.healthy || (detect_plan.done && detect_plan.commands.is_empty()) { + return Ok(RemoteDoctorRepairResult { + mode: "remoteDoctor".into(), + status: "completed".into(), + round, + phase: "completed".into(), + last_plan_kind: match last_plan_kind { + PlanKind::Detect => "detect".into(), + PlanKind::Investigate => "investigate".into(), + PlanKind::Repair => "repair".into(), + }, + latest_diagnosis_healthy: true, + last_command, + session_id: session_id.to_string(), + message: "Remote Doctor repair completed with a healthy detection result.".into(), + }); + } + previous_results.clear(); + for command in &detect_plan.commands { + last_command = Some(command.argv.clone()); + emit_session_progress( + app, + session_id, + round, + "executing_detect", + format!("Running detect command: {}", command.argv.join(" ")), + Some(PlanKind::Detect), + Some(command.argv.clone()), + ); + let command_result = + execute_command(pool, target_location, instance_id, &command.argv).await?; + append_session_log( + session_id, + json!({ + "event": "command_result", + "round": round, + "planKind": "detect", + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + previous_results.push(command_result); + return Err(format!( + "Detect command failed in round {round}: {}", + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + + emit_session_progress( + app, + session_id, + round, + "planning_repair", + format!("Requesting repair plan for round {round}"), + Some(PlanKind::Repair), + None, + ); + let repair_plan = + request_plan_fn(PlanKind::Repair, round, previous_results.clone()).await?; + last_plan_kind = PlanKind::Repair; + append_session_log( + session_id, + json!({ + "event": "plan_received", + "round": round, + "planKind": "repair", + "planId": repair_plan.plan_id, + "summary": repair_plan.summary, + "commandCount": 
repair_plan.commands.len(), + "success": repair_plan.success, + "done": repair_plan.done, + }), + ); + previous_results.clear(); + for command in &repair_plan.commands { + last_command = Some(command.argv.clone()); + emit_session_progress( + app, + session_id, + round, + "executing_repair", + format!("Running repair command: {}", command.argv.join(" ")), + Some(PlanKind::Repair), + Some(command.argv.clone()), + ); + let command_result = + execute_command(pool, target_location, instance_id, &command.argv).await?; + append_session_log( + session_id, + json!({ + "event": "command_result", + "round": round, + "planKind": "repair", + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + previous_results.push(command_result); + return Err(format!( + "Repair command failed in round {round}: {}", + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + } + + append_session_log( + session_id, + json!({ + "event": "session_complete", + "status": "failed", + "reason": "round_limit_exceeded", + }), + ); + Err(format!( + "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean detection result" + )) +} + +pub(crate) async fn run_clawpal_server_repair_loop( + app: &AppHandle, + client: &NodeClient, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, +) -> Result { + let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "initial", 0, &diagnosis); + if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { + repair_rescue_gateway_if_needed( + app, + session_id, + 0, + target_location, + instance_id, + &mut diagnosis, + ) + .await?; + } + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + 0, + PlanKind::Detect, + None, + "Remote Doctor repair skipped because diagnosis is already healthy.", + )); + } + + let 
mut last_command = None; + let mut round_observations: Vec = Vec::new(); + let mut last_step_types: Vec = Vec::new(); + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + emit_session_progress( + Some(app), + session_id, + round, + "planning_repair", + format!("Requesting remote repair plan for round {round}"), + Some(PlanKind::Repair), + None, + ); + let config_context = super::config::build_config_excerpt_context( + &read_target_config_raw(app, target_location, instance_id).await?, + ); + append_session_log( + session_id, + json!({ + "event": "plan_request_context", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "instanceId": instance_id, + "targetLocation": target_location, + "configContext": config_excerpt_log_summary(&config_context), + "diagnosisIssueCount": diagnosis.issues.len(), + "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), + }), + ); + if config_context.config_parse_error.is_some() { + append_session_log( + session_id, + json!({ + "event": "config_recovery_context", + "round": round, + "context": config_excerpt_log_summary(&config_context), + }), + ); + } + let plan = request_clawpal_server_plan( + client, + session_id, + round, + instance_id, + target_location, + &diagnosis, + &config_context, + ) + .await?; + append_session_log( + session_id, + json!({ + "event": "plan_received", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "planId": plan.plan_id, + "summary": plan.summary, + "stepCount": plan.steps.len(), + "stepTypeCounts": clawpal_server_step_type_summary(&plan.steps), + }), + ); + + let mut current_config = config_context.config_excerpt.clone(); + let mut rediagnosed = false; + let mut round_step_types = Vec::new(); + for (step_index, step) in plan.steps.iter().enumerate() { + round_step_types.push(step.step_type.clone()); + let mut result = CommandResult { + argv: Vec::new(), + exit_code: Some(0), + stdout: String::new(), + stderr: String::new(), + duration_ms: 0, + timed_out: 
false, + }; + let started = std::time::Instant::now(); + match step.step_type.as_str() { + "configSet" => { + let path = step.path.as_deref().ok_or("configSet step missing path")?; + let value = step.value.clone().ok_or("configSet step missing value")?; + emit_session_progress( + Some(app), + session_id, + round, + "executing_repair", + format!("Applying config set: {path}"), + Some(PlanKind::Repair), + None, + ); + apply_config_set(&mut current_config, path, value)?; + super::config::write_target_config(app, target_location, instance_id, ¤t_config).await?; + super::config::restart_target_gateway(app, target_location, instance_id).await?; + result.argv = vec!["configSet".into(), path.into()]; + result.stdout = format!("Updated {path}"); + } + "configUnset" => { + let path = step.path.as_deref().ok_or("configUnset step missing path")?; + emit_session_progress( + Some(app), + session_id, + round, + "executing_repair", + format!("Applying config unset: {path}"), + Some(PlanKind::Repair), + None, + ); + apply_config_unset(&mut current_config, path)?; + super::config::write_target_config(app, target_location, instance_id, ¤t_config).await?; + super::config::restart_target_gateway(app, target_location, instance_id).await?; + result.argv = vec!["configUnset".into(), path.into()]; + result.stdout = format!("Removed {path}"); + } + "doctorRediagnose" => { + emit_session_progress( + Some(app), + session_id, + round, + "planning_detect", + format!("Running rescue diagnosis after repair plan round {round}"), + Some(PlanKind::Detect), + None, + ); + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "post_step_rediagnose", round, &diagnosis); + rediagnosed = true; + result.argv = vec!["doctorRediagnose".into()]; + result.stdout = format!( + "Diagnosis status={} issues={}", + diagnosis.status, + diagnosis.issues.len() + ); + } + other => { + result.exit_code = Some(1); + result.stderr = format!("Unsupported 
clawpal-server step type: {other}"); + } + } + result.duration_ms = started.elapsed().as_millis() as u64; + last_command = Some(result.argv.clone()); + append_session_log( + session_id, + json!({ + "event": "command_result", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "stepIndex": step_index, + "step": step, + "result": result, + }), + ); + report_clawpal_server_step_result(client, &plan.plan_id, step_index, step, &result) + .await; + if result.exit_code.unwrap_or(1) != 0 { + return Err(result.stderr); + } + } + + if !rediagnosed { + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "post_round", round, &diagnosis); + } + if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { + repair_rescue_gateway_if_needed( + app, + session_id, + round, + target_location, + instance_id, + &mut diagnosis, + ) + .await?; + } + last_step_types = round_step_types.clone(); + round_observations.push(RepairRoundObservation::new( + round, + &round_step_types, + &diagnosis, + )); + if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { + let observation = round_observations + .last() + .expect("stalled observations should contain current round"); + append_session_log( + session_id, + json!({ + "event": "repair_plan_stalled", + "protocol": "clawpal_server", + "round": round, + "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, + "latestStepTypes": observation.step_types, + "issues": observation.issue_summaries, + }), + ); + return Err(stalled_plan_error_message(observation)); + } + let healthy = diagnosis_is_healthy(&diagnosis); + report_clawpal_server_final_result(client, &plan.plan_id, healthy, &diagnosis).await; + if healthy { + return Ok(result_for_completion( + session_id, + round, + PlanKind::Repair, + last_command, + "Remote Doctor repair completed with a healthy rescue diagnosis.", + )); + } + } + + Err(round_limit_error_message(&diagnosis, 
&last_step_types)) +} + +pub(crate) async fn run_agent_planner_repair_loop( + app: &AppHandle, + client: &NodeClient, + bridge_client: &BridgeClient, + pool: &SshConnectionPool, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, +) -> Result { + let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "initial", 0, &diagnosis); + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + 0, + PlanKind::Detect, + None, + "Remote Doctor repair skipped because diagnosis is already healthy.", + )); + } + + let mut previous_results: Vec = Vec::new(); + let mut last_command = None; + let mut last_step_types: Vec = Vec::new(); + let mut round_observations: Vec = Vec::new(); + + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + let kind = next_agent_plan_kind_for_round(&diagnosis, &previous_results); + let config_context = super::config::build_config_excerpt_context( + &read_target_config_raw(app, target_location, instance_id).await?, + ); + let phase = match kind { + PlanKind::Detect => "planning_detect", + PlanKind::Investigate => "planning_investigate", + PlanKind::Repair => "planning_repair", + }; + let line = match kind { + PlanKind::Detect => format!("Requesting detection plan for round {round}"), + PlanKind::Investigate => format!("Requesting investigation plan for round {round}"), + PlanKind::Repair => format!("Requesting repair plan for round {round}"), + }; + emit_session_progress(Some(app), session_id, round, phase, line, Some(kind), None); + append_session_log( + session_id, + json!({ + "event": "plan_request_context", + "protocol": "agent", + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "instanceId": instance_id, + "targetLocation": target_location, + "configContext": config_excerpt_log_summary(&config_context), + "diagnosisIssueCount": 
diagnosis.issues.len(), + "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), + }), + ); + let plan = request_agent_plan( + app, + client, + bridge_client, + pool, + session_id, + round, + kind, + target_location, + instance_id, + &diagnosis, + &config_context, + &previous_results, + ) + .await?; + append_session_log( + session_id, + json!({ + "event": "plan_received", + "protocol": "agent", + "round": round, + "planKind": match plan.plan_kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "planId": plan.plan_id, + "summary": plan.summary, + "commandCount": plan.commands.len(), + "healthy": plan.healthy, + "done": plan.done, + "success": plan.success, + }), + ); + previous_results.clear(); + last_step_types = agent_plan_step_types(&plan); + for command in &plan.commands { + last_command = Some(command.argv.clone()); + emit_session_progress( + Some(app), + session_id, + round, + match kind { + PlanKind::Detect => "executing_detect", + PlanKind::Investigate => "executing_investigate", + PlanKind::Repair => "executing_repair", + }, + format!( + "Running {} command: {}", + match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + command.argv.join(" ") + ), + Some(kind), + Some(command.argv.clone()), + ); + append_session_log( + session_id, + json!({ + "event": "command_start", + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "argv": command.argv, + "timeoutSec": command.timeout_sec, + "purpose": command.purpose, + }), + ); + let command_result = + match execute_plan_command(app, pool, target_location, instance_id, &command.argv) + .await + { + Ok(result) => result, + Err(error) => { + return Err(plan_command_failure_message( + kind, + round, + &command.argv, + &error, + )); + } + }; + append_session_log( + session_id, + 
json!({ + "event": "command_result", + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + return Err(format!( + "{} command failed in round {round}: {}", + match kind { + PlanKind::Detect => "Detect", + PlanKind::Investigate => "Investigate", + PlanKind::Repair => "Repair", + }, + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "post_round", round, &diagnosis); + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + round, + kind, + last_command, + "Remote Doctor repair completed with a healthy rescue diagnosis.", + )); + } + if matches!(kind, PlanKind::Repair) + && plan.done + && plan.commands.is_empty() + && diagnosis_has_only_non_auto_fixable_issues(&diagnosis) + { + return Ok(result_for_completion_with_warnings( + session_id, + round, + kind, + last_command, + "Remote Doctor completed all safe automatic repairs. 
Remaining issues are non-auto-fixable warnings.", + )); + } + + round_observations.push(RepairRoundObservation::new( + round, + &last_step_types, + &diagnosis, + )); + if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { + let observation = round_observations + .last() + .expect("stalled observations should contain current round"); + append_session_log( + session_id, + json!({ + "event": "repair_plan_stalled", + "protocol": "agent", + "round": round, + "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, + "latestStepTypes": observation.step_types, + "issues": observation.issue_summaries, + }), + ); + return Err(stalled_plan_error_message(observation)); + } + } + + Err(round_limit_error_message(&diagnosis, &last_step_types)) +} + +pub(crate) async fn start_remote_doctor_repair_impl( + app: AppHandle, + pool: &SshConnectionPool, + instance_id: String, + target_location: String, +) -> Result { + let target_location = parse_target_location(&target_location)?; + if matches!(target_location, TargetLocation::RemoteOpenclaw) { + ensure_remote_target_connected(pool, &instance_id).await?; + } + let session_id = Uuid::new_v4().to_string(); + let gateway = load_gateway_config()?; + let creds = build_gateway_credentials(gateway.auth_token_override.as_deref())?; + log_dev(format!( + "[remote_doctor] start session={} instance_id={} target_location={:?} gateway_url={} auth_token_override={}", + session_id, + instance_id, + target_location, + gateway.url, + gateway.auth_token_override.is_some() + )); + append_session_log( + &session_id, + json!({ + "event": "session_start", + "instanceId": instance_id, + "targetLocation": target_location, + "gatewayUrl": gateway.url, + "gatewayAuthTokenOverride": gateway.auth_token_override.is_some(), + }), + ); + + let client = NodeClient::new(); + client.connect(&gateway.url, app.clone(), creds).await?; + let bridge = BridgeClient::new(); + + let forced_protocol = configured_remote_doctor_protocol(); + let active_protocol = 
forced_protocol.unwrap_or(default_remote_doctor_protocol()); + let pool_ref: &SshConnectionPool = pool; + let app_handle = app.clone(); + let bridge_client = bridge.clone(); + let gateway_url = gateway.url.clone(); + let gateway_auth_override = gateway.auth_token_override.clone(); + if matches!(active_protocol, RemoteDoctorProtocol::AgentPlanner) + && gateway_url_is_local(&gateway_url) + { + ensure_agent_workspace_ready()?; + } + if protocol_requires_bridge(active_protocol) { + ensure_agent_bridge_connected( + &app, + &bridge, + &gateway_url, + gateway_auth_override.as_deref(), + &session_id, + ) + .await; + } + let result = match active_protocol { + RemoteDoctorProtocol::AgentPlanner => { + let agent = run_agent_planner_repair_loop( + &app, + &client, + &bridge_client, + pool_ref, + &session_id, + &instance_id, + target_location, + ) + .await; + + if forced_protocol.is_none() + && matches!(&agent, Err(error) if is_unknown_method_error(error)) + { + append_session_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "agent", + "to": "legacy_doctor", + "reason": agent.as_ref().err(), + }), + ); + run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let method = match kind { + PlanKind::Detect => detect_method_name(), + PlanKind::Investigate => repair_method_name(), + PlanKind::Repair => repair_method_name(), + }; + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + async move { + request_plan( + client, + &method, + kind, + session_id, + round, + target_location, + instance_id, + &previous_results, + ) + .await + } + }, + ) + .await + } else { + agent + } + } + RemoteDoctorProtocol::LegacyDoctor => { + let legacy = run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let method = match kind { + PlanKind::Detect => 
detect_method_name(), + PlanKind::Investigate => repair_method_name(), + PlanKind::Repair => repair_method_name(), + }; + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + async move { + request_plan( + client, + &method, + kind, + session_id, + round, + target_location, + instance_id, + &previous_results, + ) + .await + } + }, + ) + .await; + + if forced_protocol.is_none() + && matches!(&legacy, Err(error) if is_unknown_method_error(error)) + { + append_session_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "legacy_doctor", + "to": "clawpal_server", + "reason": legacy.as_ref().err(), + }), + ); + log_dev(format!( + "[remote_doctor] session={} protocol fallback legacy_doctor -> clawpal_server", + session_id + )); + run_clawpal_server_repair_loop( + &app, + &client, + &session_id, + &instance_id, + target_location, + ) + .await + } else { + legacy + } + } + RemoteDoctorProtocol::ClawpalServer => { + let clawpal_server = run_clawpal_server_repair_loop( + &app, + &client, + &session_id, + &instance_id, + target_location, + ) + .await; + if forced_protocol.is_none() + && matches!(&clawpal_server, Err(error) if is_unknown_method_error(error)) + { + append_session_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "clawpal_server", + "to": "agent", + "reason": clawpal_server.as_ref().err(), + }), + ); + let agent = run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + let app_handle = app_handle.clone(); + let bridge_client = bridge_client.clone(); + let gateway_url = gateway_url.clone(); + let gateway_auth_override = gateway_auth_override.clone(); + let empty_diagnosis = empty_diagnosis(); + let empty_config = empty_config_excerpt_context(); + async move { + ensure_agent_bridge_connected( + &app_handle, + 
&bridge_client, + &gateway_url, + gateway_auth_override.as_deref(), + session_id, + ) + .await; + let text = if bridge_client.is_connected().await { + run_agent_request_with_bridge( + &app_handle, + client, + &bridge_client, + pool_ref, + target_location, + instance_id, + remote_doctor_agent_id(), + &remote_doctor_agent_session_key(session_id), + &build_agent_plan_prompt( + kind, + session_id, + round, + target_location, + instance_id, + &empty_diagnosis, + &empty_config, + &previous_results, + ), + ) + .await? + } else { + client + .run_agent_request( + remote_doctor_agent_id(), + &remote_doctor_agent_session_key(session_id), + &build_agent_plan_prompt( + kind, + session_id, + round, + target_location, + instance_id, + &empty_diagnosis, + &empty_config, + &previous_results, + ), + ) + .await? + }; + parse_agent_plan_response(kind, &text) + } + }, + ) + .await; + if matches!(&agent, Err(error) if is_unknown_method_error(error)) { + append_session_log( + &session_id, + json!({ + "event": "protocol_fallback", + "from": "agent", + "to": "legacy_doctor", + "reason": agent.as_ref().err(), + }), + ); + run_remote_doctor_repair_loop( + Some(&app), + pool_ref, + &session_id, + &instance_id, + target_location, + |kind, round, previous_results| { + let method = match kind { + PlanKind::Detect => detect_method_name(), + PlanKind::Investigate => repair_method_name(), + PlanKind::Repair => repair_method_name(), + }; + let client = &client; + let session_id = &session_id; + let instance_id = &instance_id; + async move { + request_plan( + client, + &method, + kind, + session_id, + round, + target_location, + instance_id, + &previous_results, + ) + .await + } + }, + ) + .await + } else { + agent + } + } else { + clawpal_server + } + } + }; + + let _ = client.disconnect().await; + let _ = bridge.disconnect().await; + + match result { + Ok(done) => { + append_session_log( + &session_id, + json!({ + "event": "session_complete", + "status": "completed", + "latestDiagnosisHealthy": 
done.latest_diagnosis_healthy, + }), + ); + Ok(done) + } + Err(error) => { + append_session_log( + &session_id, + json!({ + "event": "session_complete", + "status": "failed", + "reason": error, + }), + ); + Err(error) + } + } +} + +#[tauri::command] +pub async fn start_remote_doctor_repair( + app: AppHandle, + pool: State<'_, SshConnectionPool>, + instance_id: String, + target_location: String, +) -> Result { + start_remote_doctor_repair_impl(app, &pool, instance_id, target_location).await +} From 1c1cc69a067d71682a0daf1ffef37ae910fb4b8d Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 15:35:54 +0800 Subject: [PATCH 12/20] refactor: split remote doctor unit tests by module --- src-tauri/src/remote_doctor/agent.rs | 131 +++++ src-tauri/src/remote_doctor/config.rs | 81 +++ src-tauri/src/remote_doctor/legacy.rs | 614 +------------------- src-tauri/src/remote_doctor/plan.rs | 78 +++ src-tauri/src/remote_doctor/repair_loops.rs | 79 +++ src-tauri/src/remote_doctor/types.rs | 32 + 6 files changed, 407 insertions(+), 608 deletions(-) diff --git a/src-tauri/src/remote_doctor/agent.rs b/src-tauri/src/remote_doctor/agent.rs index b2dac5f0..9ca1fcaf 100644 --- a/src-tauri/src/remote_doctor/agent.rs +++ b/src-tauri/src/remote_doctor/agent.rs @@ -283,10 +283,102 @@ mod tests { assert!(prompt.contains("\"configExcerpt\"")); } + #[test] + fn unreadable_config_requires_investigate_plan_kind() { + let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { + id: "issue-1".into(), + code: "primary.config.unreadable".into(), + severity: "error".into(), + message: "Primary configuration could not be read".into(), + auto_fixable: false, + fix_hint: Some("Repair".into()), + source: "primary".into(), + }]); + assert_eq!(next_agent_plan_kind(&diagnosis), PlanKind::Investigate); + } + + #[test] + fn investigate_prompt_requires_read_only_diagnosis_steps() { + let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { + id: "issue-1".into(), + code: 
"primary.config.unreadable".into(), + severity: "error".into(), + message: "Primary configuration could not be read".into(), + auto_fixable: false, + fix_hint: Some("Repair".into()), + source: "primary".into(), + }]); + let prompt = build_agent_plan_prompt( + PlanKind::Investigate, + "sess-1", + 1, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &diagnosis, + &ConfigExcerptContext { + config_excerpt: serde_json::Value::Null, + config_excerpt_raw: Some("{\n ddd\n}".into()), + config_parse_error: Some("Failed to parse target config: key must be a string".into()), + }, + &[], + ); + assert!(prompt.contains("read-only")); + assert!(prompt.contains("Do not modify files")); + assert!(prompt.contains("\"planKind\": \"investigate\"")); + assert!(prompt.contains("configParseError")); + } + + #[test] + fn investigate_prompt_discourages_long_running_log_commands() { + let prompt = build_agent_plan_prompt( + PlanKind::Investigate, + "sess-1", + 1, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &sample_diagnosis(Vec::new()), + &ConfigExcerptContext { + config_excerpt: serde_json::Value::Null, + config_excerpt_raw: None, + config_parse_error: None, + }, + &[], + ); + assert!(prompt.contains("Do not run follow/tail commands")); + assert!(prompt.contains("bounded")); + assert!(prompt.contains("Do not use heredocs")); + } + + #[test] + fn repair_prompt_discourages_unverified_openclaw_subcommands() { + let prompt = build_agent_plan_prompt( + PlanKind::Repair, + "sess-1", + 2, + TargetLocation::RemoteOpenclaw, + "ssh:vm1", + &sample_diagnosis(Vec::new()), + &ConfigExcerptContext { + config_excerpt: serde_json::Value::Null, + config_excerpt_raw: None, + config_parse_error: None, + }, + &[], + ); + assert!(prompt.contains("Do not invent OpenClaw subcommands")); + assert!(prompt.contains("Do not use `openclaw auth")); + assert!(prompt.contains("Do not use `openclaw doctor --json`")); + assert!(!prompt.contains("- `openclaw doctor --json`")); + } + #[test] fn 
remote_doctor_agent_id_is_dedicated() { assert_eq!(remote_doctor_agent_id(), "clawpal-remote-doctor"); assert!(!remote_doctor_agent_session_key("sess-1").contains("main")); + assert!( + remote_doctor_agent_session_key("sess-1") + .starts_with("agent:clawpal-remote-doctor:") + ); } #[test] @@ -325,9 +417,48 @@ mod tests { let _ = std::fs::remove_dir_all(&temp_root); panic!("ensure agent ready: {error}"); } + let cfg: serde_json::Value = serde_json::from_str( + &std::fs::read_to_string(openclaw_dir.join("openclaw.json")).expect("read config"), + ) + .expect("parse config"); + let agent = cfg["agents"]["list"] + .as_array() + .and_then(|agents| { + agents.iter().find(|agent| { + agent.get("id").and_then(serde_json::Value::as_str) + == Some(remote_doctor_agent_id()) + }) + }) + .expect("dedicated agent entry"); + let workspace = agent["workspace"] + .as_str() + .expect("agent workspace") + .replace("~/", &format!("{}/", home_dir.to_string_lossy())); + for file_name in ["IDENTITY.md", "USER.md", "BOOTSTRAP.md", "AGENTS.md"] { + let content = std::fs::read_to_string(std::path::Path::new(&workspace).join(file_name)) + .unwrap_or_else(|error| panic!("read {file_name}: {error}")); + assert!(!content.trim().is_empty(), "{file_name} should not be empty"); + } let _ = std::fs::remove_dir_all(&temp_root); } + #[test] + fn only_agent_planner_protocol_requires_bridge() { + assert!(protocol_requires_bridge(RemoteDoctorProtocol::AgentPlanner)); + assert!(!protocol_requires_bridge(RemoteDoctorProtocol::ClawpalServer)); + assert!(!protocol_requires_bridge(RemoteDoctorProtocol::LegacyDoctor)); + } + + #[test] + fn clawpal_server_protocol_skips_local_rescue_preflight() { + assert!(!protocol_runs_rescue_preflight( + RemoteDoctorProtocol::ClawpalServer + )); + assert!(!protocol_runs_rescue_preflight( + RemoteDoctorProtocol::AgentPlanner + )); + } + fn sample_diagnosis(issues: Vec) -> RescuePrimaryDiagnosisResult { RescuePrimaryDiagnosisResult { status: "degraded".to_string(), diff --git 
a/src-tauri/src/remote_doctor/config.rs b/src-tauri/src/remote_doctor/config.rs index dc08becf..69710032 100644 --- a/src-tauri/src/remote_doctor/config.rs +++ b/src-tauri/src/remote_doctor/config.rs @@ -465,6 +465,60 @@ mod tests { .contains("Failed to parse target config")); } + #[test] + fn unreadable_config_context_summary_marks_excerpt_missing() { + let context = build_config_excerpt_context("{\n ddd\n}"); + let summary = config_excerpt_log_summary(&context); + assert_eq!(summary["configExcerptPresent"], json!(false)); + assert_eq!(summary["configExcerptRawPresent"], json!(true)); + assert!(summary["configParseError"] + .as_str() + .unwrap_or_default() + .contains("Failed to parse target config")); + } + + #[test] + fn diagnosis_missing_rescue_profile_is_detected() { + let diagnosis = empty_diagnosis_with_issues(vec![json!({ + "id": "rescue.profile.missing", + "code": "rescue.profile.missing", + "severity": "error", + "message": "Rescue profile missing", + "autoFixable": false, + "fixHint": "Activate Rescue Bot first", + "source": "rescue" + })]); + assert!(diagnosis_missing_rescue_profile(&diagnosis)); + } + + #[test] + fn diagnosis_unhealthy_rescue_gateway_is_detected() { + let diagnosis = empty_diagnosis_with_issues(vec![json!({ + "id": "rescue.gateway.unhealthy", + "code": "rescue.gateway.unhealthy", + "severity": "warn", + "message": "Rescue gateway unhealthy", + "autoFixable": false, + "fixHint": "Inspect rescue gateway", + "source": "rescue" + })]); + assert!(diagnosis_unhealthy_rescue_gateway(&diagnosis)); + } + + #[test] + fn non_auto_fixable_warning_only_diagnosis_is_terminal() { + let diagnosis = empty_diagnosis_with_issues(vec![json!({ + "id": "rescue.gateway.unhealthy", + "code": "rescue.gateway.unhealthy", + "severity": "warn", + "message": "Rescue gateway unhealthy", + "autoFixable": false, + "fixHint": "Inspect rescue gateway", + "source": "rescue" + })]); + assert!(diagnosis_has_only_non_auto_fixable_issues(&diagnosis)); + } + #[test] fn 
remote_target_host_id_candidates_include_exact_and_stripped_ids() { assert_eq!( @@ -511,4 +565,31 @@ mod tests { assert!(result.contains("\"ok\": true")); } + + fn empty_diagnosis_with_issues(issues: Vec) -> RescuePrimaryDiagnosisResult { + serde_json::from_value(json!({ + "status": if issues.is_empty() { "healthy" } else { "broken" }, + "checkedAt": "2026-03-18T00:00:00Z", + "targetProfile": "primary", + "rescueProfile": "rescue", + "rescueConfigured": true, + "rescuePort": 18789, + "summary": { + "status": if issues.is_empty() { "healthy" } else { "broken" }, + "headline": if issues.is_empty() { "Healthy" } else { "Broken" }, + "recommendedAction": if issues.is_empty() { "No action needed" } else { "Repair issues" }, + "fixableIssueCount": issues.len(), + "selectedFixIssueIds": issues.iter().filter_map(|issue| issue.get("id").and_then(Value::as_str)).collect::>(), + "rootCauseHypotheses": [], + "fixSteps": [], + "confidence": 0.8, + "citations": [], + "versionAwareness": null + }, + "sections": [], + "checks": [], + "issues": issues + })) + .expect("sample diagnosis") + } } diff --git a/src-tauri/src/remote_doctor/legacy.rs b/src-tauri/src/remote_doctor/legacy.rs index 306c5e22..b455e584 100644 --- a/src-tauri/src/remote_doctor/legacy.rs +++ b/src-tauri/src/remote_doctor/legacy.rs @@ -13,14 +13,11 @@ use super::agent::{ build_agent_plan_prompt, remote_doctor_agent_id, remote_doctor_agent_session_key, }; use super::plan::{ - apply_config_set, apply_config_unset, config_read_response, decode_base64_config_payload, - execute_clawpal_command, execute_command, execute_invoke_payload, parse_invoke_argv, - parse_plan_response, plan_command_uses_internal_clawpal_tool, request_plan, build_shell_command, - shell_escape, validate_clawpal_exec_args, validate_plan_command_argv, + execute_command, execute_invoke_payload, parse_plan_response, }; use super::session::{append_session_log as append_remote_doctor_log, emit_session_progress as emit_progress}; use super::types::{ - 
CommandResult, ConfigExcerptContext, PlanCommand, PlanKind, PlanResponse, TargetLocation, + CommandResult, ConfigExcerptContext, PlanKind, PlanResponse, TargetLocation, }; use crate::bridge_client::BridgeClient; use crate::commands::{manage_rescue_bot, remote_manage_rescue_bot, RescuePrimaryDiagnosisResult}; @@ -544,80 +541,20 @@ mod tests { use uuid::Uuid; - use crate::remote_doctor::agent::{ - default_remote_doctor_protocol, detect_method_name, - ensure_agent_workspace_ready as ensure_local_remote_doctor_agent_ready, - next_agent_plan_kind, next_agent_plan_kind_for_round, protocol_requires_bridge, - protocol_runs_rescue_preflight, - }; - use crate::remote_doctor::config::{ - build_config_excerpt_context, config_excerpt_log_summary, - diagnosis_has_only_non_auto_fixable_issues, empty_config_excerpt_context, - load_gateway_config as remote_doctor_gateway_config, - }; - use crate::remote_doctor::plan::plan_command_failure_message; + use crate::remote_doctor::agent::detect_method_name; + use crate::remote_doctor::config::load_gateway_config as remote_doctor_gateway_config; + use crate::remote_doctor::plan::request_plan; use crate::remote_doctor::repair_loops::{ - round_limit_error_message as repair_loops_round_limit_error_message, - repair_plan_stalled as repair_loops_repair_plan_stalled, run_clawpal_server_repair_loop as repair_loops_run_clawpal_server_repair_loop, run_remote_doctor_repair_loop as repair_loops_run_remote_doctor_repair_loop, start_remote_doctor_repair_impl as repair_loops_start_remote_doctor_repair_impl, }; - use crate::remote_doctor::types::{ - diagnosis_issue_summaries, parse_target_location, RemoteDoctorProtocol, - RepairRoundObservation, - }; + use crate::remote_doctor::types::PlanCommand; use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; use crate::ssh::SshHostConfig; use std::net::TcpStream; use tauri::test::mock_app; - #[test] - fn build_shell_command_escapes_single_quotes() { - let command = 
build_shell_command(&["echo".into(), "a'b".into()]); - assert_eq!(command, "'echo' 'a'\\''b'"); - } - - #[test] - fn parse_target_location_rejects_unknown_values() { - let error = parse_target_location("elsewhere").unwrap_err(); - assert!(error.contains("Unsupported target location")); - } - - #[test] - fn apply_config_set_creates_missing_object_path() { - let mut value = json!({}); - apply_config_set( - &mut value, - "models.providers.openai.baseUrl", - json!("http://127.0.0.1:3000/v1"), - ) - .expect("config set"); - assert_eq!( - value - .pointer("/models/providers/openai/baseUrl") - .and_then(Value::as_str), - Some("http://127.0.0.1:3000/v1") - ); - } - - #[test] - fn apply_config_unset_removes_existing_leaf() { - let mut value = json!({ - "models": { - "providers": { - "openai": { - "baseUrl": "http://127.0.0.1:3000/v1", - "models": [{"id": "gpt-4.1"}] - } - } - } - }); - apply_config_unset(&mut value, "models.providers.openai.baseUrl").expect("config unset"); - assert!(value.pointer("/models/providers/openai/baseUrl").is_none()); - assert!(value.pointer("/models/providers/openai/models").is_some()); - } - #[test] fn parse_agent_plan_response_reads_json_payload() { let text = r#"preface @@ -628,545 +565,6 @@ mod tests { assert_eq!(plan.commands[0].argv, vec!["openclaw", "doctor", "--json"]); } - #[test] - fn build_agent_plan_prompt_mentions_target_and_schema() { - let prompt = build_agent_plan_prompt( - PlanKind::Repair, - "sess-1", - 3, - TargetLocation::RemoteOpenclaw, - "ssh:vm1", - &sample_diagnosis(Vec::new()), - &ConfigExcerptContext { - config_excerpt: json!({"ok": true}), - config_excerpt_raw: None, - config_parse_error: None, - }, - &[], - ); - assert!(prompt.contains("Task: produce the next repair plan")); - assert!(prompt.contains("Target location: remote_openclaw")); - assert!(prompt.contains("\"planKind\": \"repair\"")); - assert!(prompt.contains("\"configExcerpt\"")); - assert!(prompt.contains("clawpal doctor probe-openclaw")); - 
assert!(prompt.contains("openclaw gateway status")); - assert!(prompt.contains("Output valid JSON only.")); - } - - #[test] - fn default_remote_doctor_protocol_prefers_agent() { - assert_eq!( - default_remote_doctor_protocol(), - RemoteDoctorProtocol::AgentPlanner - ); - } - - #[test] - fn unreadable_config_requires_investigate_plan_kind() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "primary.config.unreadable", - "code": "primary.config.unreadable", - "severity": "error", - "message": "Primary configuration could not be read", - "autoFixable": false, - "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", - "source": "primary" - })]); - assert_eq!(next_agent_plan_kind(&diagnosis), PlanKind::Investigate); - } - - #[test] - fn unreadable_config_switches_to_repair_after_investigation_results_exist() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "primary.config.unreadable", - "code": "primary.config.unreadable", - "severity": "error", - "message": "Primary configuration could not be read", - "autoFixable": false, - "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", - "source": "primary" - })]); - let previous_results = vec![CommandResult { - argv: vec!["clawpal".into(), "doctor".into(), "config-read-raw".into()], - exit_code: Some(0), - stdout: "{\"raw\":\"{\\n ddd\\n}\"}".into(), - stderr: String::new(), - duration_ms: 1, - timed_out: false, - }]; - assert_eq!( - next_agent_plan_kind_for_round(&diagnosis, &previous_results), - PlanKind::Repair - ); - } - - #[test] - fn non_auto_fixable_warning_only_diagnosis_is_terminal() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "rescue.gateway.unhealthy", - "code": "rescue.gateway.unhealthy", - "severity": "warn", - "message": "Rescue gateway is not healthy", - "autoFixable": false, - "fixHint": "Inspect rescue gateway logs before using failover", - "source": "rescue" - })]); - 
assert!(diagnosis_has_only_non_auto_fixable_issues(&diagnosis)); - } - - #[test] - fn investigate_prompt_requires_read_only_diagnosis_steps() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "primary.config.unreadable", - "code": "primary.config.unreadable", - "severity": "error", - "message": "Primary configuration could not be read", - "autoFixable": false, - "fixHint": "Repair openclaw.json parsing errors and re-run the primary recovery check", - "source": "primary" - })]); - let prompt = build_agent_plan_prompt( - PlanKind::Investigate, - "sess-1", - 1, - TargetLocation::RemoteOpenclaw, - "ssh:vm1", - &diagnosis, - &build_config_excerpt_context("{\n ddd\n}"), - &[], - ); - assert!(prompt.contains("read-only")); - assert!(prompt.contains("Do not modify files")); - assert!(prompt.contains("\"planKind\": \"investigate\"")); - assert!(prompt.contains("configParseError")); - } - - #[test] - fn investigate_prompt_discourages_long_running_log_commands() { - let prompt = build_agent_plan_prompt( - PlanKind::Investigate, - "sess-1", - 1, - TargetLocation::RemoteOpenclaw, - "ssh:vm1", - &sample_diagnosis(Vec::new()), - &empty_config_excerpt_context(), - &[], - ); - assert!(prompt.contains("Do not run follow/tail commands")); - assert!(prompt.contains("bounded")); - assert!(prompt.contains("Do not use heredocs")); - } - - #[test] - fn repair_prompt_discourages_unverified_openclaw_subcommands() { - let prompt = build_agent_plan_prompt( - PlanKind::Repair, - "sess-1", - 2, - TargetLocation::RemoteOpenclaw, - "ssh:vm1", - &sample_diagnosis(Vec::new()), - &empty_config_excerpt_context(), - &[], - ); - assert!(prompt.contains("Do not invent OpenClaw subcommands")); - assert!(prompt.contains("Do not use `openclaw auth")); - assert!(prompt.contains("Do not use `openclaw doctor --json`")); - assert!(!prompt.contains("- `openclaw doctor --json`")); - } - - #[test] - fn remote_doctor_agent_id_is_dedicated() { - assert_eq!(remote_doctor_agent_id(), "clawpal-remote-doctor"); 
- assert!(!remote_doctor_agent_session_key("sess-1").contains("main")); - assert!( - remote_doctor_agent_session_key("sess-1").starts_with("agent:clawpal-remote-doctor:") - ); - } - - #[test] - fn ensure_local_remote_doctor_agent_creates_workspace_bootstrap_files() { - let temp_root = std::env::temp_dir().join(format!( - "clawpal-remote-doctor-agent-test-{}", - Uuid::new_v4() - )); - let home_dir = temp_root.join("home"); - let clawpal_dir = temp_root.join("clawpal"); - let openclaw_dir = home_dir.join(".openclaw"); - std::fs::create_dir_all(&openclaw_dir).expect("create openclaw dir"); - std::fs::create_dir_all(&clawpal_dir).expect("create clawpal dir"); - std::fs::write( - openclaw_dir.join("openclaw.json"), - r#"{ - "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } }, - "agents": { - "defaults": { "model": "openai/gpt-4o-mini" }, - "list": [{ "id": "main", "workspace": "~/.openclaw/workspaces/main" }] - } -} -"#, - ) - .expect("write config"); - - set_active_openclaw_home_override(Some(home_dir.to_string_lossy().to_string())) - .expect("set openclaw override"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal override"); - - let result = ensure_local_remote_doctor_agent_ready(); - - set_active_openclaw_home_override(None).expect("clear openclaw override"); - set_active_clawpal_data_override(None).expect("clear clawpal override"); - - if let Err(error) = &result { - let _ = std::fs::remove_dir_all(&temp_root); - panic!("ensure agent ready: {error}"); - } - - let cfg: Value = serde_json::from_str( - &std::fs::read_to_string(openclaw_dir.join("openclaw.json")).expect("read config"), - ) - .expect("parse config"); - let agent = cfg["agents"]["list"] - .as_array() - .and_then(|agents| { - agents.iter().find(|agent| { - agent.get("id").and_then(Value::as_str) == Some(remote_doctor_agent_id()) - }) - }) - .expect("dedicated agent entry"); - let workspace = agent["workspace"] - .as_str() - 
.expect("agent workspace") - .replace("~/", &format!("{}/", home_dir.to_string_lossy())); - for file_name in ["IDENTITY.md", "USER.md", "BOOTSTRAP.md", "AGENTS.md"] { - let content = std::fs::read_to_string(std::path::Path::new(&workspace).join(file_name)) - .unwrap_or_else(|error| panic!("read {file_name}: {error}")); - assert!( - !content.trim().is_empty(), - "{file_name} should not be empty" - ); - } - - let _ = std::fs::remove_dir_all(&temp_root); - } - - #[test] - fn only_agent_planner_protocol_requires_bridge() { - assert!(protocol_requires_bridge(RemoteDoctorProtocol::AgentPlanner)); - assert!(!protocol_requires_bridge( - RemoteDoctorProtocol::ClawpalServer - )); - assert!(!protocol_requires_bridge( - RemoteDoctorProtocol::LegacyDoctor - )); - } - - #[test] - fn clawpal_server_protocol_skips_local_rescue_preflight() { - assert!(!protocol_runs_rescue_preflight( - RemoteDoctorProtocol::ClawpalServer - )); - assert!(!protocol_runs_rescue_preflight( - RemoteDoctorProtocol::AgentPlanner - )); - } - - #[test] - fn remote_target_host_id_candidates_include_exact_and_stripped_ids() { - assert_eq!( - remote_target_host_id_candidates("ssh:15-235-214-81"), - vec!["ssh:15-235-214-81".to_string(), "15-235-214-81".to_string()] - ); - assert_eq!( - remote_target_host_id_candidates("e2e-remote-doctor"), - vec!["e2e-remote-doctor".to_string()] - ); - } - - #[test] - fn primary_remote_target_host_id_prefers_exact_instance_id() { - assert_eq!( - primary_remote_target_host_id("ssh:15-235-214-81").unwrap(), - "ssh:15-235-214-81" - ); - } - - #[test] - fn parse_invoke_argv_supports_command_string_payloads() { - let argv = parse_invoke_argv( - "clawpal", - &json!({ - "command": "doctor config-read models.providers.openai" - }), - ) - .expect("parse invoke argv"); - assert_eq!( - argv, - vec![ - "clawpal", - "doctor", - "config-read", - "models.providers.openai" - ] - ); - } - - #[test] - fn plan_commands_treat_clawpal_as_internal_tool() { - 
assert!(plan_command_uses_internal_clawpal_tool(&[ - "clawpal".to_string(), - "doctor".to_string(), - "config-read".to_string(), - ])); - assert!(!plan_command_uses_internal_clawpal_tool(&[ - "openclaw".to_string(), - "doctor".to_string(), - ])); - } - - #[test] - fn unsupported_openclaw_subcommand_is_rejected_early() { - let error = validate_plan_command_argv(&[ - "openclaw".to_string(), - "auth".to_string(), - "list".to_string(), - ]) - .unwrap_err(); - assert!(error.contains("Unsupported openclaw plan command")); - assert!(error.contains("openclaw auth list")); - } - - #[test] - fn openclaw_doctor_json_is_rejected_early() { - let error = validate_plan_command_argv(&[ - "openclaw".to_string(), - "doctor".to_string(), - "--json".to_string(), - ]) - .unwrap_err(); - assert!(error.contains("Unsupported openclaw plan command")); - assert!(error.contains("openclaw doctor --json")); - } - - #[test] - fn multiline_clawpal_exec_is_rejected_early() { - let error = validate_plan_command_argv(&[ - "clawpal".to_string(), - "doctor".to_string(), - "exec".to_string(), - "--tool".to_string(), - "python3".to_string(), - "--args".to_string(), - "- <<'PY'\nprint('hi')\nPY".to_string(), - ]) - .unwrap_err(); - assert!(error.contains("Unsupported clawpal doctor exec args")); - assert!(error.contains("heredocs")); - } - - #[test] - fn plan_command_failure_message_mentions_command_and_error() { - let error = plan_command_failure_message( - PlanKind::Investigate, - 2, - &[ - "openclaw".to_string(), - "gateway".to_string(), - "logs".to_string(), - ], - "ssh command failed: russh exec timed out after 25s", - ); - assert!(error.contains("Investigate command failed in round 2")); - assert!(error.contains("openclaw gateway logs")); - assert!(error.contains("timed out after 25s")); - } - - fn sample_diagnosis(issues: Vec) -> RescuePrimaryDiagnosisResult { - serde_json::from_value(json!({ - "status": if issues.is_empty() { "healthy" } else { "broken" }, - "checkedAt": "2026-03-18T00:00:00Z", 
- "targetProfile": "primary", - "rescueProfile": "rescue", - "rescueConfigured": true, - "rescuePort": 18789, - "summary": { - "status": if issues.is_empty() { "healthy" } else { "broken" }, - "headline": if issues.is_empty() { "Healthy" } else { "Broken" }, - "recommendedAction": if issues.is_empty() { "No action needed" } else { "Repair issues" }, - "fixableIssueCount": issues.len(), - "selectedFixIssueIds": issues.iter().filter_map(|issue| issue.get("id").and_then(Value::as_str)).collect::>(), - "rootCauseHypotheses": [], - "fixSteps": [], - "confidence": 0.8, - "citations": [], - "versionAwareness": null - }, - "sections": [], - "checks": [], - "issues": issues - })) - .expect("sample diagnosis") - } - - #[test] - fn diagnosis_issue_summaries_capture_code_severity_and_message() { - let diagnosis = sample_diagnosis(vec![ - json!({ - "id": "gateway.unhealthy", - "code": "gateway.unhealthy", - "severity": "high", - "message": "Gateway is unhealthy", - "autoFixable": true, - "fixHint": "Restart gateway", - "source": "gateway" - }), - json!({ - "id": "providers.base_url", - "code": "invalid.base_url", - "severity": "medium", - "message": "Provider base URL is invalid", - "autoFixable": true, - "fixHint": "Reset baseUrl", - "source": "config" - }), - ]); - - let summary = diagnosis_issue_summaries(&diagnosis); - assert_eq!(summary.len(), 2); - assert_eq!(summary[0]["code"], "gateway.unhealthy"); - assert_eq!(summary[0]["severity"], "high"); - assert_eq!(summary[0]["title"], "Gateway is unhealthy"); - assert_eq!(summary[0]["target"], "gateway"); - assert_eq!(summary[1]["code"], "invalid.base_url"); - } - - #[test] - fn repeated_rediagnose_only_rounds_are_detected_as_stalled() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "providers.base_url", - "code": "invalid.base_url", - "severity": "medium", - "message": "Provider base URL is invalid", - "autoFixable": true, - "fixHint": "Reset baseUrl", - "source": "config" - })]); - let step_types = 
vec!["doctorRediagnose".to_string()]; - - assert!(!repair_loops_repair_plan_stalled( - &[ - RepairRoundObservation::new(1, &step_types, &diagnosis), - RepairRoundObservation::new(2, &step_types, &diagnosis), - ], - 3, - )); - assert!(repair_loops_repair_plan_stalled( - &[ - RepairRoundObservation::new(1, &step_types, &diagnosis), - RepairRoundObservation::new(2, &step_types, &diagnosis), - RepairRoundObservation::new(3, &step_types, &diagnosis), - ], - 3, - )); - } - - #[test] - fn round_limit_error_message_includes_latest_issues_and_step_types() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "providers.base_url", - "code": "invalid.base_url", - "severity": "medium", - "message": "Provider base URL is invalid", - "autoFixable": true, - "fixHint": "Reset baseUrl", - "source": "config" - })]); - let error = repair_loops_round_limit_error_message( - &diagnosis, - &["doctorRediagnose".to_string()], - ); - assert!(error.contains("invalid.base_url")); - assert!(error.contains("doctorRediagnose")); - assert!(error.contains("Provider base URL is invalid")); - } - - #[test] - fn unreadable_config_context_uses_raw_excerpt_and_parse_error() { - let context = build_config_excerpt_context("{\n ddd\n}"); - assert!(context.config_excerpt.is_null()); - assert!(context - .config_excerpt_raw - .as_deref() - .unwrap_or_default() - .contains("ddd")); - assert!(context - .config_parse_error - .as_deref() - .unwrap_or_default() - .contains("key must be a string")); - } - - #[test] - fn unreadable_config_context_summary_marks_excerpt_missing() { - let context = build_config_excerpt_context("{\n ddd\n}"); - let summary = config_excerpt_log_summary(&context); - assert_eq!(summary["configExcerptPresent"], json!(false)); - assert_eq!(summary["configExcerptRawPresent"], json!(true)); - assert!(summary["configParseError"] - .as_str() - .unwrap_or_default() - .contains("key must be a string")); - } - - #[test] - fn config_read_response_returns_raw_context_for_unreadable_json() { - 
let value = config_read_response("{\n ddd\n}", None).expect("config read response"); - assert!(value["value"].is_null()); - assert!(value["raw"].as_str().unwrap_or_default().contains("ddd")); - assert!(value["parseError"] - .as_str() - .unwrap_or_default() - .contains("key must be a string")); - } - - #[test] - fn decode_base64_config_payload_reads_utf8_text() { - use base64::Engine as _; - let encoded = base64::engine::general_purpose::STANDARD.encode("{\"ok\":true}"); - let decoded = decode_base64_config_payload(&encoded).expect("decode payload"); - assert_eq!(decoded, "{\"ok\":true}"); - } - - #[test] - fn diagnosis_missing_rescue_profile_is_detected() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "rescue.profile.missing", - "code": "rescue.profile.missing", - "severity": "error", - "message": "Rescue profile \"rescue\" is not configured", - "autoFixable": false, - "fixHint": "Activate Rescue Bot first", - "source": "rescue" - })]); - assert!(diagnosis_missing_rescue_profile(&diagnosis)); - } - - #[test] - fn diagnosis_unhealthy_rescue_gateway_is_detected() { - let diagnosis = sample_diagnosis(vec![json!({ - "id": "rescue.gateway.unhealthy", - "code": "rescue.gateway.unhealthy", - "severity": "warn", - "message": "Rescue gateway is not healthy", - "autoFixable": false, - "fixHint": "Inspect rescue gateway logs before using failover", - "source": "rescue" - })]); - assert!(diagnosis_unhealthy_rescue_gateway(&diagnosis)); - } - #[test] fn rescue_setup_command_result_reports_activation() { let result = rescue_setup_command_result("activate", "rescue", true, true, "active"); diff --git a/src-tauri/src/remote_doctor/plan.rs b/src-tauri/src/remote_doctor/plan.rs index 60a0cdd3..118071b3 100644 --- a/src-tauri/src/remote_doctor/plan.rs +++ b/src-tauri/src/remote_doctor/plan.rs @@ -657,6 +657,84 @@ mod tests { ]) .unwrap_err(); assert!(error.contains("Unsupported openclaw plan command")); + assert!(error.contains("openclaw auth list")); + } + + #[test] + fn 
openclaw_doctor_json_is_rejected_early() { + let error = validate_plan_command_argv(&[ + "openclaw".to_string(), + "doctor".to_string(), + "--json".to_string(), + ]) + .unwrap_err(); + assert!(error.contains("Unsupported openclaw plan command")); + assert!(error.contains("openclaw doctor --json")); + } + + #[test] + fn multiline_clawpal_exec_is_rejected_early() { + let error = validate_plan_command_argv(&[ + "clawpal".to_string(), + "doctor".to_string(), + "exec".to_string(), + "--tool".to_string(), + "python3".to_string(), + "--args".to_string(), + "- <<'PY'\nprint('hi')\nPY".to_string(), + ]) + .unwrap_err(); + assert!(error.contains("Unsupported clawpal doctor exec args")); + assert!(error.contains("heredocs")); + } + + #[test] + fn plan_commands_treat_clawpal_as_internal_tool() { + assert!(plan_command_uses_internal_clawpal_tool(&[ + "clawpal".to_string(), + "doctor".to_string(), + "config-read".to_string(), + ])); + assert!(!plan_command_uses_internal_clawpal_tool(&[ + "openclaw".to_string(), + "doctor".to_string(), + ])); + } + + #[test] + fn config_read_response_returns_raw_context_for_unreadable_json() { + let value = config_read_response("{\n ddd\n}", None).expect("config read response"); + assert!(value["value"].is_null()); + assert!(value["raw"].as_str().unwrap_or_default().contains("ddd")); + assert!(value["parseError"] + .as_str() + .unwrap_or_default() + .contains("key must be a string")); + } + + #[test] + fn decode_base64_config_payload_reads_utf8_text() { + use base64::Engine as _; + let encoded = base64::engine::general_purpose::STANDARD.encode("{\"ok\":true}"); + let decoded = decode_base64_config_payload(&encoded).expect("decode payload"); + assert_eq!(decoded, "{\"ok\":true}"); + } + + #[test] + fn plan_command_failure_message_mentions_command_and_error() { + let error = plan_command_failure_message( + PlanKind::Investigate, + 2, + &[ + "openclaw".to_string(), + "gateway".to_string(), + "logs".to_string(), + ], + "ssh command failed: russh exec 
timed out after 25s", + ); + assert!(error.contains("Investigate command failed in round 2")); + assert!(error.contains("openclaw gateway logs")); + assert!(error.contains("timed out after 25s")); } #[test] diff --git a/src-tauri/src/remote_doctor/repair_loops.rs b/src-tauri/src/remote_doctor/repair_loops.rs index def45d1a..f0fa62a8 100644 --- a/src-tauri/src/remote_doctor/repair_loops.rs +++ b/src-tauri/src/remote_doctor/repair_loops.rs @@ -1106,3 +1106,82 @@ pub async fn start_remote_doctor_repair( ) -> Result { start_remote_doctor_repair_impl(app, &pool, instance_id, target_location).await } + +#[cfg(test)] +mod tests { + use super::*; + use crate::commands::{RescuePrimaryDiagnosisResult, RescuePrimaryIssue, RescuePrimarySummary}; + + #[test] + fn repeated_rediagnose_only_rounds_are_detected_as_stalled() { + let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { + id: "issue-1".to_string(), + code: "invalid.base_url".to_string(), + severity: "medium".to_string(), + message: "Provider base URL is invalid".to_string(), + auto_fixable: true, + fix_hint: Some("Reset baseUrl".to_string()), + source: "config".to_string(), + }]); + let step_types = vec!["doctorRediagnose".to_string()]; + + assert!(!repair_plan_stalled( + &[ + RepairRoundObservation::new(1, &step_types, &diagnosis), + RepairRoundObservation::new(2, &step_types, &diagnosis), + ], + 3, + )); + assert!(repair_plan_stalled( + &[ + RepairRoundObservation::new(1, &step_types, &diagnosis), + RepairRoundObservation::new(2, &step_types, &diagnosis), + RepairRoundObservation::new(3, &step_types, &diagnosis), + ], + 3, + )); + } + + #[test] + fn round_limit_error_message_includes_latest_issues_and_step_types() { + let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { + id: "issue-1".to_string(), + code: "invalid.base_url".to_string(), + severity: "medium".to_string(), + message: "Provider base URL is invalid".to_string(), + auto_fixable: true, + fix_hint: Some("Reset baseUrl".to_string()), + source: 
"config".to_string(), + }]); + let error = round_limit_error_message(&diagnosis, &["doctorRediagnose".to_string()]); + assert!(error.contains("invalid.base_url")); + assert!(error.contains("doctorRediagnose")); + assert!(error.contains("Provider base URL is invalid")); + } + + fn sample_diagnosis(issues: Vec) -> RescuePrimaryDiagnosisResult { + RescuePrimaryDiagnosisResult { + status: "degraded".to_string(), + checked_at: "2026-03-19T00:00:00Z".to_string(), + target_profile: "primary".to_string(), + rescue_profile: "rescue".to_string(), + rescue_configured: true, + rescue_port: Some(18789), + summary: RescuePrimarySummary { + status: "degraded".to_string(), + headline: "Issues found".to_string(), + recommended_action: "Repair".to_string(), + fixable_issue_count: 0, + selected_fix_issue_ids: Vec::new(), + root_cause_hypotheses: Vec::new(), + fix_steps: Vec::new(), + confidence: None, + citations: Vec::new(), + version_awareness: None, + }, + sections: Vec::new(), + checks: Vec::new(), + issues, + } + } +} diff --git a/src-tauri/src/remote_doctor/types.rs b/src-tauri/src/remote_doctor/types.rs index 402edde2..8b8cfbc3 100644 --- a/src-tauri/src/remote_doctor/types.rs +++ b/src-tauri/src/remote_doctor/types.rs @@ -215,6 +215,38 @@ mod tests { assert_eq!(first.issue_summaries, second.issue_summaries); } + #[test] + fn diagnosis_issue_summaries_capture_code_severity_and_message() { + let diagnosis = sample_diagnosis(vec![ + RescuePrimaryIssue { + id: "issue-1".to_string(), + code: "gateway.unhealthy".to_string(), + severity: "high".to_string(), + message: "Gateway is unhealthy".to_string(), + auto_fixable: true, + fix_hint: Some("Restart gateway".to_string()), + source: "gateway".to_string(), + }, + RescuePrimaryIssue { + id: "issue-2".to_string(), + code: "invalid.base_url".to_string(), + severity: "medium".to_string(), + message: "Provider base URL is invalid".to_string(), + auto_fixable: true, + fix_hint: Some("Reset baseUrl".to_string()), + source: 
"config".to_string(), + }, + ]); + + let summary = diagnosis_issue_summaries(&diagnosis); + assert_eq!(summary.len(), 2); + assert_eq!(summary[0]["code"], "gateway.unhealthy"); + assert_eq!(summary[0]["severity"], "high"); + assert_eq!(summary[0]["title"], "Gateway is unhealthy"); + assert_eq!(summary[0]["target"], "gateway"); + assert_eq!(summary[1]["code"], "invalid.base_url"); + } + fn sample_diagnosis(issues: Vec) -> RescuePrimaryDiagnosisResult { RescuePrimaryDiagnosisResult { status: "degraded".to_string(), From 65562dc3a0d18dd842003979cba0f42a11c27182 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 15:42:18 +0800 Subject: [PATCH 13/20] refactor: move remote doctor e2e tests out of legacy --- src-tauri/src/remote_doctor/legacy.rs | 841 +--------------------- src-tauri/src/remote_doctor/legacy_e2e.rs | 798 ++++++++++++++++++++ src-tauri/src/remote_doctor/mod.rs | 2 + 3 files changed, 805 insertions(+), 836 deletions(-) create mode 100644 src-tauri/src/remote_doctor/legacy_e2e.rs diff --git a/src-tauri/src/remote_doctor/legacy.rs b/src-tauri/src/remote_doctor/legacy.rs index b455e584..effb4492 100644 --- a/src-tauri/src/remote_doctor/legacy.rs +++ b/src-tauri/src/remote_doctor/legacy.rs @@ -264,13 +264,13 @@ async fn collect_rescue_activation_failure_diagnostics( results } -struct RescueActivationFailure { - message: String, - activation_result: CommandResult, - diagnostics: Vec, +pub(crate) struct RescueActivationFailure { + pub(crate) message: String, + pub(crate) activation_result: CommandResult, + pub(crate) diagnostics: Vec, } -async fn ensure_rescue_profile_ready( +pub(crate) async fn ensure_rescue_profile_ready( app: &AppHandle, target_location: TargetLocation, instance_id: &str, @@ -535,25 +535,6 @@ pub(crate) async fn request_agent_plan( #[cfg(test)] mod tests { use super::*; - use std::fs::create_dir_all; - use std::io::Write; - use std::process::Command; - - use uuid::Uuid; - - use crate::remote_doctor::agent::detect_method_name; 
- use crate::remote_doctor::config::load_gateway_config as remote_doctor_gateway_config; - use crate::remote_doctor::plan::request_plan; - use crate::remote_doctor::repair_loops::{ - run_clawpal_server_repair_loop as repair_loops_run_clawpal_server_repair_loop, - run_remote_doctor_repair_loop as repair_loops_run_remote_doctor_repair_loop, - start_remote_doctor_repair_impl as repair_loops_start_remote_doctor_repair_impl, - }; - use crate::remote_doctor::types::PlanCommand; - use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; - use crate::ssh::SshHostConfig; - use std::net::TcpStream; - use tauri::test::mock_app; #[test] fn parse_agent_plan_response_reads_json_payload() { @@ -605,816 +586,4 @@ mod tests { .contains(&"openclaw --profile rescue config get gateway.port --json".to_string())); } - const E2E_CONTAINER_NAME: &str = "clawpal-e2e-remote-doctor"; - const E2E_SSH_PORT: u16 = 2399; - const E2E_ROOT_PASSWORD: &str = "clawpal-remote-doctor-pass"; - const E2E_DOCKERFILE: &str = r#" -FROM ubuntu:22.04 -ENV DEBIAN_FRONTEND=noninteractive -RUN apt-get update && apt-get install -y openssh-server && rm -rf /var/lib/apt/lists/* && mkdir /var/run/sshd -RUN echo "root:ROOTPASS" | chpasswd && \ - sed -i 's/#PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config && \ - sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config && \ - echo "PasswordAuthentication yes" >> /etc/ssh/sshd_config -RUN mkdir -p /root/.openclaw -RUN cat > /root/.openclaw/openclaw.json <<'EOF' -{ - "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } }, - "auth": { - "profiles": { - "openai-default": { - "provider": "openai", - "apiKey": "sk-test" - } - } - }, - "models": { - "providers": { - "openai": { - "baseUrl": "http://127.0.0.1:9/v1", - "models": [{ "id": "gpt-4o-mini", "name": "gpt-4o-mini" }] - } - } - }, - "agents": { - "defaults": { "model": "openai/gpt-4o-mini" }, - "list": [ { "id": "main", "model": 
"anthropic/claude-sonnet-4-20250514" } ] - }, - "channels": { - "discord": { - "guilds": { - "guild-1": { - "channels": { - "general": { "model": "openai/gpt-4o-mini" } - } - } - } - } - } -} -EOF -RUN cat > /usr/local/bin/openclaw <<'EOF' && chmod +x /usr/local/bin/openclaw -#!/bin/sh -STATE_DIR="${OPENCLAW_STATE_DIR:-${OPENCLAW_HOME:-$HOME/.openclaw}}" -CONFIG_PATH="$STATE_DIR/openclaw.json" -PROFILE="primary" -if [ "$1" = "--profile" ]; then - PROFILE="$2" - shift 2 -fi -case "$1" in - --version) - echo "openclaw 2026.3.2-test" - ;; - doctor) - if grep -q '127.0.0.1:9/v1' "$CONFIG_PATH"; then - echo '{"ok":false,"score":40,"issues":[{"id":"primary.models.base_url","code":"invalid.base_url","severity":"error","message":"provider baseUrl points to test blackhole","autoFixable":true,"fixHint":"Remove the bad baseUrl override"}]}' - else - echo '{"ok":true,"score":100,"issues":[],"checks":[{"id":"test","status":"ok"}]}' - fi - ;; - agents) - if [ "$2" = "list" ] && [ "$3" = "--json" ]; then - echo '[{"id":"main"}]' - else - echo "unsupported openclaw agents command" >&2 - exit 1 - fi - ;; - models) - if [ "$2" = "list" ] && [ "$3" = "--all" ] && [ "$4" = "--json" ] && [ "$5" = "--no-color" ]; then - echo '{"models":[{"key":"openai/gpt-4o-mini","provider":"openai","id":"gpt-4o-mini","name":"gpt-4o-mini","baseUrl":"https://api.openai.com/v1"}],"providers":{"openai":{"baseUrl":"https://api.openai.com/v1"}}}' - else - echo "unsupported openclaw models command" >&2 - exit 1 - fi - ;; - config) - if [ "$2" = "get" ] && [ "$3" = "gateway.port" ] && [ "$4" = "--json" ]; then - if [ "$PROFILE" = "rescue" ]; then - echo '19789' - else - echo '18789' - fi - else - echo "unsupported openclaw config command: $*" >&2 - exit 1 - fi - ;; - gateway) - case "$2" in - status) - if [ "$PROFILE" = "rescue" ] && [ "${OPENCLAW_RESCUE_GATEWAY_ACTIVE:-1}" != "1" ]; then - echo '{"running":false,"healthy":false,"gateway":{"running":false},"health":{"ok":false}}' - else - echo 
'{"running":true,"healthy":true,"gateway":{"running":true},"health":{"ok":true}}' - fi - ;; - restart|start|stop) - echo '{"ok":true}' - ;; - *) - echo "unsupported openclaw gateway command: $*" >&2 - exit 1 - ;; - esac - ;; - *) - echo "unsupported openclaw command: $*" >&2 - exit 1 - ;; -esac -EOF -EXPOSE 22 -CMD ["/usr/sbin/sshd", "-D"] -"#; - - fn should_run_docker_e2e() -> bool { - std::env::var("CLAWPAL_RUN_REMOTE_DOCTOR_E2E") - .ok() - .as_deref() - == Some("1") - } - - fn live_gateway_url() -> Option { - std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL") - .ok() - .map(|value| value.trim().to_string()) - .filter(|value| !value.is_empty()) - } - - fn live_gateway_token() -> Option { - std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN") - .ok() - .map(|value| value.trim().to_string()) - .filter(|value| !value.is_empty()) - } - - fn live_gateway_instance_id() -> String { - std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_INSTANCE_ID") - .ok() - .map(|value| value.trim().to_string()) - .filter(|value| !value.is_empty()) - .unwrap_or_else(|| "local".to_string()) - } - - fn live_gateway_target_location() -> TargetLocation { - match std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TARGET_LOCATION") - .ok() - .as_deref() - { - Some("remote_openclaw") => TargetLocation::RemoteOpenclaw, - _ => TargetLocation::LocalOpenclaw, - } - } - - fn live_gateway_protocol() -> String { - std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_PROTOCOL") - .ok() - .map(|value| value.trim().to_string()) - .filter(|value| !value.is_empty()) - .unwrap_or_else(|| "clawpal_server".to_string()) - } - - fn docker_available() -> bool { - Command::new("docker") - .args(["info"]) - .stdout(std::process::Stdio::null()) - .stderr(std::process::Stdio::null()) - .status() - .map(|status| status.success()) - .unwrap_or(false) - } - - fn cleanup_e2e_container() { - let _ = Command::new("docker") - .args(["rm", "-f", E2E_CONTAINER_NAME]) - .stdout(std::process::Stdio::null()) - 
.stderr(std::process::Stdio::null()) - .status(); - let _ = Command::new("docker") - .args(["rmi", "-f", &format!("{E2E_CONTAINER_NAME}:latest")]) - .stdout(std::process::Stdio::null()) - .stderr(std::process::Stdio::null()) - .status(); - } - - fn build_e2e_image() -> Result<(), String> { - let dockerfile = E2E_DOCKERFILE.replace("ROOTPASS", E2E_ROOT_PASSWORD); - let output = Command::new("docker") - .args([ - "build", - "-t", - &format!("{E2E_CONTAINER_NAME}:latest"), - "-f", - "-", - ".", - ]) - .stdin(std::process::Stdio::piped()) - .stdout(std::process::Stdio::piped()) - .stderr(std::process::Stdio::piped()) - .current_dir(std::env::temp_dir()) - .spawn() - .and_then(|mut child| { - if let Some(ref mut stdin) = child.stdin { - stdin.write_all(dockerfile.as_bytes())?; - } - child.wait_with_output() - }) - .map_err(|error| format!("docker build failed: {error}"))?; - if !output.status.success() { - return Err(String::from_utf8_lossy(&output.stderr).to_string()); - } - Ok(()) - } - - fn start_e2e_container() -> Result<(), String> { - start_e2e_container_with_env(&[]) - } - - fn start_e2e_container_with_env(env: &[(&str, &str)]) -> Result<(), String> { - let mut args = vec![ - "run".to_string(), - "-d".to_string(), - "--name".to_string(), - E2E_CONTAINER_NAME.to_string(), - ]; - for (key, value) in env { - args.push("-e".to_string()); - args.push(format!("{key}={value}")); - } - args.extend([ - "-p".to_string(), - format!("{E2E_SSH_PORT}:22"), - format!("{E2E_CONTAINER_NAME}:latest"), - ]); - let output = Command::new("docker") - .args(&args) - .output() - .map_err(|error| format!("docker run failed: {error}"))?; - if !output.status.success() { - return Err(String::from_utf8_lossy(&output.stderr).to_string()); - } - Ok(()) - } - - fn wait_for_ssh(timeout_secs: u64) -> Result<(), String> { - let start = Instant::now(); - while start.elapsed().as_secs() < timeout_secs { - if TcpStream::connect(format!("127.0.0.1:{E2E_SSH_PORT}")).is_ok() { - 
std::thread::sleep(std::time::Duration::from_millis(500)); - return Ok(()); - } - std::thread::sleep(std::time::Duration::from_millis(300)); - } - Err("timeout waiting for ssh".into()) - } - - fn e2e_host_config() -> SshHostConfig { - SshHostConfig { - id: "e2e-remote-doctor".into(), - label: "E2E Remote Doctor".into(), - host: "127.0.0.1".into(), - port: E2E_SSH_PORT, - username: "root".into(), - auth_method: "password".into(), - key_path: None, - password: Some(E2E_ROOT_PASSWORD.into()), - passphrase: None, - } - } - - #[tokio::test] - async fn remote_doctor_docker_e2e_loop_completes() { - if !should_run_docker_e2e() { - eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); - return; - } - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container().expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - wait_for_ssh(30).expect("ssh should become available"); - - let temp_root = - std::env::temp_dir().join(format!("clawpal-remote-doctor-e2e-{}", Uuid::new_v4())); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - set_active_openclaw_home_override(None).expect("clear openclaw home override"); - - let pool = SshConnectionPool::new(); - let cfg = e2e_host_config(); - pool.connect(&cfg).await.expect("ssh connect"); - - let session_id = Uuid::new_v4().to_string(); - let marker = "/tmp/clawpal-remote-doctor-fixed"; - let result = repair_loops_run_remote_doctor_repair_loop( - Option::<&AppHandle>::None, - &pool, - &session_id, - &format!("ssh:{}", cfg.id), - TargetLocation::RemoteOpenclaw, - |kind, round, previous_results| async move { - match (kind, round) { - 
(PlanKind::Detect, 1) => Ok(PlanResponse { - plan_id: "detect-1".into(), - plan_kind: PlanKind::Detect, - summary: "Initial detect".into(), - commands: vec![PlanCommand { - argv: vec!["openclaw".into(), "--version".into()], - timeout_sec: Some(10), - purpose: Some("collect version".into()), - continue_on_failure: Some(false), - }], - healthy: false, - done: false, - success: false, - }), - (PlanKind::Repair, 1) => { - assert_eq!(previous_results.len(), 1); - Ok(PlanResponse { - plan_id: "repair-1".into(), - plan_kind: PlanKind::Repair, - summary: "Write marker".into(), - commands: vec![PlanCommand { - argv: vec![ - "sh".into(), - "-lc".into(), - format!("printf 'fixed' > {marker}"), - ], - timeout_sec: Some(10), - purpose: Some("mark repaired".into()), - continue_on_failure: Some(false), - }], - healthy: false, - done: false, - success: false, - }) - } - (PlanKind::Detect, 2) => { - assert_eq!(previous_results.len(), 1); - assert_eq!( - previous_results[0].stdout.trim(), - "", - "repair command should not print to stdout" - ); - Ok(PlanResponse { - plan_id: "detect-2".into(), - plan_kind: PlanKind::Detect, - summary: "Marker exists".into(), - commands: Vec::new(), - healthy: true, - done: true, - success: true, - }) - } - _ => Err(format!( - "unexpected planner request: {:?} round {}", - kind, round - )), - } - }, - ) - .await - .expect("remote doctor loop should complete"); - - assert_eq!(result.status, "completed"); - assert!(result.latest_diagnosis_healthy); - assert_eq!(result.round, 2); - - let marker_result = pool - .exec(&cfg.id, &format!("test -f {marker}")) - .await - .expect("marker check"); - assert_eq!(marker_result.exit_code, 0); - - let log_path = clawpal_dir - .join("doctor") - .join("remote") - .join(format!("{session_id}.jsonl")); - let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor log"); - assert!(log_text.contains("\"planKind\":\"detect\"")); - assert!(log_text.contains("\"planKind\":\"repair\"")); - let _ = 
std::fs::remove_dir_all(temp_root); - set_active_clawpal_data_override(None).expect("clear clawpal data"); - } - - #[tokio::test] - async fn remote_doctor_docker_e2e_rescue_activation_fails_when_gateway_stays_inactive() { - if !should_run_docker_e2e() { - eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); - return; - } - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container_with_env(&[("OPENCLAW_RESCUE_GATEWAY_ACTIVE", "0")]) - .expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - wait_for_ssh(30).expect("ssh should become available"); - - let app = mock_app(); - let app_handle = app.handle().clone(); - app_handle.manage(SshConnectionPool::new()); - let pool = app_handle.state::(); - let cfg = e2e_host_config(); - pool.connect(&cfg).await.expect("ssh connect"); - - let error = ensure_rescue_profile_ready( - &app_handle, - TargetLocation::RemoteOpenclaw, - &format!("ssh:{}", cfg.id), - ) - .await - .expect_err("rescue activation should fail when gateway remains inactive"); - - assert!(error.message.contains("did not become active")); - assert!(error.message.contains("configured_inactive")); - assert!(error - .diagnostics - .iter() - .any(|result| result.argv.join(" ") == "manage_rescue_bot status rescue")); - } - - #[tokio::test] - async fn remote_doctor_live_gateway_uses_configured_url_and_token() { - let Some(url) = live_gateway_url() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); - return; - }; - let Some(token) = live_gateway_token() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); - return; - }; - - let app = mock_app(); - let app_handle = app.handle().clone(); - app_handle.manage(SshConnectionPool::new()); - let temp_root = - 
std::env::temp_dir().join(format!("clawpal-remote-doctor-live-{}", Uuid::new_v4())); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - - std::fs::write( - clawpal_dir.join("app-preferences.json"), - serde_json::to_string(&json!({ - "remoteDoctorGatewayUrl": url, - "remoteDoctorGatewayAuthToken": token, - })) - .expect("serialize prefs"), - ) - .expect("write app preferences"); - - let gateway = remote_doctor_gateway_config().expect("gateway config"); - assert_eq!(gateway.url, url); - assert_eq!(gateway.auth_token_override.as_deref(), Some(token.as_str())); - - let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) - .expect("gateway credentials"); - assert!(creds.is_some(), "expected token override credentials"); - - let client = NodeClient::new(); - client - .connect(&gateway.url, app.handle().clone(), creds) - .await - .expect("connect live remote doctor gateway"); - assert!(client.is_connected().await); - match live_gateway_protocol().as_str() { - "clawpal_server" => { - let response = client - .send_request( - "remote_repair_plan.request", - json!({ - "requestId": format!("live-e2e-{}", Uuid::new_v4()), - "targetId": live_gateway_instance_id(), - "context": { - "configExcerpt": { - "models": { - "providers": { - "openai-codex": { - "baseUrl": "http://127.0.0.1:9/v1" - } - } - } - } - } - }), - ) - .await - .expect("request clawpal-server remote repair plan"); - let plan_id = response - .get("planId") - .and_then(|value| value.as_str()) - .unwrap_or_default(); - assert!( - !plan_id.trim().is_empty(), - "clawpal-server response should include a plan id" - ); - let steps = response - .get("steps") - .and_then(|value| value.as_array()) - .cloned() - .unwrap_or_default(); - assert!( - !steps.is_empty(), - "clawpal-server response should include repair steps" - ); 
- } - _ => { - let detect_plan = request_plan( - &client, - &detect_method_name(), - PlanKind::Detect, - &format!("live-e2e-{}", Uuid::new_v4()), - 1, - live_gateway_target_location(), - &live_gateway_instance_id(), - &[], - ) - .await - .expect("request live detection plan"); - assert!( - !detect_plan.plan_id.trim().is_empty(), - "live detection plan should include a plan id" - ); - } - } - client.disconnect().await.expect("disconnect"); - - set_active_clawpal_data_override(None).expect("clear clawpal data"); - let _ = std::fs::remove_dir_all(temp_root); - } - - #[tokio::test] - async fn remote_doctor_live_gateway_full_repair_loop_completes() { - let Some(url) = live_gateway_url() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); - return; - }; - let Some(token) = live_gateway_token() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); - return; - }; - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container().expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - wait_for_ssh(30).expect("ssh should become available"); - - let app = mock_app(); - let app_handle = app.handle().clone(); - app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir().join(format!( - "clawpal-remote-doctor-live-loop-{}", - Uuid::new_v4() - )); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - set_active_openclaw_home_override(None).expect("clear openclaw home override"); - - std::fs::write( - clawpal_dir.join("app-preferences.json"), - serde_json::to_string(&json!({ - "remoteDoctorGatewayUrl": url, - 
"remoteDoctorGatewayAuthToken": token, - })) - .expect("serialize prefs"), - ) - .expect("write app preferences"); - - let cfg = e2e_host_config(); - let pool = app_handle.state::(); - pool.connect(&cfg).await.expect("ssh connect"); - - let gateway = remote_doctor_gateway_config().expect("gateway config"); - let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) - .expect("gateway credentials"); - let client = NodeClient::new(); - client - .connect(&gateway.url, app_handle.clone(), creds) - .await - .expect("connect live remote doctor gateway"); - - let session_id = Uuid::new_v4().to_string(); - let result = repair_loops_run_clawpal_server_repair_loop( - &app_handle, - &client, - &session_id, - &format!("ssh:{}", cfg.id), - TargetLocation::RemoteOpenclaw, - ) - .await - .expect("full live remote doctor repair loop should complete"); - - assert_eq!(result.status, "completed"); - assert!(result.latest_diagnosis_healthy); - - client.disconnect().await.expect("disconnect"); - set_active_clawpal_data_override(None).expect("clear clawpal data"); - let _ = std::fs::remove_dir_all(temp_root); - } - - #[tokio::test] - async fn remote_doctor_live_start_command_remote_target_completes_without_bridge_pairing() { - let Some(url) = live_gateway_url() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); - return; - }; - let Some(token) = live_gateway_token() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); - return; - }; - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container().expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - wait_for_ssh(30).expect("ssh should become available"); - - let app = mock_app(); - let app_handle = app.handle().clone(); - 
app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir().join(format!( - "clawpal-remote-doctor-live-start-{}", - Uuid::new_v4() - )); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - set_active_openclaw_home_override(None).expect("clear openclaw home override"); - - std::fs::write( - clawpal_dir.join("app-preferences.json"), - serde_json::to_string(&json!({ - "remoteDoctorGatewayUrl": url, - "remoteDoctorGatewayAuthToken": token, - })) - .expect("serialize prefs"), - ) - .expect("write app preferences"); - - let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); - let pool = app_handle.state::(); - - let result = repair_loops_start_remote_doctor_repair_impl( - app_handle.clone(), - &pool, - format!("ssh:{}", cfg.id), - "remote_openclaw".to_string(), - ) - .await - .expect("start command should complete remote repair"); - - assert_eq!(result.status, "completed"); - assert!(result.latest_diagnosis_healthy); - - let log_path = clawpal_dir - .join("doctor") - .join("remote") - .join(format!("{}.jsonl", result.session_id)); - let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor session log"); - assert!( - !log_text.contains("\"event\":\"bridge_connect_failed\""), - "clawpal_server path should not attempt bridge pairing: {log_text}" - ); - - set_active_clawpal_data_override(None).expect("clear clawpal data"); - let _ = std::fs::remove_dir_all(temp_root); - } - - #[tokio::test] - async fn remote_doctor_live_gateway_repairs_unreadable_remote_config() { - let Some(url) = live_gateway_url() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); - return; - }; - let Some(token) = live_gateway_token() else { - eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); - return; - 
}; - if !docker_available() { - eprintln!("skip: docker not available"); - return; - } - - cleanup_e2e_container(); - build_e2e_image().expect("docker build"); - start_e2e_container().expect("docker run"); - struct Cleanup; - impl Drop for Cleanup { - fn drop(&mut self) { - cleanup_e2e_container(); - } - } - let _cleanup = Cleanup; - wait_for_ssh(30).expect("ssh should become available"); - - let app = mock_app(); - let app_handle = app.handle().clone(); - app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir().join(format!( - "clawpal-remote-doctor-live-raw-config-{}", - Uuid::new_v4() - )); - let clawpal_dir = temp_root.join(".clawpal"); - create_dir_all(&clawpal_dir).expect("create clawpal dir"); - set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) - .expect("set clawpal data"); - set_active_openclaw_home_override(None).expect("clear openclaw home override"); - - std::fs::write( - clawpal_dir.join("app-preferences.json"), - serde_json::to_string(&json!({ - "remoteDoctorGatewayUrl": url, - "remoteDoctorGatewayAuthToken": token, - })) - .expect("serialize prefs"), - ) - .expect("write app preferences"); - - let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); - let pool = app_handle.state::(); - pool.connect(&cfg).await.expect("ssh connect"); - pool.exec_login( - &cfg.id, - "cat > ~/.openclaw/openclaw.json <<'EOF'\n{\n ddd\n}\nEOF", - ) - .await - .expect("corrupt remote config"); - - let result = repair_loops_start_remote_doctor_repair_impl( - app_handle.clone(), - &pool, - cfg.id.clone(), - "remote_openclaw".to_string(), - ) - .await - .expect("start command should repair unreadable config"); - - assert_eq!(result.status, "completed"); - assert!(result.latest_diagnosis_healthy); - - let repaired = pool - .exec_login(&cfg.id, "python3 - <<'PY'\nimport json, pathlib\njson.load(open(pathlib.Path.home()/'.openclaw'/'openclaw.json'))\nprint('ok')\nPY") - .await - 
.expect("read repaired config"); - assert_eq!( - repaired.exit_code, 0, - "repaired config should be valid JSON: {}", - repaired.stderr - ); - assert_eq!(repaired.stdout.trim(), "ok"); - - set_active_clawpal_data_override(None).expect("clear clawpal data"); - let _ = std::fs::remove_dir_all(temp_root); - } } diff --git a/src-tauri/src/remote_doctor/legacy_e2e.rs b/src-tauri/src/remote_doctor/legacy_e2e.rs new file mode 100644 index 00000000..708269ce --- /dev/null +++ b/src-tauri/src/remote_doctor/legacy_e2e.rs @@ -0,0 +1,798 @@ +use std::fs::create_dir_all; +use std::io::Write; +use std::net::TcpStream; +use std::process::Command; +use std::time::Instant; + +use serde_json::json; +use tauri::test::mock_app; +use tauri::AppHandle; +use tauri::Manager; +use uuid::Uuid; + +use super::agent::detect_method_name; +use super::config::{ + build_gateway_credentials as remote_doctor_gateway_credentials, + load_gateway_config as remote_doctor_gateway_config, +}; +use super::legacy::ensure_rescue_profile_ready; +use super::plan::request_plan; +use super::repair_loops::{ + run_clawpal_server_repair_loop, run_remote_doctor_repair_loop, + start_remote_doctor_repair_impl, +}; +use super::types::{PlanCommand, PlanKind, PlanResponse, TargetLocation}; +use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; +use crate::node_client::NodeClient; +use crate::ssh::{SshConnectionPool, SshHostConfig}; + +const E2E_CONTAINER_NAME: &str = "clawpal-e2e-remote-doctor"; +const E2E_SSH_PORT: u16 = 2399; +const E2E_ROOT_PASSWORD: &str = "clawpal-remote-doctor-pass"; +const E2E_DOCKERFILE: &str = r#" +FROM ubuntu:22.04 +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update && apt-get install -y openssh-server && rm -rf /var/lib/apt/lists/* && mkdir /var/run/sshd +RUN echo "root:ROOTPASS" | chpasswd && \ + sed -i 's/#PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config && \ + sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' 
/etc/ssh/sshd_config && \ + echo "PasswordAuthentication yes" >> /etc/ssh/sshd_config +RUN mkdir -p /root/.openclaw +RUN cat > /root/.openclaw/openclaw.json <<'EOF' +{ + "gateway": { "port": 18789, "auth": { "token": "gw-test-token" } }, + "auth": { + "profiles": { + "openai-default": { + "provider": "openai", + "apiKey": "sk-test" + } + } + }, + "models": { + "providers": { + "openai": { + "baseUrl": "http://127.0.0.1:9/v1", + "models": [{ "id": "gpt-4o-mini", "name": "gpt-4o-mini" }] + } + } + }, + "agents": { + "defaults": { "model": "openai/gpt-4o-mini" }, + "list": [ { "id": "main", "model": "anthropic/claude-sonnet-4-20250514" } ] + }, + "channels": { + "discord": { + "guilds": { + "guild-1": { + "channels": { + "general": { "model": "openai/gpt-4o-mini" } + } + } + } + } + } +} +EOF +RUN cat > /usr/local/bin/openclaw <<'EOF' && chmod +x /usr/local/bin/openclaw +#!/bin/sh +STATE_DIR="${OPENCLAW_STATE_DIR:-${OPENCLAW_HOME:-$HOME/.openclaw}}" +CONFIG_PATH="$STATE_DIR/openclaw.json" +PROFILE="primary" +if [ "$1" = "--profile" ]; then + PROFILE="$2" + shift 2 +fi +case "$1" in + --version) + echo "openclaw 2026.3.2-test" + ;; + doctor) + if grep -q '127.0.0.1:9/v1' "$CONFIG_PATH"; then + echo '{"ok":false,"score":40,"issues":[{"id":"primary.models.base_url","code":"invalid.base_url","severity":"error","message":"provider baseUrl points to test blackhole","autoFixable":true,"fixHint":"Remove the bad baseUrl override"}]}' + else + echo '{"ok":true,"score":100,"issues":[],"checks":[{"id":"test","status":"ok"}]}' + fi + ;; + agents) + if [ "$2" = "list" ] && [ "$3" = "--json" ]; then + echo '[{"id":"main"}]' + else + echo "unsupported openclaw agents command" >&2 + exit 1 + fi + ;; + models) + if [ "$2" = "list" ] && [ "$3" = "--all" ] && [ "$4" = "--json" ] && [ "$5" = "--no-color" ]; then + echo 
'{"models":[{"key":"openai/gpt-4o-mini","provider":"openai","id":"gpt-4o-mini","name":"gpt-4o-mini","baseUrl":"https://api.openai.com/v1"}],"providers":{"openai":{"baseUrl":"https://api.openai.com/v1"}}}' + else + echo "unsupported openclaw models command" >&2 + exit 1 + fi + ;; + config) + if [ "$2" = "get" ] && [ "$3" = "gateway.port" ] && [ "$4" = "--json" ]; then + if [ "$PROFILE" = "rescue" ]; then + echo '19789' + else + echo '18789' + fi + else + echo "unsupported openclaw config command: $*" >&2 + exit 1 + fi + ;; + gateway) + case "$2" in + status) + if [ "$PROFILE" = "rescue" ] && [ "${OPENCLAW_RESCUE_GATEWAY_ACTIVE:-1}" != "1" ]; then + echo '{"running":false,"healthy":false,"gateway":{"running":false},"health":{"ok":false}}' + else + echo '{"running":true,"healthy":true,"gateway":{"running":true},"health":{"ok":true}}' + fi + ;; + restart|start|stop) + echo '{"ok":true}' + ;; + *) + echo "unsupported openclaw gateway command: $*" >&2 + exit 1 + ;; + esac + ;; + *) + echo "unsupported openclaw command: $*" >&2 + exit 1 + ;; +esac +EOF +EXPOSE 22 +CMD ["/usr/sbin/sshd", "-D"] +"#; + +fn should_run_docker_e2e() -> bool { + std::env::var("CLAWPAL_RUN_REMOTE_DOCTOR_E2E") + .ok() + .as_deref() + == Some("1") +} + +fn live_gateway_url() -> Option { + std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL") + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) +} + +fn live_gateway_token() -> Option { + std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN") + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) +} + +fn live_gateway_instance_id() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_INSTANCE_ID") + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) + .unwrap_or_else(|| "local".to_string()) +} + +fn live_gateway_target_location() -> TargetLocation { + match std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TARGET_LOCATION") + .ok() + .as_deref() + { + 
Some("remote_openclaw") => TargetLocation::RemoteOpenclaw, + _ => TargetLocation::LocalOpenclaw, + } +} + +fn live_gateway_protocol() -> String { + std::env::var("CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_PROTOCOL") + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) + .unwrap_or_else(|| "clawpal_server".to_string()) +} + +fn docker_available() -> bool { + Command::new("docker") + .args(["info"]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .map(|status| status.success()) + .unwrap_or(false) +} + +fn cleanup_e2e_container() { + let _ = Command::new("docker") + .args(["rm", "-f", E2E_CONTAINER_NAME]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status(); + let _ = Command::new("docker") + .args(["rmi", "-f", &format!("{E2E_CONTAINER_NAME}:latest")]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status(); +} + +fn build_e2e_image() -> Result<(), String> { + let dockerfile = E2E_DOCKERFILE.replace("ROOTPASS", E2E_ROOT_PASSWORD); + let output = Command::new("docker") + .args(["build", "-t", &format!("{E2E_CONTAINER_NAME}:latest"), "-f", "-", "."]) + .stdin(std::process::Stdio::piped()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .current_dir(std::env::temp_dir()) + .spawn() + .and_then(|mut child| { + if let Some(ref mut stdin) = child.stdin { + stdin.write_all(dockerfile.as_bytes())?; + } + child.wait_with_output() + }) + .map_err(|error| format!("docker build failed: {error}"))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } + Ok(()) +} + +fn start_e2e_container() -> Result<(), String> { + start_e2e_container_with_env(&[]) +} + +fn start_e2e_container_with_env(env: &[(&str, &str)]) -> Result<(), String> { + let mut args = vec![ + "run".to_string(), + "-d".to_string(), + "--name".to_string(), + E2E_CONTAINER_NAME.to_string(), + ]; + for 
(key, value) in env { + args.push("-e".to_string()); + args.push(format!("{key}={value}")); + } + args.extend([ + "-p".to_string(), + format!("{E2E_SSH_PORT}:22"), + format!("{E2E_CONTAINER_NAME}:latest"), + ]); + let output = Command::new("docker") + .args(&args) + .output() + .map_err(|error| format!("docker run failed: {error}"))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } + Ok(()) +} + +fn wait_for_ssh(timeout_secs: u64) -> Result<(), String> { + let start = Instant::now(); + while start.elapsed().as_secs() < timeout_secs { + if TcpStream::connect(format!("127.0.0.1:{E2E_SSH_PORT}")).is_ok() { + std::thread::sleep(std::time::Duration::from_millis(500)); + return Ok(()); + } + std::thread::sleep(std::time::Duration::from_millis(300)); + } + Err("timeout waiting for ssh".into()) +} + +fn e2e_host_config() -> SshHostConfig { + SshHostConfig { + id: "e2e-remote-doctor".into(), + label: "E2E Remote Doctor".into(), + host: "127.0.0.1".into(), + port: E2E_SSH_PORT, + username: "root".into(), + auth_method: "password".into(), + key_path: None, + password: Some(E2E_ROOT_PASSWORD.into()), + passphrase: None, + } +} + +#[tokio::test] +async fn remote_doctor_docker_e2e_loop_completes() { + if !should_run_docker_e2e() { + eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); + return; + } + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let temp_root = std::env::temp_dir().join(format!("clawpal-remote-doctor-e2e-{}", Uuid::new_v4())); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + 
set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + let pool = SshConnectionPool::new(); + let cfg = e2e_host_config(); + pool.connect(&cfg).await.expect("ssh connect"); + + let session_id = Uuid::new_v4().to_string(); + let marker = "/tmp/clawpal-remote-doctor-fixed"; + let result = run_remote_doctor_repair_loop( + Option::<&AppHandle>::None, + &pool, + &session_id, + &format!("ssh:{}", cfg.id), + TargetLocation::RemoteOpenclaw, + |kind, round, previous_results| async move { + match (kind, round) { + (PlanKind::Detect, 1) => Ok(PlanResponse { + plan_id: "detect-1".into(), + plan_kind: PlanKind::Detect, + summary: "Initial detect".into(), + commands: vec![PlanCommand { + argv: vec!["openclaw".into(), "--version".into()], + timeout_sec: Some(10), + purpose: Some("collect version".into()), + continue_on_failure: Some(false), + }], + healthy: false, + done: false, + success: false, + }), + (PlanKind::Repair, 1) => { + assert_eq!(previous_results.len(), 1); + Ok(PlanResponse { + plan_id: "repair-1".into(), + plan_kind: PlanKind::Repair, + summary: "Write marker".into(), + commands: vec![PlanCommand { + argv: vec![ + "sh".into(), + "-lc".into(), + format!("printf 'fixed' > {marker}"), + ], + timeout_sec: Some(10), + purpose: Some("mark repaired".into()), + continue_on_failure: Some(false), + }], + healthy: false, + done: false, + success: false, + }) + } + (PlanKind::Detect, 2) => { + assert_eq!(previous_results.len(), 1); + assert_eq!(previous_results[0].stdout.trim(), ""); + Ok(PlanResponse { + plan_id: "detect-2".into(), + plan_kind: PlanKind::Detect, + summary: "Marker exists".into(), + commands: Vec::new(), + healthy: true, + done: true, + success: true, + }) + } + _ => Err(format!("unexpected planner request: {:?} round {}", kind, round)), + } + }, + ) + .await + .expect("remote doctor loop should complete"); + + 
assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + assert_eq!(result.round, 2); + + let marker_result = pool.exec(&cfg.id, &format!("test -f {marker}")).await.expect("marker check"); + assert_eq!(marker_result.exit_code, 0); + + let log_path = clawpal_dir.join("doctor").join("remote").join(format!("{session_id}.jsonl")); + let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor log"); + assert!(log_text.contains("\"planKind\":\"detect\"")); + assert!(log_text.contains("\"planKind\":\"repair\"")); + let _ = std::fs::remove_dir_all(temp_root); + set_active_clawpal_data_override(None).expect("clear clawpal data"); +} + +#[tokio::test] +async fn remote_doctor_docker_e2e_rescue_activation_fails_when_gateway_stays_inactive() { + if !should_run_docker_e2e() { + eprintln!("skip: set CLAWPAL_RUN_REMOTE_DOCTOR_E2E=1 to enable"); + return; + } + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container_with_env(&[("OPENCLAW_RESCUE_GATEWAY_ACTIVE", "0")]).expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let pool = app_handle.state::(); + let cfg = e2e_host_config(); + pool.connect(&cfg).await.expect("ssh connect"); + + let error = ensure_rescue_profile_ready( + &app_handle, + TargetLocation::RemoteOpenclaw, + &format!("ssh:{}", cfg.id), + ) + .await + .expect_err("rescue activation should fail when gateway remains inactive"); + + assert!(error.message.contains("did not become active")); + assert!(error.message.contains("configured_inactive")); + assert!(error + .diagnostics + .iter() + .any(|result| result.argv.join(" ") == 
"manage_rescue_bot status rescue")); +} + +#[tokio::test] +async fn remote_doctor_live_gateway_uses_configured_url_and_token() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir().join(format!("clawpal-remote-doctor-live-{}", Uuid::new_v4())); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let gateway = remote_doctor_gateway_config().expect("gateway config"); + assert_eq!(gateway.url, url); + assert_eq!(gateway.auth_token_override.as_deref(), Some(token.as_str())); + + let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) + .expect("gateway credentials"); + assert!(creds.is_some()); + + let client = NodeClient::new(); + client + .connect(&gateway.url, app.handle().clone(), creds) + .await + .expect("connect live remote doctor gateway"); + assert!(client.is_connected().await); + match live_gateway_protocol().as_str() { + "clawpal_server" => { + let response = client + .send_request( + "remote_repair_plan.request", + json!({ + "requestId": format!("live-e2e-{}", Uuid::new_v4()), + "targetId": live_gateway_instance_id(), + "context": { + "configExcerpt": { + "models": { + "providers": { + "openai-codex": { + "baseUrl": "http://127.0.0.1:9/v1" + } + } + } + 
} + } + }), + ) + .await + .expect("request clawpal-server remote repair plan"); + let plan_id = response.get("planId").and_then(|value| value.as_str()).unwrap_or_default(); + assert!(!plan_id.trim().is_empty()); + let steps = response + .get("steps") + .and_then(|value| value.as_array()) + .cloned() + .unwrap_or_default(); + assert!(!steps.is_empty()); + } + _ => { + let detect_plan = request_plan( + &client, + &detect_method_name(), + PlanKind::Detect, + &format!("live-e2e-{}", Uuid::new_v4()), + 1, + live_gateway_target_location(), + &live_gateway_instance_id(), + &[], + ) + .await + .expect("request live detection plan"); + assert!(!detect_plan.plan_id.trim().is_empty()); + } + } + client.disconnect().await.expect("disconnect"); + + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); +} + +#[tokio::test] +async fn remote_doctor_live_gateway_full_repair_loop_completes() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-live-loop-{}", + Uuid::new_v4() + )); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + 
set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let cfg = e2e_host_config(); + let pool = app_handle.state::(); + pool.connect(&cfg).await.expect("ssh connect"); + + let gateway = remote_doctor_gateway_config().expect("gateway config"); + let creds = remote_doctor_gateway_credentials(gateway.auth_token_override.as_deref()) + .expect("gateway credentials"); + let client = NodeClient::new(); + client + .connect(&gateway.url, app_handle.clone(), creds) + .await + .expect("connect live remote doctor gateway"); + + let session_id = Uuid::new_v4().to_string(); + let result = run_clawpal_server_repair_loop( + &app_handle, + &client, + &session_id, + &format!("ssh:{}", cfg.id), + TargetLocation::RemoteOpenclaw, + ) + .await + .expect("full live remote doctor repair loop should complete"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + + client.disconnect().await.expect("disconnect"); + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); +} + +#[tokio::test] +async fn remote_doctor_live_start_command_remote_target_completes_without_bridge_pairing() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + 
start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-live-start-{}", + Uuid::new_v4() + )); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); + let pool = app_handle.state::(); + + let result = start_remote_doctor_repair_impl( + app_handle.clone(), + &pool, + format!("ssh:{}", cfg.id), + "remote_openclaw".to_string(), + ) + .await + .expect("start command should complete remote repair"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + + let log_path = clawpal_dir + .join("doctor") + .join("remote") + .join(format!("{}.jsonl", result.session_id)); + let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor session log"); + assert!( + !log_text.contains("\"event\":\"bridge_connect_failed\""), + "clawpal_server path should not attempt bridge pairing: {log_text}" + ); + + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); +} + +#[tokio::test] +async fn 
remote_doctor_live_gateway_repairs_unreadable_remote_config() { + let Some(url) = live_gateway_url() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_URL to enable"); + return; + }; + let Some(token) = live_gateway_token() else { + eprintln!("skip: set CLAWPAL_REMOTE_DOCTOR_LIVE_E2E_TOKEN to enable"); + return; + }; + if !docker_available() { + eprintln!("skip: docker not available"); + return; + } + + cleanup_e2e_container(); + build_e2e_image().expect("docker build"); + start_e2e_container().expect("docker run"); + struct Cleanup; + impl Drop for Cleanup { + fn drop(&mut self) { + cleanup_e2e_container(); + } + } + let _cleanup = Cleanup; + wait_for_ssh(30).expect("ssh should become available"); + + let app = mock_app(); + let app_handle = app.handle().clone(); + app_handle.manage(SshConnectionPool::new()); + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-live-raw-config-{}", + Uuid::new_v4() + )); + let clawpal_dir = temp_root.join(".clawpal"); + create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal data"); + set_active_openclaw_home_override(None).expect("clear openclaw home override"); + + std::fs::write( + clawpal_dir.join("app-preferences.json"), + serde_json::to_string(&json!({ + "remoteDoctorGatewayUrl": url, + "remoteDoctorGatewayAuthToken": token, + })) + .expect("serialize prefs"), + ) + .expect("write app preferences"); + + let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); + let pool = app_handle.state::(); + pool.connect(&cfg).await.expect("ssh connect"); + pool.exec_login(&cfg.id, "cat > ~/.openclaw/openclaw.json <<'EOF'\n{\n ddd\n}\nEOF") + .await + .expect("corrupt remote config"); + + let result = start_remote_doctor_repair_impl( + app_handle.clone(), + &pool, + cfg.id.clone(), + "remote_openclaw".to_string(), + ) + .await + .expect("start command should 
repair unreadable config"); + + assert_eq!(result.status, "completed"); + assert!(result.latest_diagnosis_healthy); + + let repaired = pool + .exec_login(&cfg.id, "python3 - <<'PY'\nimport json, pathlib\njson.load(open(pathlib.Path.home()/'.openclaw'/'openclaw.json'))\nprint('ok')\nPY") + .await + .expect("read repaired config"); + assert_eq!(repaired.exit_code, 0, "repaired config should be valid JSON: {}", repaired.stderr); + assert_eq!(repaired.stdout.trim(), "ok"); + + set_active_clawpal_data_override(None).expect("clear clawpal data"); + let _ = std::fs::remove_dir_all(temp_root); +} diff --git a/src-tauri/src/remote_doctor/mod.rs b/src-tauri/src/remote_doctor/mod.rs index 65c363f2..61c48593 100644 --- a/src-tauri/src/remote_doctor/mod.rs +++ b/src-tauri/src/remote_doctor/mod.rs @@ -1,6 +1,8 @@ mod agent; mod config; mod legacy; +#[cfg(test)] +mod legacy_e2e; mod plan; mod repair_loops; mod session; From 84a09ba8f794e656c80057382c1433218b160157 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 15:44:25 +0800 Subject: [PATCH 14/20] style: cargo fmt --- src-tauri/src/remote_doctor/agent.rs | 28 +++++++---- src-tauri/src/remote_doctor/config.rs | 13 ++--- src-tauri/src/remote_doctor/legacy.rs | 25 +++++----- src-tauri/src/remote_doctor/legacy_e2e.rs | 53 ++++++++++++++++----- src-tauri/src/remote_doctor/plan.rs | 31 +++++++++--- src-tauri/src/remote_doctor/repair_loops.rs | 37 +++++++++----- src-tauri/src/remote_doctor/session.rs | 14 +++--- 7 files changed, 134 insertions(+), 67 deletions(-) diff --git a/src-tauri/src/remote_doctor/agent.rs b/src-tauri/src/remote_doctor/agent.rs index 9ca1fcaf..7440473d 100644 --- a/src-tauri/src/remote_doctor/agent.rs +++ b/src-tauri/src/remote_doctor/agent.rs @@ -4,8 +4,12 @@ use std::path::PathBuf; use serde_json::json; use super::config::diagnosis_context; -use super::types::{CommandResult, ConfigExcerptContext, PlanKind, RemoteDoctorProtocol, TargetLocation}; -use crate::commands::{agent::create_agent, 
agent::setup_agent_identity, RescuePrimaryDiagnosisResult}; +use super::types::{ + CommandResult, ConfigExcerptContext, PlanKind, RemoteDoctorProtocol, TargetLocation, +}; +use crate::commands::{ + agent::create_agent, agent::setup_agent_identity, RescuePrimaryDiagnosisResult, +}; use crate::config_io::read_openclaw_config; use crate::models::resolve_paths; @@ -318,7 +322,9 @@ mod tests { &ConfigExcerptContext { config_excerpt: serde_json::Value::Null, config_excerpt_raw: Some("{\n ddd\n}".into()), - config_parse_error: Some("Failed to parse target config: key must be a string".into()), + config_parse_error: Some( + "Failed to parse target config: key must be a string".into(), + ), }, &[], ); @@ -376,8 +382,7 @@ mod tests { assert_eq!(remote_doctor_agent_id(), "clawpal-remote-doctor"); assert!(!remote_doctor_agent_session_key("sess-1").contains("main")); assert!( - remote_doctor_agent_session_key("sess-1") - .starts_with("agent:clawpal-remote-doctor:") + remote_doctor_agent_session_key("sess-1").starts_with("agent:clawpal-remote-doctor:") ); } @@ -437,7 +442,10 @@ mod tests { for file_name in ["IDENTITY.md", "USER.md", "BOOTSTRAP.md", "AGENTS.md"] { let content = std::fs::read_to_string(std::path::Path::new(&workspace).join(file_name)) .unwrap_or_else(|error| panic!("read {file_name}: {error}")); - assert!(!content.trim().is_empty(), "{file_name} should not be empty"); + assert!( + !content.trim().is_empty(), + "{file_name} should not be empty" + ); } let _ = std::fs::remove_dir_all(&temp_root); } @@ -445,8 +453,12 @@ mod tests { #[test] fn only_agent_planner_protocol_requires_bridge() { assert!(protocol_requires_bridge(RemoteDoctorProtocol::AgentPlanner)); - assert!(!protocol_requires_bridge(RemoteDoctorProtocol::ClawpalServer)); - assert!(!protocol_requires_bridge(RemoteDoctorProtocol::LegacyDoctor)); + assert!(!protocol_requires_bridge( + RemoteDoctorProtocol::ClawpalServer + )); + assert!(!protocol_requires_bridge( + RemoteDoctorProtocol::LegacyDoctor + )); } 
#[test] diff --git a/src-tauri/src/remote_doctor/config.rs b/src-tauri/src/remote_doctor/config.rs index 69710032..59661de9 100644 --- a/src-tauri/src/remote_doctor/config.rs +++ b/src-tauri/src/remote_doctor/config.rs @@ -9,7 +9,7 @@ use tauri::{AppHandle, Manager, Runtime}; use super::session::append_session_log; use super::types::{ - diagnosis_issue_summaries, ConfigExcerptContext, TargetLocation, StoredRemoteDoctorIdentity, + diagnosis_issue_summaries, ConfigExcerptContext, StoredRemoteDoctorIdentity, TargetLocation, }; use crate::commands::preferences::load_app_preferences_from_paths; use crate::commands::{ @@ -77,7 +77,8 @@ pub(crate) fn remote_doctor_identity_path() -> PathBuf { .join("device-identity.json") } -pub(crate) fn load_or_create_remote_doctor_identity() -> Result { +pub(crate) fn load_or_create_remote_doctor_identity() -> Result +{ let path = remote_doctor_identity_path(); if let Ok(text) = std::fs::read_to_string(&path) { if let Ok(identity) = serde_json::from_str::(&text) { @@ -212,9 +213,7 @@ pub(crate) fn diagnosis_missing_rescue_profile(diagnosis: &RescuePrimaryDiagnosi .any(|issue| issue.code == "rescue.profile.missing") } -pub(crate) fn diagnosis_unhealthy_rescue_gateway( - diagnosis: &RescuePrimaryDiagnosisResult, -) -> bool { +pub(crate) fn diagnosis_unhealthy_rescue_gateway(diagnosis: &RescuePrimaryDiagnosisResult) -> bool { diagnosis .issues .iter() @@ -384,9 +383,7 @@ mod tests { use std::sync::{Mutex, OnceLock}; use super::*; - use crate::cli_runner::{ - set_active_clawpal_data_override, set_active_openclaw_home_override, - }; + use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; fn override_lock() -> &'static Mutex<()> { static LOCK: OnceLock> = OnceLock::new(); diff --git a/src-tauri/src/remote_doctor/legacy.rs b/src-tauri/src/remote_doctor/legacy.rs index effb4492..e664afe7 100644 --- a/src-tauri/src/remote_doctor/legacy.rs +++ b/src-tauri/src/remote_doctor/legacy.rs @@ -3,22 +3,19 @@ use 
std::time::Instant; use serde_json::{json, Value}; use tauri::{AppHandle, Manager, Runtime}; -use super::config::{ - append_diagnosis_log, - build_gateway_credentials as remote_doctor_gateway_credentials, - diagnosis_missing_rescue_profile, diagnosis_unhealthy_rescue_gateway, - primary_remote_target_host_id, remote_target_host_id_candidates, run_rescue_diagnosis, -}; use super::agent::{ build_agent_plan_prompt, remote_doctor_agent_id, remote_doctor_agent_session_key, }; -use super::plan::{ - execute_command, execute_invoke_payload, parse_plan_response, +use super::config::{ + append_diagnosis_log, build_gateway_credentials as remote_doctor_gateway_credentials, + diagnosis_missing_rescue_profile, diagnosis_unhealthy_rescue_gateway, + primary_remote_target_host_id, remote_target_host_id_candidates, run_rescue_diagnosis, }; -use super::session::{append_session_log as append_remote_doctor_log, emit_session_progress as emit_progress}; -use super::types::{ - CommandResult, ConfigExcerptContext, PlanKind, PlanResponse, TargetLocation, +use super::plan::{execute_command, execute_invoke_payload, parse_plan_response}; +use super::session::{ + append_session_log as append_remote_doctor_log, emit_session_progress as emit_progress, }; +use super::types::{CommandResult, ConfigExcerptContext, PlanKind, PlanResponse, TargetLocation}; use crate::bridge_client::BridgeClient; use crate::commands::{manage_rescue_bot, remote_manage_rescue_bot, RescuePrimaryDiagnosisResult}; use crate::node_client::NodeClient; @@ -424,7 +421,10 @@ fn extract_json_block(text: &str) -> Option<&str> { clawpal_core::doctor::extract_json_from_output(text) } -pub(crate) fn parse_agent_plan_response(kind: PlanKind, text: &str) -> Result { +pub(crate) fn parse_agent_plan_response( + kind: PlanKind, + text: &str, +) -> Result { let json_block = extract_json_block(text) .ok_or_else(|| format!("Remote doctor agent did not return JSON: {text}"))?; let value: Value = serde_json::from_str(json_block) @@ -585,5 +585,4 
@@ mod tests { assert!(rendered .contains(&"openclaw --profile rescue config get gateway.port --json".to_string())); } - } diff --git a/src-tauri/src/remote_doctor/legacy_e2e.rs b/src-tauri/src/remote_doctor/legacy_e2e.rs index 708269ce..f15a5924 100644 --- a/src-tauri/src/remote_doctor/legacy_e2e.rs +++ b/src-tauri/src/remote_doctor/legacy_e2e.rs @@ -18,8 +18,7 @@ use super::config::{ use super::legacy::ensure_rescue_profile_ready; use super::plan::request_plan; use super::repair_loops::{ - run_clawpal_server_repair_loop, run_remote_doctor_repair_loop, - start_remote_doctor_repair_impl, + run_clawpal_server_repair_loop, run_remote_doctor_repair_loop, start_remote_doctor_repair_impl, }; use super::types::{PlanCommand, PlanKind, PlanResponse, TargetLocation}; use crate::cli_runner::{set_active_clawpal_data_override, set_active_openclaw_home_override}; @@ -223,7 +222,14 @@ fn cleanup_e2e_container() { fn build_e2e_image() -> Result<(), String> { let dockerfile = E2E_DOCKERFILE.replace("ROOTPASS", E2E_ROOT_PASSWORD); let output = Command::new("docker") - .args(["build", "-t", &format!("{E2E_CONTAINER_NAME}:latest"), "-f", "-", "."]) + .args([ + "build", + "-t", + &format!("{E2E_CONTAINER_NAME}:latest"), + "-f", + "-", + ".", + ]) .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped()) @@ -321,7 +327,8 @@ async fn remote_doctor_docker_e2e_loop_completes() { let _cleanup = Cleanup; wait_for_ssh(30).expect("ssh should become available"); - let temp_root = std::env::temp_dir().join(format!("clawpal-remote-doctor-e2e-{}", Uuid::new_v4())); + let temp_root = + std::env::temp_dir().join(format!("clawpal-remote-doctor-e2e-{}", Uuid::new_v4())); let clawpal_dir = temp_root.join(".clawpal"); create_dir_all(&clawpal_dir).expect("create clawpal dir"); set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) @@ -390,7 +397,10 @@ async fn remote_doctor_docker_e2e_loop_completes() { success: true, }) } 
- _ => Err(format!("unexpected planner request: {:?} round {}", kind, round)), + _ => Err(format!( + "unexpected planner request: {:?} round {}", + kind, round + )), } }, ) @@ -401,10 +411,16 @@ async fn remote_doctor_docker_e2e_loop_completes() { assert!(result.latest_diagnosis_healthy); assert_eq!(result.round, 2); - let marker_result = pool.exec(&cfg.id, &format!("test -f {marker}")).await.expect("marker check"); + let marker_result = pool + .exec(&cfg.id, &format!("test -f {marker}")) + .await + .expect("marker check"); assert_eq!(marker_result.exit_code, 0); - let log_path = clawpal_dir.join("doctor").join("remote").join(format!("{session_id}.jsonl")); + let log_path = clawpal_dir + .join("doctor") + .join("remote") + .join(format!("{session_id}.jsonl")); let log_text = std::fs::read_to_string(&log_path).expect("read remote doctor log"); assert!(log_text.contains("\"planKind\":\"detect\"")); assert!(log_text.contains("\"planKind\":\"repair\"")); @@ -472,7 +488,8 @@ async fn remote_doctor_live_gateway_uses_configured_url_and_token() { let app = mock_app(); let app_handle = app.handle().clone(); app_handle.manage(SshConnectionPool::new()); - let temp_root = std::env::temp_dir().join(format!("clawpal-remote-doctor-live-{}", Uuid::new_v4())); + let temp_root = + std::env::temp_dir().join(format!("clawpal-remote-doctor-live-{}", Uuid::new_v4())); let clawpal_dir = temp_root.join(".clawpal"); create_dir_all(&clawpal_dir).expect("create clawpal dir"); set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) @@ -525,7 +542,10 @@ async fn remote_doctor_live_gateway_uses_configured_url_and_token() { ) .await .expect("request clawpal-server remote repair plan"); - let plan_id = response.get("planId").and_then(|value| value.as_str()).unwrap_or_default(); + let plan_id = response + .get("planId") + .and_then(|value| value.as_str()) + .unwrap_or_default(); assert!(!plan_id.trim().is_empty()); let steps = response .get("steps") @@ -770,9 +790,12 @@ 
async fn remote_doctor_live_gateway_repairs_unreadable_remote_config() { let cfg = crate::commands::ssh::upsert_ssh_host(e2e_host_config()).expect("save ssh host"); let pool = app_handle.state::(); pool.connect(&cfg).await.expect("ssh connect"); - pool.exec_login(&cfg.id, "cat > ~/.openclaw/openclaw.json <<'EOF'\n{\n ddd\n}\nEOF") - .await - .expect("corrupt remote config"); + pool.exec_login( + &cfg.id, + "cat > ~/.openclaw/openclaw.json <<'EOF'\n{\n ddd\n}\nEOF", + ) + .await + .expect("corrupt remote config"); let result = start_remote_doctor_repair_impl( app_handle.clone(), @@ -790,7 +813,11 @@ async fn remote_doctor_live_gateway_repairs_unreadable_remote_config() { .exec_login(&cfg.id, "python3 - <<'PY'\nimport json, pathlib\njson.load(open(pathlib.Path.home()/'.openclaw'/'openclaw.json'))\nprint('ok')\nPY") .await .expect("read repaired config"); - assert_eq!(repaired.exit_code, 0, "repaired config should be valid JSON: {}", repaired.stderr); + assert_eq!( + repaired.exit_code, 0, + "repaired config should be valid JSON: {}", + repaired.stderr + ); assert_eq!(repaired.stdout.trim(), "ok"); set_active_clawpal_data_override(None).expect("clear clawpal data"); diff --git a/src-tauri/src/remote_doctor/plan.rs b/src-tauri/src/remote_doctor/plan.rs index 118071b3..1d452200 100644 --- a/src-tauri/src/remote_doctor/plan.rs +++ b/src-tauri/src/remote_doctor/plan.rs @@ -91,7 +91,11 @@ pub(crate) async fn execute_clawpal_doctor_command( pool, target_location, instance_id, - &["sh".into(), "-lc".into(), "command -v openclaw || true".into()], + &[ + "sh".into(), + "-lc".into(), + "command -v openclaw || true".into(), + ], ) .await?; Ok(json!({ @@ -267,7 +271,11 @@ pub(crate) async fn execute_command( let result = match target_location { TargetLocation::LocalOpenclaw => { if argv[0] == "openclaw" { - let arg_refs = argv.iter().skip(1).map(String::as_str).collect::>(); + let arg_refs = argv + .iter() + .skip(1) + .map(String::as_str) + .collect::>(); let output = 
run_openclaw(&arg_refs)?; CommandResult { argv: argv.to_vec(), @@ -299,7 +307,11 @@ pub(crate) async fn execute_command( TargetLocation::RemoteOpenclaw => { let host_id = primary_remote_target_host_id(instance_id)?; if argv[0] == "openclaw" { - let arg_refs = argv.iter().skip(1).map(String::as_str).collect::>(); + let arg_refs = argv + .iter() + .skip(1) + .map(String::as_str) + .collect::>(); let output = run_openclaw_remote(pool, &host_id, &arg_refs).await?; CommandResult { argv: argv.to_vec(), @@ -310,7 +322,9 @@ pub(crate) async fn execute_command( timed_out: false, } } else { - let output = pool.exec_login(&host_id, &build_shell_command(argv)).await?; + let output = pool + .exec_login(&host_id, &build_shell_command(argv)) + .await?; CommandResult { argv: argv.to_vec(), exit_code: Some(output.exit_code as i32), @@ -365,7 +379,10 @@ pub(crate) fn validate_plan_command_argv(argv: &[String]) -> Result<(), String> if supported { Ok(()) } else { - Err(format!("Unsupported openclaw plan command: {}", argv.join(" "))) + Err(format!( + "Unsupported openclaw plan command: {}", + argv.join(" ") + )) } } @@ -391,7 +408,9 @@ pub(crate) fn command_result_stdout(value: &Value) -> String { .get("stdout") .and_then(Value::as_str) .map(str::to_string) - .unwrap_or_else(|| serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string())) + .unwrap_or_else(|| { + serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string()) + }) } pub(crate) async fn execute_plan_command( diff --git a/src-tauri/src/remote_doctor/repair_loops.rs b/src-tauri/src/remote_doctor/repair_loops.rs index f0fa62a8..f11ea67c 100644 --- a/src-tauri/src/remote_doctor/repair_loops.rs +++ b/src-tauri/src/remote_doctor/repair_loops.rs @@ -10,9 +10,8 @@ use super::agent::{ }; use super::config::{ append_diagnosis_log, build_gateway_credentials, config_excerpt_log_summary, - diagnosis_has_only_non_auto_fixable_issues, diagnosis_is_healthy, - empty_config_excerpt_context, empty_diagnosis, 
load_gateway_config, read_target_config_raw, - run_rescue_diagnosis, + diagnosis_has_only_non_auto_fixable_issues, diagnosis_is_healthy, empty_config_excerpt_context, + empty_diagnosis, load_gateway_config, read_target_config_raw, run_rescue_diagnosis, }; use super::legacy::{ ensure_agent_bridge_connected, ensure_remote_target_connected, parse_agent_plan_response, @@ -20,9 +19,8 @@ use super::legacy::{ }; use super::plan::{ agent_plan_step_types, apply_config_set, apply_config_unset, execute_command, - execute_plan_command, plan_command_failure_message, - report_clawpal_server_final_result, report_clawpal_server_step_result, - request_clawpal_server_plan, request_plan, + execute_plan_command, plan_command_failure_message, report_clawpal_server_final_result, + report_clawpal_server_step_result, request_clawpal_server_plan, request_plan, }; use super::session::{ append_session_log, emit_session_progress, result_for_completion, @@ -397,13 +395,23 @@ pub(crate) async fn run_clawpal_server_repair_loop( None, ); apply_config_set(&mut current_config, path, value)?; - super::config::write_target_config(app, target_location, instance_id, ¤t_config).await?; - super::config::restart_target_gateway(app, target_location, instance_id).await?; + super::config::write_target_config( + app, + target_location, + instance_id, + ¤t_config, + ) + .await?; + super::config::restart_target_gateway(app, target_location, instance_id) + .await?; result.argv = vec!["configSet".into(), path.into()]; result.stdout = format!("Updated {path}"); } "configUnset" => { - let path = step.path.as_deref().ok_or("configUnset step missing path")?; + let path = step + .path + .as_deref() + .ok_or("configUnset step missing path")?; emit_session_progress( Some(app), session_id, @@ -414,8 +422,15 @@ pub(crate) async fn run_clawpal_server_repair_loop( None, ); apply_config_unset(&mut current_config, path)?; - super::config::write_target_config(app, target_location, instance_id, ¤t_config).await?; - 
super::config::restart_target_gateway(app, target_location, instance_id).await?; + super::config::write_target_config( + app, + target_location, + instance_id, + ¤t_config, + ) + .await?; + super::config::restart_target_gateway(app, target_location, instance_id) + .await?; result.argv = vec!["configUnset".into(), path.into()]; result.stdout = format!("Removed {path}"); } diff --git a/src-tauri/src/remote_doctor/session.rs b/src-tauri/src/remote_doctor/session.rs index 3ca70bf7..189597a4 100644 --- a/src-tauri/src/remote_doctor/session.rs +++ b/src-tauri/src/remote_doctor/session.rs @@ -125,7 +125,10 @@ mod tests { set_active_clawpal_data_override(None).expect("clear clawpal override"); - let log_path = clawpal_dir.join("doctor").join("remote").join("sess-1.jsonl"); + let log_path = clawpal_dir + .join("doctor") + .join("remote") + .join("sess-1.jsonl"); let log_text = std::fs::read_to_string(&log_path).expect("read session log"); assert!(log_text.contains("\"event\":\"hello\"")); @@ -156,13 +159,8 @@ mod tests { assert_eq!(completed.last_command, last_command); assert!(completed.latest_diagnosis_healthy); - let warning = result_for_completion_with_warnings( - "sess-2", - 5, - PlanKind::Repair, - None, - "warning", - ); + let warning = + result_for_completion_with_warnings("sess-2", 5, PlanKind::Repair, None, "warning"); assert_eq!(warning.session_id, "sess-2"); assert_eq!(warning.round, 5); assert_eq!(warning.last_plan_kind, "repair"); From 423afc29725311015a519b82b4f53763b6ff02d7 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 15:57:37 +0800 Subject: [PATCH 15/20] refactor: split remote doctor repair loops by protocol --- src-tauri/src/remote_doctor/legacy_e2e.rs | 2 +- src-tauri/src/remote_doctor/repair_loops.rs | 844 +----------------- .../repair_loops/agent_planner.rs | 243 +++++ .../repair_loops/clawpal_server.rs | 259 ++++++ .../repair_loops/legacy_doctor.rs | 175 ++++ .../src/remote_doctor/repair_loops/shared.rs | 153 ++++ 6 files 
changed, 848 insertions(+), 828 deletions(-) create mode 100644 src-tauri/src/remote_doctor/repair_loops/agent_planner.rs create mode 100644 src-tauri/src/remote_doctor/repair_loops/clawpal_server.rs create mode 100644 src-tauri/src/remote_doctor/repair_loops/legacy_doctor.rs create mode 100644 src-tauri/src/remote_doctor/repair_loops/shared.rs diff --git a/src-tauri/src/remote_doctor/legacy_e2e.rs b/src-tauri/src/remote_doctor/legacy_e2e.rs index f15a5924..7db18af0 100644 --- a/src-tauri/src/remote_doctor/legacy_e2e.rs +++ b/src-tauri/src/remote_doctor/legacy_e2e.rs @@ -347,7 +347,7 @@ async fn remote_doctor_docker_e2e_loop_completes() { &session_id, &format!("ssh:{}", cfg.id), TargetLocation::RemoteOpenclaw, - |kind, round, previous_results| async move { + |kind, round, previous_results: Vec| async move { match (kind, round) { (PlanKind::Detect, 1) => Ok(PlanResponse { plan_id: "detect-1".into(), diff --git a/src-tauri/src/remote_doctor/repair_loops.rs b/src-tauri/src/remote_doctor/repair_loops.rs index f11ea67c..53097e11 100644 --- a/src-tauri/src/remote_doctor/repair_loops.rs +++ b/src-tauri/src/remote_doctor/repair_loops.rs @@ -1,768 +1,37 @@ -use serde_json::{json, Value}; +mod agent_planner; +mod clawpal_server; +mod legacy_doctor; +mod shared; + +use serde_json::json; use tauri::{AppHandle, Runtime, State}; use uuid::Uuid; +pub(crate) use self::agent_planner::run_agent_planner_repair_loop; +pub(crate) use self::clawpal_server::run_clawpal_server_repair_loop; +pub(crate) use self::legacy_doctor::run_remote_doctor_repair_loop; +use self::shared::is_unknown_method_error; use super::agent::{ build_agent_plan_prompt, configured_remote_doctor_protocol, default_remote_doctor_protocol, detect_method_name, ensure_agent_workspace_ready, gateway_url_is_local, - next_agent_plan_kind_for_round, protocol_requires_bridge, protocol_runs_rescue_preflight, - remote_doctor_agent_id, remote_doctor_agent_session_key, repair_method_name, + protocol_requires_bridge, 
remote_doctor_agent_id, remote_doctor_agent_session_key, + repair_method_name, }; use super::config::{ - append_diagnosis_log, build_gateway_credentials, config_excerpt_log_summary, - diagnosis_has_only_non_auto_fixable_issues, diagnosis_is_healthy, empty_config_excerpt_context, - empty_diagnosis, load_gateway_config, read_target_config_raw, run_rescue_diagnosis, + build_gateway_credentials, empty_config_excerpt_context, empty_diagnosis, load_gateway_config, }; use super::legacy::{ ensure_agent_bridge_connected, ensure_remote_target_connected, parse_agent_plan_response, - repair_rescue_gateway_if_needed, request_agent_plan, run_agent_request_with_bridge, -}; -use super::plan::{ - agent_plan_step_types, apply_config_set, apply_config_unset, execute_command, - execute_plan_command, plan_command_failure_message, report_clawpal_server_final_result, - report_clawpal_server_step_result, request_clawpal_server_plan, request_plan, -}; -use super::session::{ - append_session_log, emit_session_progress, result_for_completion, - result_for_completion_with_warnings, -}; -use super::types::{ - diagnosis_issue_summaries, parse_target_location, ClawpalServerPlanStep, CommandResult, - PlanKind, PlanResponse, RemoteDoctorProtocol, RemoteDoctorRepairResult, RepairRoundObservation, - TargetLocation, + run_agent_request_with_bridge, }; +use super::plan::request_plan; +use super::session::append_session_log; +use super::types::{parse_target_location, PlanKind, RemoteDoctorProtocol, RemoteDoctorRepairResult, TargetLocation}; use crate::bridge_client::BridgeClient; use crate::commands::logs::log_dev; use crate::node_client::NodeClient; use crate::ssh::SshConnectionPool; -const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; -const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; - -fn is_unknown_method_error(error: &str) -> bool { - error.contains("unknown method") - || error.contains("\"code\":\"INVALID_REQUEST\"") - || error.contains("\"code\": \"INVALID_REQUEST\"") -} - -fn 
clawpal_server_step_type_summary(steps: &[ClawpalServerPlanStep]) -> Value { - let mut counts = serde_json::Map::new(); - for step in steps { - let entry = counts - .entry(step.step_type.clone()) - .or_insert_with(|| Value::from(0_u64)); - let next = entry.as_u64().unwrap_or(0) + 1; - *entry = Value::from(next); - } - Value::Object(counts) -} - -pub(crate) fn repair_plan_stalled( - observations: &[RepairRoundObservation], - threshold: usize, -) -> bool { - if observations.len() < threshold { - return false; - } - let recent = &observations[observations.len() - threshold..]; - let Some(first) = recent.first() else { - return false; - }; - !first.issue_summaries.is_empty() - && recent.iter().all(|entry| { - entry.step_types.len() == 1 - && entry.step_types[0] == "doctorRediagnose" - && entry.diagnosis_signature == first.diagnosis_signature - }) -} - -pub(crate) fn round_limit_error_message( - diagnosis: &crate::commands::RescuePrimaryDiagnosisResult, - last_step_types: &[String], -) -> String { - let issue_summary = serde_json::to_string(&diagnosis_issue_summaries(diagnosis)) - .unwrap_or_else(|_| "[]".to_string()); - let step_summary = if last_step_types.is_empty() { - "[]".to_string() - } else { - serde_json::to_string(last_step_types).unwrap_or_else(|_| "[]".to_string()) - }; - format!( - "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean rescue diagnosis result. Last issues: {issue_summary}. Last repair step types: {step_summary}." - ) -} - -pub(crate) fn stalled_plan_error_message(observation: &RepairRoundObservation) -> String { - let issue_summary = - serde_json::to_string(&observation.issue_summaries).unwrap_or_else(|_| "[]".to_string()); - let step_summary = - serde_json::to_string(&observation.step_types).unwrap_or_else(|_| "[]".to_string()); - format!( - "Remote Doctor did not return actionable repair steps by round {} after {} repeated rounds. Last issues: {}. 
Last repair step types: {}.", - observation.round, - REPAIR_PLAN_STALL_THRESHOLD, - issue_summary, - step_summary - ) -} - -pub(crate) async fn run_remote_doctor_repair_loop( - app: Option<&AppHandle>, - pool: &SshConnectionPool, - session_id: &str, - instance_id: &str, - target_location: TargetLocation, - mut request_plan_fn: F, -) -> Result -where - F: FnMut(PlanKind, usize, Vec) -> Fut, - Fut: std::future::Future>, -{ - let mut previous_results: Vec = Vec::new(); - let mut last_command: Option> = None; - let mut last_plan_kind = PlanKind::Detect; - - for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { - emit_session_progress( - app, - session_id, - round, - "planning_detect", - format!("Requesting detection plan for round {round}"), - Some(PlanKind::Detect), - None, - ); - let detect_plan = - request_plan_fn(PlanKind::Detect, round, previous_results.clone()).await?; - append_session_log( - session_id, - json!({ - "event": "plan_received", - "round": round, - "planKind": "detect", - "planId": detect_plan.plan_id, - "summary": detect_plan.summary, - "commandCount": detect_plan.commands.len(), - "healthy": detect_plan.healthy, - "done": detect_plan.done, - }), - ); - if detect_plan.healthy || (detect_plan.done && detect_plan.commands.is_empty()) { - return Ok(RemoteDoctorRepairResult { - mode: "remoteDoctor".into(), - status: "completed".into(), - round, - phase: "completed".into(), - last_plan_kind: match last_plan_kind { - PlanKind::Detect => "detect".into(), - PlanKind::Investigate => "investigate".into(), - PlanKind::Repair => "repair".into(), - }, - latest_diagnosis_healthy: true, - last_command, - session_id: session_id.to_string(), - message: "Remote Doctor repair completed with a healthy detection result.".into(), - }); - } - previous_results.clear(); - for command in &detect_plan.commands { - last_command = Some(command.argv.clone()); - emit_session_progress( - app, - session_id, - round, - "executing_detect", - format!("Running detect command: {}", 
command.argv.join(" ")), - Some(PlanKind::Detect), - Some(command.argv.clone()), - ); - let command_result = - execute_command(pool, target_location, instance_id, &command.argv).await?; - append_session_log( - session_id, - json!({ - "event": "command_result", - "round": round, - "planKind": "detect", - "result": command_result, - }), - ); - if command_result.exit_code.unwrap_or(1) != 0 - && !command.continue_on_failure.unwrap_or(false) - { - previous_results.push(command_result); - return Err(format!( - "Detect command failed in round {round}: {}", - command.argv.join(" ") - )); - } - previous_results.push(command_result); - } - - emit_session_progress( - app, - session_id, - round, - "planning_repair", - format!("Requesting repair plan for round {round}"), - Some(PlanKind::Repair), - None, - ); - let repair_plan = - request_plan_fn(PlanKind::Repair, round, previous_results.clone()).await?; - last_plan_kind = PlanKind::Repair; - append_session_log( - session_id, - json!({ - "event": "plan_received", - "round": round, - "planKind": "repair", - "planId": repair_plan.plan_id, - "summary": repair_plan.summary, - "commandCount": repair_plan.commands.len(), - "success": repair_plan.success, - "done": repair_plan.done, - }), - ); - previous_results.clear(); - for command in &repair_plan.commands { - last_command = Some(command.argv.clone()); - emit_session_progress( - app, - session_id, - round, - "executing_repair", - format!("Running repair command: {}", command.argv.join(" ")), - Some(PlanKind::Repair), - Some(command.argv.clone()), - ); - let command_result = - execute_command(pool, target_location, instance_id, &command.argv).await?; - append_session_log( - session_id, - json!({ - "event": "command_result", - "round": round, - "planKind": "repair", - "result": command_result, - }), - ); - if command_result.exit_code.unwrap_or(1) != 0 - && !command.continue_on_failure.unwrap_or(false) - { - previous_results.push(command_result); - return Err(format!( - "Repair 
command failed in round {round}: {}", - command.argv.join(" ") - )); - } - previous_results.push(command_result); - } - } - - append_session_log( - session_id, - json!({ - "event": "session_complete", - "status": "failed", - "reason": "round_limit_exceeded", - }), - ); - Err(format!( - "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean detection result" - )) -} - -pub(crate) async fn run_clawpal_server_repair_loop( - app: &AppHandle, - client: &NodeClient, - session_id: &str, - instance_id: &str, - target_location: TargetLocation, -) -> Result { - let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "initial", 0, &diagnosis); - if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { - repair_rescue_gateway_if_needed( - app, - session_id, - 0, - target_location, - instance_id, - &mut diagnosis, - ) - .await?; - } - if diagnosis_is_healthy(&diagnosis) { - return Ok(result_for_completion( - session_id, - 0, - PlanKind::Detect, - None, - "Remote Doctor repair skipped because diagnosis is already healthy.", - )); - } - - let mut last_command = None; - let mut round_observations: Vec = Vec::new(); - let mut last_step_types: Vec = Vec::new(); - for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { - emit_session_progress( - Some(app), - session_id, - round, - "planning_repair", - format!("Requesting remote repair plan for round {round}"), - Some(PlanKind::Repair), - None, - ); - let config_context = super::config::build_config_excerpt_context( - &read_target_config_raw(app, target_location, instance_id).await?, - ); - append_session_log( - session_id, - json!({ - "event": "plan_request_context", - "protocol": "clawpal_server", - "round": round, - "planKind": "repair", - "instanceId": instance_id, - "targetLocation": target_location, - "configContext": config_excerpt_log_summary(&config_context), - "diagnosisIssueCount": diagnosis.issues.len(), - "diagnosisIssues": 
diagnosis_issue_summaries(&diagnosis), - }), - ); - if config_context.config_parse_error.is_some() { - append_session_log( - session_id, - json!({ - "event": "config_recovery_context", - "round": round, - "context": config_excerpt_log_summary(&config_context), - }), - ); - } - let plan = request_clawpal_server_plan( - client, - session_id, - round, - instance_id, - target_location, - &diagnosis, - &config_context, - ) - .await?; - append_session_log( - session_id, - json!({ - "event": "plan_received", - "protocol": "clawpal_server", - "round": round, - "planKind": "repair", - "planId": plan.plan_id, - "summary": plan.summary, - "stepCount": plan.steps.len(), - "stepTypeCounts": clawpal_server_step_type_summary(&plan.steps), - }), - ); - - let mut current_config = config_context.config_excerpt.clone(); - let mut rediagnosed = false; - let mut round_step_types = Vec::new(); - for (step_index, step) in plan.steps.iter().enumerate() { - round_step_types.push(step.step_type.clone()); - let mut result = CommandResult { - argv: Vec::new(), - exit_code: Some(0), - stdout: String::new(), - stderr: String::new(), - duration_ms: 0, - timed_out: false, - }; - let started = std::time::Instant::now(); - match step.step_type.as_str() { - "configSet" => { - let path = step.path.as_deref().ok_or("configSet step missing path")?; - let value = step.value.clone().ok_or("configSet step missing value")?; - emit_session_progress( - Some(app), - session_id, - round, - "executing_repair", - format!("Applying config set: {path}"), - Some(PlanKind::Repair), - None, - ); - apply_config_set(&mut current_config, path, value)?; - super::config::write_target_config( - app, - target_location, - instance_id, - ¤t_config, - ) - .await?; - super::config::restart_target_gateway(app, target_location, instance_id) - .await?; - result.argv = vec!["configSet".into(), path.into()]; - result.stdout = format!("Updated {path}"); - } - "configUnset" => { - let path = step - .path - .as_deref() - 
.ok_or("configUnset step missing path")?; - emit_session_progress( - Some(app), - session_id, - round, - "executing_repair", - format!("Applying config unset: {path}"), - Some(PlanKind::Repair), - None, - ); - apply_config_unset(&mut current_config, path)?; - super::config::write_target_config( - app, - target_location, - instance_id, - ¤t_config, - ) - .await?; - super::config::restart_target_gateway(app, target_location, instance_id) - .await?; - result.argv = vec!["configUnset".into(), path.into()]; - result.stdout = format!("Removed {path}"); - } - "doctorRediagnose" => { - emit_session_progress( - Some(app), - session_id, - round, - "planning_detect", - format!("Running rescue diagnosis after repair plan round {round}"), - Some(PlanKind::Detect), - None, - ); - diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "post_step_rediagnose", round, &diagnosis); - rediagnosed = true; - result.argv = vec!["doctorRediagnose".into()]; - result.stdout = format!( - "Diagnosis status={} issues={}", - diagnosis.status, - diagnosis.issues.len() - ); - } - other => { - result.exit_code = Some(1); - result.stderr = format!("Unsupported clawpal-server step type: {other}"); - } - } - result.duration_ms = started.elapsed().as_millis() as u64; - last_command = Some(result.argv.clone()); - append_session_log( - session_id, - json!({ - "event": "command_result", - "protocol": "clawpal_server", - "round": round, - "planKind": "repair", - "stepIndex": step_index, - "step": step, - "result": result, - }), - ); - report_clawpal_server_step_result(client, &plan.plan_id, step_index, step, &result) - .await; - if result.exit_code.unwrap_or(1) != 0 { - return Err(result.stderr); - } - } - - if !rediagnosed { - diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "post_round", round, &diagnosis); - } - if protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { - 
repair_rescue_gateway_if_needed( - app, - session_id, - round, - target_location, - instance_id, - &mut diagnosis, - ) - .await?; - } - last_step_types = round_step_types.clone(); - round_observations.push(RepairRoundObservation::new( - round, - &round_step_types, - &diagnosis, - )); - if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { - let observation = round_observations - .last() - .expect("stalled observations should contain current round"); - append_session_log( - session_id, - json!({ - "event": "repair_plan_stalled", - "protocol": "clawpal_server", - "round": round, - "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, - "latestStepTypes": observation.step_types, - "issues": observation.issue_summaries, - }), - ); - return Err(stalled_plan_error_message(observation)); - } - let healthy = diagnosis_is_healthy(&diagnosis); - report_clawpal_server_final_result(client, &plan.plan_id, healthy, &diagnosis).await; - if healthy { - return Ok(result_for_completion( - session_id, - round, - PlanKind::Repair, - last_command, - "Remote Doctor repair completed with a healthy rescue diagnosis.", - )); - } - } - - Err(round_limit_error_message(&diagnosis, &last_step_types)) -} - -pub(crate) async fn run_agent_planner_repair_loop( - app: &AppHandle, - client: &NodeClient, - bridge_client: &BridgeClient, - pool: &SshConnectionPool, - session_id: &str, - instance_id: &str, - target_location: TargetLocation, -) -> Result { - let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "initial", 0, &diagnosis); - if diagnosis_is_healthy(&diagnosis) { - return Ok(result_for_completion( - session_id, - 0, - PlanKind::Detect, - None, - "Remote Doctor repair skipped because diagnosis is already healthy.", - )); - } - - let mut previous_results: Vec = Vec::new(); - let mut last_command = None; - let mut last_step_types: Vec = Vec::new(); - let mut round_observations: Vec = Vec::new(); - - for round in 
1..=MAX_REMOTE_DOCTOR_ROUNDS { - let kind = next_agent_plan_kind_for_round(&diagnosis, &previous_results); - let config_context = super::config::build_config_excerpt_context( - &read_target_config_raw(app, target_location, instance_id).await?, - ); - let phase = match kind { - PlanKind::Detect => "planning_detect", - PlanKind::Investigate => "planning_investigate", - PlanKind::Repair => "planning_repair", - }; - let line = match kind { - PlanKind::Detect => format!("Requesting detection plan for round {round}"), - PlanKind::Investigate => format!("Requesting investigation plan for round {round}"), - PlanKind::Repair => format!("Requesting repair plan for round {round}"), - }; - emit_session_progress(Some(app), session_id, round, phase, line, Some(kind), None); - append_session_log( - session_id, - json!({ - "event": "plan_request_context", - "protocol": "agent", - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "instanceId": instance_id, - "targetLocation": target_location, - "configContext": config_excerpt_log_summary(&config_context), - "diagnosisIssueCount": diagnosis.issues.len(), - "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), - }), - ); - let plan = request_agent_plan( - app, - client, - bridge_client, - pool, - session_id, - round, - kind, - target_location, - instance_id, - &diagnosis, - &config_context, - &previous_results, - ) - .await?; - append_session_log( - session_id, - json!({ - "event": "plan_received", - "protocol": "agent", - "round": round, - "planKind": match plan.plan_kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "planId": plan.plan_id, - "summary": plan.summary, - "commandCount": plan.commands.len(), - "healthy": plan.healthy, - "done": plan.done, - "success": plan.success, - }), - ); - previous_results.clear(); - last_step_types = 
agent_plan_step_types(&plan); - for command in &plan.commands { - last_command = Some(command.argv.clone()); - emit_session_progress( - Some(app), - session_id, - round, - match kind { - PlanKind::Detect => "executing_detect", - PlanKind::Investigate => "executing_investigate", - PlanKind::Repair => "executing_repair", - }, - format!( - "Running {} command: {}", - match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - command.argv.join(" ") - ), - Some(kind), - Some(command.argv.clone()), - ); - append_session_log( - session_id, - json!({ - "event": "command_start", - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "argv": command.argv, - "timeoutSec": command.timeout_sec, - "purpose": command.purpose, - }), - ); - let command_result = - match execute_plan_command(app, pool, target_location, instance_id, &command.argv) - .await - { - Ok(result) => result, - Err(error) => { - return Err(plan_command_failure_message( - kind, - round, - &command.argv, - &error, - )); - } - }; - append_session_log( - session_id, - json!({ - "event": "command_result", - "round": round, - "planKind": match kind { - PlanKind::Detect => "detect", - PlanKind::Investigate => "investigate", - PlanKind::Repair => "repair", - }, - "result": command_result, - }), - ); - if command_result.exit_code.unwrap_or(1) != 0 - && !command.continue_on_failure.unwrap_or(false) - { - return Err(format!( - "{} command failed in round {round}: {}", - match kind { - PlanKind::Detect => "Detect", - PlanKind::Investigate => "Investigate", - PlanKind::Repair => "Repair", - }, - command.argv.join(" ") - )); - } - previous_results.push(command_result); - } - - diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; - append_diagnosis_log(session_id, "post_round", round, &diagnosis); - if diagnosis_is_healthy(&diagnosis) 
{ - return Ok(result_for_completion( - session_id, - round, - kind, - last_command, - "Remote Doctor repair completed with a healthy rescue diagnosis.", - )); - } - if matches!(kind, PlanKind::Repair) - && plan.done - && plan.commands.is_empty() - && diagnosis_has_only_non_auto_fixable_issues(&diagnosis) - { - return Ok(result_for_completion_with_warnings( - session_id, - round, - kind, - last_command, - "Remote Doctor completed all safe automatic repairs. Remaining issues are non-auto-fixable warnings.", - )); - } - - round_observations.push(RepairRoundObservation::new( - round, - &last_step_types, - &diagnosis, - )); - if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { - let observation = round_observations - .last() - .expect("stalled observations should contain current round"); - append_session_log( - session_id, - json!({ - "event": "repair_plan_stalled", - "protocol": "agent", - "round": round, - "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, - "latestStepTypes": observation.step_types, - "issues": observation.issue_summaries, - }), - ); - return Err(stalled_plan_error_message(observation)); - } - } - - Err(round_limit_error_message(&diagnosis, &last_step_types)) -} - pub(crate) async fn start_remote_doctor_repair_impl( app: AppHandle, pool: &SshConnectionPool, @@ -1121,82 +390,3 @@ pub async fn start_remote_doctor_repair( ) -> Result { start_remote_doctor_repair_impl(app, &pool, instance_id, target_location).await } - -#[cfg(test)] -mod tests { - use super::*; - use crate::commands::{RescuePrimaryDiagnosisResult, RescuePrimaryIssue, RescuePrimarySummary}; - - #[test] - fn repeated_rediagnose_only_rounds_are_detected_as_stalled() { - let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { - id: "issue-1".to_string(), - code: "invalid.base_url".to_string(), - severity: "medium".to_string(), - message: "Provider base URL is invalid".to_string(), - auto_fixable: true, - fix_hint: Some("Reset baseUrl".to_string()), - source: 
"config".to_string(), - }]); - let step_types = vec!["doctorRediagnose".to_string()]; - - assert!(!repair_plan_stalled( - &[ - RepairRoundObservation::new(1, &step_types, &diagnosis), - RepairRoundObservation::new(2, &step_types, &diagnosis), - ], - 3, - )); - assert!(repair_plan_stalled( - &[ - RepairRoundObservation::new(1, &step_types, &diagnosis), - RepairRoundObservation::new(2, &step_types, &diagnosis), - RepairRoundObservation::new(3, &step_types, &diagnosis), - ], - 3, - )); - } - - #[test] - fn round_limit_error_message_includes_latest_issues_and_step_types() { - let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { - id: "issue-1".to_string(), - code: "invalid.base_url".to_string(), - severity: "medium".to_string(), - message: "Provider base URL is invalid".to_string(), - auto_fixable: true, - fix_hint: Some("Reset baseUrl".to_string()), - source: "config".to_string(), - }]); - let error = round_limit_error_message(&diagnosis, &["doctorRediagnose".to_string()]); - assert!(error.contains("invalid.base_url")); - assert!(error.contains("doctorRediagnose")); - assert!(error.contains("Provider base URL is invalid")); - } - - fn sample_diagnosis(issues: Vec) -> RescuePrimaryDiagnosisResult { - RescuePrimaryDiagnosisResult { - status: "degraded".to_string(), - checked_at: "2026-03-19T00:00:00Z".to_string(), - target_profile: "primary".to_string(), - rescue_profile: "rescue".to_string(), - rescue_configured: true, - rescue_port: Some(18789), - summary: RescuePrimarySummary { - status: "degraded".to_string(), - headline: "Issues found".to_string(), - recommended_action: "Repair".to_string(), - fixable_issue_count: 0, - selected_fix_issue_ids: Vec::new(), - root_cause_hypotheses: Vec::new(), - fix_steps: Vec::new(), - confidence: None, - citations: Vec::new(), - version_awareness: None, - }, - sections: Vec::new(), - checks: Vec::new(), - issues, - } - } -} diff --git a/src-tauri/src/remote_doctor/repair_loops/agent_planner.rs 
b/src-tauri/src/remote_doctor/repair_loops/agent_planner.rs new file mode 100644 index 00000000..090bf7cc --- /dev/null +++ b/src-tauri/src/remote_doctor/repair_loops/agent_planner.rs @@ -0,0 +1,243 @@ +use serde_json::json; +use tauri::{AppHandle, Runtime}; + +use super::shared::{ + repair_plan_stalled, round_limit_error_message, stalled_plan_error_message, + MAX_REMOTE_DOCTOR_ROUNDS, REPAIR_PLAN_STALL_THRESHOLD, +}; +use super::super::agent::next_agent_plan_kind_for_round; +use super::super::config::{ + append_diagnosis_log, build_config_excerpt_context, config_excerpt_log_summary, + diagnosis_has_only_non_auto_fixable_issues, diagnosis_is_healthy, read_target_config_raw, + run_rescue_diagnosis, +}; +use super::super::legacy::request_agent_plan; +use super::super::plan::{agent_plan_step_types, execute_plan_command, plan_command_failure_message}; +use super::super::session::{ + append_session_log, emit_session_progress, result_for_completion, + result_for_completion_with_warnings, +}; +use super::super::types::{ + diagnosis_issue_summaries, CommandResult, PlanKind, RemoteDoctorRepairResult, + RepairRoundObservation, TargetLocation, +}; +use crate::bridge_client::BridgeClient; +use crate::node_client::NodeClient; +use crate::ssh::SshConnectionPool; + +pub(crate) async fn run_agent_planner_repair_loop( + app: &AppHandle, + client: &NodeClient, + bridge_client: &BridgeClient, + pool: &SshConnectionPool, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, +) -> Result { + let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "initial", 0, &diagnosis); + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + 0, + PlanKind::Detect, + None, + "Remote Doctor repair skipped because diagnosis is already healthy.", + )); + } + + let mut previous_results: Vec = Vec::new(); + let mut last_command = None; + let mut last_step_types: Vec = Vec::new(); + let 
mut round_observations: Vec = Vec::new(); + + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + let kind = next_agent_plan_kind_for_round(&diagnosis, &previous_results); + let config_context = + build_config_excerpt_context(&read_target_config_raw(app, target_location, instance_id).await?); + let phase = match kind { + PlanKind::Detect => "planning_detect", + PlanKind::Investigate => "planning_investigate", + PlanKind::Repair => "planning_repair", + }; + let line = match kind { + PlanKind::Detect => format!("Requesting detection plan for round {round}"), + PlanKind::Investigate => format!("Requesting investigation plan for round {round}"), + PlanKind::Repair => format!("Requesting repair plan for round {round}"), + }; + emit_session_progress(Some(app), session_id, round, phase, line, Some(kind), None); + append_session_log( + session_id, + json!({ + "event": "plan_request_context", + "protocol": "agent", + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "instanceId": instance_id, + "targetLocation": target_location, + "configContext": config_excerpt_log_summary(&config_context), + "diagnosisIssueCount": diagnosis.issues.len(), + "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), + }), + ); + let plan = request_agent_plan( + app, + client, + bridge_client, + pool, + session_id, + round, + kind, + target_location, + instance_id, + &diagnosis, + &config_context, + &previous_results, + ) + .await?; + append_session_log( + session_id, + json!({ + "event": "plan_received", + "protocol": "agent", + "round": round, + "planKind": match plan.plan_kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "planId": plan.plan_id, + "summary": plan.summary, + "commandCount": plan.commands.len(), + "healthy": plan.healthy, + "done": plan.done, + "success": plan.success, + }), + ); + previous_results.clear(); + 
last_step_types = agent_plan_step_types(&plan); + for command in &plan.commands { + last_command = Some(command.argv.clone()); + emit_session_progress( + Some(app), + session_id, + round, + match kind { + PlanKind::Detect => "executing_detect", + PlanKind::Investigate => "executing_investigate", + PlanKind::Repair => "executing_repair", + }, + format!( + "Running {} command: {}", + match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + command.argv.join(" ") + ), + Some(kind), + Some(command.argv.clone()), + ); + append_session_log( + session_id, + json!({ + "event": "command_start", + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "argv": command.argv, + "timeoutSec": command.timeout_sec, + "purpose": command.purpose, + }), + ); + let command_result = + match execute_plan_command(app, pool, target_location, instance_id, &command.argv).await { + Ok(result) => result, + Err(error) => { + return Err(plan_command_failure_message(kind, round, &command.argv, &error)); + } + }; + append_session_log( + session_id, + json!({ + "event": "command_result", + "round": round, + "planKind": match kind { + PlanKind::Detect => "detect", + PlanKind::Investigate => "investigate", + PlanKind::Repair => "repair", + }, + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + return Err(format!( + "{} command failed in round {round}: {}", + match kind { + PlanKind::Detect => "Detect", + PlanKind::Investigate => "Investigate", + PlanKind::Repair => "Repair", + }, + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "post_round", round, &diagnosis); + if diagnosis_is_healthy(&diagnosis) 
{ + return Ok(result_for_completion( + session_id, + round, + kind, + last_command, + "Remote Doctor repair completed with a healthy rescue diagnosis.", + )); + } + if matches!(kind, PlanKind::Repair) + && plan.done + && plan.commands.is_empty() + && diagnosis_has_only_non_auto_fixable_issues(&diagnosis) + { + return Ok(result_for_completion_with_warnings( + session_id, + round, + kind, + last_command, + "Remote Doctor completed all safe automatic repairs. Remaining issues are non-auto-fixable warnings.", + )); + } + + round_observations.push(RepairRoundObservation::new(round, &last_step_types, &diagnosis)); + if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { + let observation = round_observations + .last() + .expect("stalled observations should contain current round"); + append_session_log( + session_id, + json!({ + "event": "repair_plan_stalled", + "protocol": "agent", + "round": round, + "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, + "latestStepTypes": observation.step_types, + "issues": observation.issue_summaries, + }), + ); + return Err(stalled_plan_error_message(observation)); + } + } + + Err(round_limit_error_message(&diagnosis, &last_step_types)) +} diff --git a/src-tauri/src/remote_doctor/repair_loops/clawpal_server.rs b/src-tauri/src/remote_doctor/repair_loops/clawpal_server.rs new file mode 100644 index 00000000..9e002df9 --- /dev/null +++ b/src-tauri/src/remote_doctor/repair_loops/clawpal_server.rs @@ -0,0 +1,259 @@ +use serde_json::json; +use tauri::{AppHandle, Runtime}; + +use super::shared::{ + clawpal_server_step_type_summary, repair_plan_stalled, round_limit_error_message, + stalled_plan_error_message, MAX_REMOTE_DOCTOR_ROUNDS, REPAIR_PLAN_STALL_THRESHOLD, +}; +use super::super::config::{ + append_diagnosis_log, build_config_excerpt_context, config_excerpt_log_summary, + diagnosis_is_healthy, read_target_config_raw, restart_target_gateway, run_rescue_diagnosis, + write_target_config, +}; +use 
super::super::legacy::repair_rescue_gateway_if_needed; +use super::super::plan::{ + apply_config_set, apply_config_unset, report_clawpal_server_final_result, + report_clawpal_server_step_result, request_clawpal_server_plan, +}; +use super::super::session::{append_session_log, emit_session_progress, result_for_completion}; +use super::super::types::{ + diagnosis_issue_summaries, CommandResult, PlanKind, RemoteDoctorProtocol, + RemoteDoctorRepairResult, RepairRoundObservation, TargetLocation, +}; +use crate::node_client::NodeClient; + +pub(crate) async fn run_clawpal_server_repair_loop( + app: &AppHandle, + client: &NodeClient, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, +) -> Result { + let mut diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "initial", 0, &diagnosis); + if super::super::agent::protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { + repair_rescue_gateway_if_needed( + app, + session_id, + 0, + target_location, + instance_id, + &mut diagnosis, + ) + .await?; + } + if diagnosis_is_healthy(&diagnosis) { + return Ok(result_for_completion( + session_id, + 0, + PlanKind::Detect, + None, + "Remote Doctor repair skipped because diagnosis is already healthy.", + )); + } + + let mut last_command = None; + let mut round_observations: Vec = Vec::new(); + let mut last_step_types: Vec = Vec::new(); + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + emit_session_progress( + Some(app), + session_id, + round, + "planning_repair", + format!("Requesting remote repair plan for round {round}"), + Some(PlanKind::Repair), + None, + ); + let config_context = + build_config_excerpt_context(&read_target_config_raw(app, target_location, instance_id).await?); + append_session_log( + session_id, + json!({ + "event": "plan_request_context", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "instanceId": instance_id, + "targetLocation": 
target_location, + "configContext": config_excerpt_log_summary(&config_context), + "diagnosisIssueCount": diagnosis.issues.len(), + "diagnosisIssues": diagnosis_issue_summaries(&diagnosis), + }), + ); + if config_context.config_parse_error.is_some() { + append_session_log( + session_id, + json!({ + "event": "config_recovery_context", + "round": round, + "context": config_excerpt_log_summary(&config_context), + }), + ); + } + let plan = request_clawpal_server_plan( + client, + session_id, + round, + instance_id, + target_location, + &diagnosis, + &config_context, + ) + .await?; + append_session_log( + session_id, + json!({ + "event": "plan_received", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "planId": plan.plan_id, + "summary": plan.summary, + "stepCount": plan.steps.len(), + "stepTypeCounts": clawpal_server_step_type_summary(&plan.steps), + }), + ); + + let mut current_config = config_context.config_excerpt.clone(); + let mut rediagnosed = false; + let mut round_step_types = Vec::new(); + for (step_index, step) in plan.steps.iter().enumerate() { + round_step_types.push(step.step_type.clone()); + let mut result = CommandResult { + argv: Vec::new(), + exit_code: Some(0), + stdout: String::new(), + stderr: String::new(), + duration_ms: 0, + timed_out: false, + }; + let started = std::time::Instant::now(); + match step.step_type.as_str() { + "configSet" => { + let path = step.path.as_deref().ok_or("configSet step missing path")?; + let value = step.value.clone().ok_or("configSet step missing value")?; + emit_session_progress( + Some(app), + session_id, + round, + "executing_repair", + format!("Applying config set: {path}"), + Some(PlanKind::Repair), + None, + ); + apply_config_set(&mut current_config, path, value)?; + write_target_config(app, target_location, instance_id, ¤t_config).await?; + restart_target_gateway(app, target_location, instance_id).await?; + result.argv = vec!["configSet".into(), path.into()]; + result.stdout = 
format!("Updated {path}"); + } + "configUnset" => { + let path = step.path.as_deref().ok_or("configUnset step missing path")?; + emit_session_progress( + Some(app), + session_id, + round, + "executing_repair", + format!("Applying config unset: {path}"), + Some(PlanKind::Repair), + None, + ); + apply_config_unset(&mut current_config, path)?; + write_target_config(app, target_location, instance_id, ¤t_config).await?; + restart_target_gateway(app, target_location, instance_id).await?; + result.argv = vec!["configUnset".into(), path.into()]; + result.stdout = format!("Removed {path}"); + } + "doctorRediagnose" => { + emit_session_progress( + Some(app), + session_id, + round, + "planning_detect", + format!("Running rescue diagnosis after repair plan round {round}"), + Some(PlanKind::Detect), + None, + ); + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "post_step_rediagnose", round, &diagnosis); + rediagnosed = true; + result.argv = vec!["doctorRediagnose".into()]; + result.stdout = + format!("Diagnosis status={} issues={}", diagnosis.status, diagnosis.issues.len()); + } + other => { + result.exit_code = Some(1); + result.stderr = format!("Unsupported clawpal-server step type: {other}"); + } + } + result.duration_ms = started.elapsed().as_millis() as u64; + last_command = Some(result.argv.clone()); + append_session_log( + session_id, + json!({ + "event": "command_result", + "protocol": "clawpal_server", + "round": round, + "planKind": "repair", + "stepIndex": step_index, + "step": step, + "result": result, + }), + ); + report_clawpal_server_step_result(client, &plan.plan_id, step_index, step, &result) + .await; + if result.exit_code.unwrap_or(1) != 0 { + return Err(result.stderr); + } + } + + if !rediagnosed { + diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; + append_diagnosis_log(session_id, "post_round", round, &diagnosis); + } + if 
super::super::agent::protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { + repair_rescue_gateway_if_needed( + app, + session_id, + round, + target_location, + instance_id, + &mut diagnosis, + ) + .await?; + } + last_step_types = round_step_types.clone(); + round_observations.push(RepairRoundObservation::new(round, &round_step_types, &diagnosis)); + if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { + let observation = round_observations + .last() + .expect("stalled observations should contain current round"); + append_session_log( + session_id, + json!({ + "event": "repair_plan_stalled", + "protocol": "clawpal_server", + "round": round, + "repeatedRounds": REPAIR_PLAN_STALL_THRESHOLD, + "latestStepTypes": observation.step_types, + "issues": observation.issue_summaries, + }), + ); + return Err(stalled_plan_error_message(observation)); + } + let healthy = diagnosis_is_healthy(&diagnosis); + report_clawpal_server_final_result(client, &plan.plan_id, healthy, &diagnosis).await; + if healthy { + return Ok(result_for_completion( + session_id, + round, + PlanKind::Repair, + last_command, + "Remote Doctor repair completed with a healthy rescue diagnosis.", + )); + } + } + + Err(round_limit_error_message(&diagnosis, &last_step_types)) +} diff --git a/src-tauri/src/remote_doctor/repair_loops/legacy_doctor.rs b/src-tauri/src/remote_doctor/repair_loops/legacy_doctor.rs new file mode 100644 index 00000000..0fd2be13 --- /dev/null +++ b/src-tauri/src/remote_doctor/repair_loops/legacy_doctor.rs @@ -0,0 +1,175 @@ +use serde_json::json; +use tauri::{AppHandle, Runtime}; + +use super::shared::MAX_REMOTE_DOCTOR_ROUNDS; +use super::super::plan::execute_command; +use super::super::session::{append_session_log, emit_session_progress}; +use super::super::types::{CommandResult, PlanKind, PlanResponse, RemoteDoctorRepairResult, TargetLocation}; +use crate::ssh::SshConnectionPool; + +pub(crate) async fn run_remote_doctor_repair_loop( + app: 
Option<&AppHandle>, + pool: &SshConnectionPool, + session_id: &str, + instance_id: &str, + target_location: TargetLocation, + mut request_plan_fn: F, +) -> Result +where + F: FnMut(PlanKind, usize, Vec) -> Fut, + Fut: std::future::Future>, +{ + let mut previous_results: Vec = Vec::new(); + let mut last_command: Option> = None; + let mut last_plan_kind = PlanKind::Detect; + + for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { + emit_session_progress( + app, + session_id, + round, + "planning_detect", + format!("Requesting detection plan for round {round}"), + Some(PlanKind::Detect), + None, + ); + let detect_plan = + request_plan_fn(PlanKind::Detect, round, previous_results.clone()).await?; + append_session_log( + session_id, + json!({ + "event": "plan_received", + "round": round, + "planKind": "detect", + "planId": detect_plan.plan_id, + "summary": detect_plan.summary, + "commandCount": detect_plan.commands.len(), + "healthy": detect_plan.healthy, + "done": detect_plan.done, + }), + ); + if detect_plan.healthy || (detect_plan.done && detect_plan.commands.is_empty()) { + return Ok(RemoteDoctorRepairResult { + mode: "remoteDoctor".into(), + status: "completed".into(), + round, + phase: "completed".into(), + last_plan_kind: match last_plan_kind { + PlanKind::Detect => "detect".into(), + PlanKind::Investigate => "investigate".into(), + PlanKind::Repair => "repair".into(), + }, + latest_diagnosis_healthy: true, + last_command, + session_id: session_id.to_string(), + message: "Remote Doctor repair completed with a healthy detection result.".into(), + }); + } + previous_results.clear(); + for command in &detect_plan.commands { + last_command = Some(command.argv.clone()); + emit_session_progress( + app, + session_id, + round, + "executing_detect", + format!("Running detect command: {}", command.argv.join(" ")), + Some(PlanKind::Detect), + Some(command.argv.clone()), + ); + let command_result = + execute_command(pool, target_location, instance_id, &command.argv).await?; + 
append_session_log( + session_id, + json!({ + "event": "command_result", + "round": round, + "planKind": "detect", + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + previous_results.push(command_result); + return Err(format!( + "Detect command failed in round {round}: {}", + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + + emit_session_progress( + app, + session_id, + round, + "planning_repair", + format!("Requesting repair plan for round {round}"), + Some(PlanKind::Repair), + None, + ); + let repair_plan = + request_plan_fn(PlanKind::Repair, round, previous_results.clone()).await?; + last_plan_kind = PlanKind::Repair; + append_session_log( + session_id, + json!({ + "event": "plan_received", + "round": round, + "planKind": "repair", + "planId": repair_plan.plan_id, + "summary": repair_plan.summary, + "commandCount": repair_plan.commands.len(), + "success": repair_plan.success, + "done": repair_plan.done, + }), + ); + previous_results.clear(); + for command in &repair_plan.commands { + last_command = Some(command.argv.clone()); + emit_session_progress( + app, + session_id, + round, + "executing_repair", + format!("Running repair command: {}", command.argv.join(" ")), + Some(PlanKind::Repair), + Some(command.argv.clone()), + ); + let command_result = + execute_command(pool, target_location, instance_id, &command.argv).await?; + append_session_log( + session_id, + json!({ + "event": "command_result", + "round": round, + "planKind": "repair", + "result": command_result, + }), + ); + if command_result.exit_code.unwrap_or(1) != 0 + && !command.continue_on_failure.unwrap_or(false) + { + previous_results.push(command_result); + return Err(format!( + "Repair command failed in round {round}: {}", + command.argv.join(" ") + )); + } + previous_results.push(command_result); + } + } + + append_session_log( + session_id, + json!({ + "event": 
"session_complete", + "status": "failed", + "reason": "round_limit_exceeded", + }), + ); + Err(format!( + "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean detection result" + )) +} diff --git a/src-tauri/src/remote_doctor/repair_loops/shared.rs b/src-tauri/src/remote_doctor/repair_loops/shared.rs new file mode 100644 index 00000000..58e02b36 --- /dev/null +++ b/src-tauri/src/remote_doctor/repair_loops/shared.rs @@ -0,0 +1,153 @@ +use serde_json::Value; + +use super::super::types::{diagnosis_issue_summaries, ClawpalServerPlanStep, RepairRoundObservation}; + +pub(crate) const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; +pub(crate) const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; + +pub(crate) fn is_unknown_method_error(error: &str) -> bool { + error.contains("unknown method") + || error.contains("\"code\":\"INVALID_REQUEST\"") + || error.contains("\"code\": \"INVALID_REQUEST\"") +} + +pub(crate) fn clawpal_server_step_type_summary(steps: &[ClawpalServerPlanStep]) -> Value { + let mut counts = serde_json::Map::new(); + for step in steps { + let entry = counts + .entry(step.step_type.clone()) + .or_insert_with(|| Value::from(0_u64)); + let next = entry.as_u64().unwrap_or(0) + 1; + *entry = Value::from(next); + } + Value::Object(counts) +} + +pub(crate) fn repair_plan_stalled( + observations: &[RepairRoundObservation], + threshold: usize, +) -> bool { + if observations.len() < threshold { + return false; + } + let recent = &observations[observations.len() - threshold..]; + let Some(first) = recent.first() else { + return false; + }; + !first.issue_summaries.is_empty() + && recent.iter().all(|entry| { + entry.step_types.len() == 1 + && entry.step_types[0] == "doctorRediagnose" + && entry.diagnosis_signature == first.diagnosis_signature + }) +} + +pub(crate) fn round_limit_error_message( + diagnosis: &crate::commands::RescuePrimaryDiagnosisResult, + last_step_types: &[String], +) -> String { + let issue_summary = 
serde_json::to_string(&diagnosis_issue_summaries(diagnosis)) + .unwrap_or_else(|_| "[]".to_string()); + let step_summary = if last_step_types.is_empty() { + "[]".to_string() + } else { + serde_json::to_string(last_step_types).unwrap_or_else(|_| "[]".to_string()) + }; + format!( + "Remote Doctor repair exceeded {MAX_REMOTE_DOCTOR_ROUNDS} rounds without a clean rescue diagnosis result. Last issues: {issue_summary}. Last repair step types: {step_summary}." + ) +} + +pub(crate) fn stalled_plan_error_message(observation: &RepairRoundObservation) -> String { + let issue_summary = + serde_json::to_string(&observation.issue_summaries).unwrap_or_else(|_| "[]".to_string()); + let step_summary = + serde_json::to_string(&observation.step_types).unwrap_or_else(|_| "[]".to_string()); + format!( + "Remote Doctor did not return actionable repair steps by round {} after {} repeated rounds. Last issues: {}. Last repair step types: {}.", + observation.round, + REPAIR_PLAN_STALL_THRESHOLD, + issue_summary, + step_summary + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::commands::{RescuePrimaryDiagnosisResult, RescuePrimaryIssue, RescuePrimarySummary}; + use crate::remote_doctor::types::RepairRoundObservation; + + #[test] + fn repeated_rediagnose_only_rounds_are_detected_as_stalled() { + let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { + id: "issue-1".to_string(), + code: "invalid.base_url".to_string(), + severity: "medium".to_string(), + message: "Provider base URL is invalid".to_string(), + auto_fixable: true, + fix_hint: Some("Reset baseUrl".to_string()), + source: "config".to_string(), + }]); + let step_types = vec!["doctorRediagnose".to_string()]; + + assert!(!repair_plan_stalled( + &[ + RepairRoundObservation::new(1, &step_types, &diagnosis), + RepairRoundObservation::new(2, &step_types, &diagnosis), + ], + 3, + )); + assert!(repair_plan_stalled( + &[ + RepairRoundObservation::new(1, &step_types, &diagnosis), + RepairRoundObservation::new(2, &step_types, 
&diagnosis), + RepairRoundObservation::new(3, &step_types, &diagnosis), + ], + 3, + )); + } + + #[test] + fn round_limit_error_message_includes_latest_issues_and_step_types() { + let diagnosis = sample_diagnosis(vec![RescuePrimaryIssue { + id: "issue-1".to_string(), + code: "invalid.base_url".to_string(), + severity: "medium".to_string(), + message: "Provider base URL is invalid".to_string(), + auto_fixable: true, + fix_hint: Some("Reset baseUrl".to_string()), + source: "config".to_string(), + }]); + let error = round_limit_error_message(&diagnosis, &["doctorRediagnose".to_string()]); + assert!(error.contains("invalid.base_url")); + assert!(error.contains("doctorRediagnose")); + assert!(error.contains("Provider base URL is invalid")); + } + + fn sample_diagnosis(issues: Vec) -> RescuePrimaryDiagnosisResult { + RescuePrimaryDiagnosisResult { + status: "degraded".to_string(), + checked_at: "2026-03-19T00:00:00Z".to_string(), + target_profile: "primary".to_string(), + rescue_profile: "rescue".to_string(), + rescue_configured: true, + rescue_port: Some(18789), + summary: RescuePrimarySummary { + status: "degraded".to_string(), + headline: "Issues found".to_string(), + recommended_action: "Repair".to_string(), + fixable_issue_count: 0, + selected_fix_issue_ids: Vec::new(), + root_cause_hypotheses: Vec::new(), + fix_steps: Vec::new(), + confidence: None, + citations: Vec::new(), + version_awareness: None, + }, + sections: Vec::new(), + checks: Vec::new(), + issues, + } + } +} From 1fc10f076440075624c2bfdcc397fc650e2b9001 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 16:07:04 +0800 Subject: [PATCH 16/20] refactor: tighten remote doctor repair limits --- openclaw-gateway-client/Cargo.toml | 2 +- src-tauri/src/remote_doctor/repair_loops/shared.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openclaw-gateway-client/Cargo.toml b/openclaw-gateway-client/Cargo.toml index 86b52423..e2d88a66 100644 --- 
a/openclaw-gateway-client/Cargo.toml +++ b/openclaw-gateway-client/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openclaw-gateway-client" version = "0.1.0" -edition = "2024" +edition = "2021" [lib] name = "openclaw_gateway_client" diff --git a/src-tauri/src/remote_doctor/repair_loops/shared.rs b/src-tauri/src/remote_doctor/repair_loops/shared.rs index 58e02b36..2bde1bab 100644 --- a/src-tauri/src/remote_doctor/repair_loops/shared.rs +++ b/src-tauri/src/remote_doctor/repair_loops/shared.rs @@ -2,7 +2,7 @@ use serde_json::Value; use super::super::types::{diagnosis_issue_summaries, ClawpalServerPlanStep, RepairRoundObservation}; -pub(crate) const MAX_REMOTE_DOCTOR_ROUNDS: usize = 50; +pub(crate) const MAX_REMOTE_DOCTOR_ROUNDS: usize = 10; pub(crate) const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; pub(crate) fn is_unknown_method_error(error: &str) -> bool { From 9c42faf1c9a5692de7db34f43a70813021157e78 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 16:10:35 +0800 Subject: [PATCH 17/20] fix: remove hardcoded empty diagnosis timestamp --- src-tauri/src/remote_doctor/config.rs | 46 +++++++++++++++++---------- 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/src-tauri/src/remote_doctor/config.rs b/src-tauri/src/remote_doctor/config.rs index 59661de9..3692ecba 100644 --- a/src-tauri/src/remote_doctor/config.rs +++ b/src-tauri/src/remote_doctor/config.rs @@ -15,8 +15,9 @@ use crate::commands::preferences::load_app_preferences_from_paths; use crate::commands::{ diagnose_primary_via_rescue, read_raw_config, remote_diagnose_primary_via_rescue, remote_read_raw_config, remote_restart_gateway, remote_write_raw_config, restart_gateway, - RescuePrimaryDiagnosisResult, + RescuePrimaryDiagnosisResult, RescuePrimarySummary, }; +use crate::commands::version::{format_timestamp_from_unix, unix_timestamp_secs}; use crate::models::resolve_paths; use crate::node_client::GatewayCredentials; use crate::ssh::SshConnectionPool; @@ -161,22 +162,29 @@ pub(crate) fn 
empty_config_excerpt_context() -> ConfigExcerptContext { } pub(crate) fn empty_diagnosis() -> RescuePrimaryDiagnosisResult { - serde_json::from_value(json!({ - "status": "healthy", - "checkedAt": "2026-03-18T00:00:00Z", - "targetProfile": "primary", - "rescueProfile": "rescue", - "summary": { - "status": "healthy", - "headline": "Healthy", - "recommendedAction": null, - "fixableIssueCount": 0, - "selectedFixIssueIds": [] + RescuePrimaryDiagnosisResult { + status: "healthy".into(), + checked_at: format_timestamp_from_unix(unix_timestamp_secs()), + target_profile: "primary".into(), + rescue_profile: "rescue".into(), + rescue_configured: false, + rescue_port: None, + summary: RescuePrimarySummary { + status: "healthy".into(), + headline: "Healthy".into(), + recommended_action: "No action needed".into(), + fixable_issue_count: 0, + selected_fix_issue_ids: Vec::new(), + root_cause_hypotheses: Vec::new(), + fix_steps: Vec::new(), + confidence: None, + citations: Vec::new(), + version_awareness: None, }, - "issues": [], - "sections": [] - })) - .expect("empty diagnosis should deserialize") + sections: Vec::new(), + checks: Vec::new(), + issues: Vec::new(), + } } pub(crate) fn diagnosis_has_only_non_auto_fixable_issues( @@ -474,6 +482,12 @@ mod tests { .contains("Failed to parse target config")); } + #[test] + fn empty_diagnosis_checked_at_is_not_hardcoded() { + let diagnosis = empty_diagnosis(); + assert_ne!(diagnosis.checked_at, "2026-03-18T00:00:00Z"); + } + #[test] fn diagnosis_missing_rescue_profile_is_detected() { let diagnosis = empty_diagnosis_with_issues(vec![json!({ From 8489d4bdac19f2768214e526eb5cf59ff59557c9 Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Thu, 19 Mar 2026 16:13:19 +0800 Subject: [PATCH 18/20] style: run cargo fmt --- openclaw-gateway-client/src/client.rs | 6 ++-- openclaw-gateway-client/src/identity.rs | 6 ++-- .../tests/client_handshake.rs | 2 +- openclaw-gateway-client/tests/client_rpc.rs | 2 +- 
openclaw-gateway-client/tests/node_client.rs | 2 +- .../tests/protocol_roundtrip.rs | 2 +- src-tauri/src/remote_doctor/config.rs | 2 +- src-tauri/src/remote_doctor/repair_loops.rs | 4 ++- .../repair_loops/agent_planner.rs | 34 +++++++++++++------ .../repair_loops/clawpal_server.rs | 34 +++++++++++++------ .../repair_loops/legacy_doctor.rs | 6 ++-- .../src/remote_doctor/repair_loops/shared.rs | 4 ++- 12 files changed, 68 insertions(+), 36 deletions(-) diff --git a/openclaw-gateway-client/src/client.rs b/openclaw-gateway-client/src/client.rs index 92147ba8..433fb005 100644 --- a/openclaw-gateway-client/src/client.rs +++ b/openclaw-gateway-client/src/client.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use futures::{SinkExt, StreamExt}; use serde_json::Value; -use tokio::sync::{Mutex, broadcast, oneshot}; +use tokio::sync::{broadcast, oneshot, Mutex}; use tokio::task::JoinHandle; use tokio_tungstenite::{connect_async, tungstenite::Message}; use url::Url; @@ -10,8 +10,8 @@ use uuid::Uuid; use crate::error::Error; use crate::protocol::{ - ClientInfo, ConnectParams, EventFrame, GatewayFrame, HelloOk, PROTOCOL_VERSION, RequestFrame, - ResponseFrame, + ClientInfo, ConnectParams, EventFrame, GatewayFrame, HelloOk, RequestFrame, ResponseFrame, + PROTOCOL_VERSION, }; use crate::tls::normalize_fingerprint; diff --git a/openclaw-gateway-client/src/identity.rs b/openclaw-gateway-client/src/identity.rs index b6efb9f3..4d83fb20 100644 --- a/openclaw-gateway-client/src/identity.rs +++ b/openclaw-gateway-client/src/identity.rs @@ -1,9 +1,9 @@ -use base64::Engine; use base64::engine::general_purpose::URL_SAFE_NO_PAD; -use ed25519_dalek::{Signer, SigningKey}; +use base64::Engine; use ed25519_dalek::{pkcs8::DecodePrivateKey, pkcs8::EncodePrivateKey, pkcs8::EncodePublicKey}; +use ed25519_dalek::{Signer, SigningKey}; use rand_core::OsRng; -use serde_json::{Value, json}; +use serde_json::{json, Value}; use uuid::Uuid; use crate::error::Error; diff --git 
a/openclaw-gateway-client/tests/client_handshake.rs b/openclaw-gateway-client/tests/client_handshake.rs index 6d2c58bb..f23780e5 100644 --- a/openclaw-gateway-client/tests/client_handshake.rs +++ b/openclaw-gateway-client/tests/client_handshake.rs @@ -1,6 +1,6 @@ use futures::{SinkExt, StreamExt}; use openclaw_gateway_client::client::GatewayClientBuilder; -use serde_json::{Value, json}; +use serde_json::{json, Value}; use tokio::net::TcpListener; use tokio::sync::oneshot; use tokio_tungstenite::{accept_async, tungstenite::Message}; diff --git a/openclaw-gateway-client/tests/client_rpc.rs b/openclaw-gateway-client/tests/client_rpc.rs index f4715646..118efe0c 100644 --- a/openclaw-gateway-client/tests/client_rpc.rs +++ b/openclaw-gateway-client/tests/client_rpc.rs @@ -1,6 +1,6 @@ use futures::{SinkExt, StreamExt}; use openclaw_gateway_client::client::GatewayClientBuilder; -use serde_json::{Value, json}; +use serde_json::{json, Value}; use tokio::net::TcpListener; use tokio::sync::{mpsc, oneshot}; use tokio_tungstenite::{accept_async, tungstenite::Message}; diff --git a/openclaw-gateway-client/tests/node_client.rs b/openclaw-gateway-client/tests/node_client.rs index 4f6b1a4a..63e7752a 100644 --- a/openclaw-gateway-client/tests/node_client.rs +++ b/openclaw-gateway-client/tests/node_client.rs @@ -1,7 +1,7 @@ use futures::{SinkExt, StreamExt}; use openclaw_gateway_client::client::GatewayClientBuilder; use openclaw_gateway_client::node::NodeClient; -use serde_json::{Value, json}; +use serde_json::{json, Value}; use tokio::net::TcpListener; use tokio::sync::oneshot; use tokio_tungstenite::{accept_async, tungstenite::Message}; diff --git a/openclaw-gateway-client/tests/protocol_roundtrip.rs b/openclaw-gateway-client/tests/protocol_roundtrip.rs index 699d8331..70afa53d 100644 --- a/openclaw-gateway-client/tests/protocol_roundtrip.rs +++ b/openclaw-gateway-client/tests/protocol_roundtrip.rs @@ -1,6 +1,6 @@ use openclaw_gateway_client::protocol::{EventFrame, GatewayFrame, 
RequestFrame, ResponseFrame}; use pretty_assertions::assert_eq; -use serde_json::{Value, json}; +use serde_json::{json, Value}; #[test] fn serializes_request_frame() { diff --git a/src-tauri/src/remote_doctor/config.rs b/src-tauri/src/remote_doctor/config.rs index 3692ecba..a2017fba 100644 --- a/src-tauri/src/remote_doctor/config.rs +++ b/src-tauri/src/remote_doctor/config.rs @@ -12,12 +12,12 @@ use super::types::{ diagnosis_issue_summaries, ConfigExcerptContext, StoredRemoteDoctorIdentity, TargetLocation, }; use crate::commands::preferences::load_app_preferences_from_paths; +use crate::commands::version::{format_timestamp_from_unix, unix_timestamp_secs}; use crate::commands::{ diagnose_primary_via_rescue, read_raw_config, remote_diagnose_primary_via_rescue, remote_read_raw_config, remote_restart_gateway, remote_write_raw_config, restart_gateway, RescuePrimaryDiagnosisResult, RescuePrimarySummary, }; -use crate::commands::version::{format_timestamp_from_unix, unix_timestamp_secs}; use crate::models::resolve_paths; use crate::node_client::GatewayCredentials; use crate::ssh::SshConnectionPool; diff --git a/src-tauri/src/remote_doctor/repair_loops.rs b/src-tauri/src/remote_doctor/repair_loops.rs index 53097e11..44c41ec9 100644 --- a/src-tauri/src/remote_doctor/repair_loops.rs +++ b/src-tauri/src/remote_doctor/repair_loops.rs @@ -26,7 +26,9 @@ use super::legacy::{ }; use super::plan::request_plan; use super::session::append_session_log; -use super::types::{parse_target_location, PlanKind, RemoteDoctorProtocol, RemoteDoctorRepairResult, TargetLocation}; +use super::types::{ + parse_target_location, PlanKind, RemoteDoctorProtocol, RemoteDoctorRepairResult, TargetLocation, +}; use crate::bridge_client::BridgeClient; use crate::commands::logs::log_dev; use crate::node_client::NodeClient; diff --git a/src-tauri/src/remote_doctor/repair_loops/agent_planner.rs b/src-tauri/src/remote_doctor/repair_loops/agent_planner.rs index 090bf7cc..01cd51d1 100644 --- 
a/src-tauri/src/remote_doctor/repair_loops/agent_planner.rs +++ b/src-tauri/src/remote_doctor/repair_loops/agent_planner.rs @@ -1,10 +1,6 @@ use serde_json::json; use tauri::{AppHandle, Runtime}; -use super::shared::{ - repair_plan_stalled, round_limit_error_message, stalled_plan_error_message, - MAX_REMOTE_DOCTOR_ROUNDS, REPAIR_PLAN_STALL_THRESHOLD, -}; use super::super::agent::next_agent_plan_kind_for_round; use super::super::config::{ append_diagnosis_log, build_config_excerpt_context, config_excerpt_log_summary, @@ -12,7 +8,9 @@ use super::super::config::{ run_rescue_diagnosis, }; use super::super::legacy::request_agent_plan; -use super::super::plan::{agent_plan_step_types, execute_plan_command, plan_command_failure_message}; +use super::super::plan::{ + agent_plan_step_types, execute_plan_command, plan_command_failure_message, +}; use super::super::session::{ append_session_log, emit_session_progress, result_for_completion, result_for_completion_with_warnings, @@ -21,6 +19,10 @@ use super::super::types::{ diagnosis_issue_summaries, CommandResult, PlanKind, RemoteDoctorRepairResult, RepairRoundObservation, TargetLocation, }; +use super::shared::{ + repair_plan_stalled, round_limit_error_message, stalled_plan_error_message, + MAX_REMOTE_DOCTOR_ROUNDS, REPAIR_PLAN_STALL_THRESHOLD, +}; use crate::bridge_client::BridgeClient; use crate::node_client::NodeClient; use crate::ssh::SshConnectionPool; @@ -53,8 +55,9 @@ pub(crate) async fn run_agent_planner_repair_loop( for round in 1..=MAX_REMOTE_DOCTOR_ROUNDS { let kind = next_agent_plan_kind_for_round(&diagnosis, &previous_results); - let config_context = - build_config_excerpt_context(&read_target_config_raw(app, target_location, instance_id).await?); + let config_context = build_config_excerpt_context( + &read_target_config_raw(app, target_location, instance_id).await?, + ); let phase = match kind { PlanKind::Detect => "planning_detect", PlanKind::Investigate => "planning_investigate", @@ -159,10 +162,17 @@ 
pub(crate) async fn run_agent_planner_repair_loop( }), ); let command_result = - match execute_plan_command(app, pool, target_location, instance_id, &command.argv).await { + match execute_plan_command(app, pool, target_location, instance_id, &command.argv) + .await + { Ok(result) => result, Err(error) => { - return Err(plan_command_failure_message(kind, round, &command.argv, &error)); + return Err(plan_command_failure_message( + kind, + round, + &command.argv, + &error, + )); } }; append_session_log( @@ -219,7 +229,11 @@ pub(crate) async fn run_agent_planner_repair_loop( )); } - round_observations.push(RepairRoundObservation::new(round, &last_step_types, &diagnosis)); + round_observations.push(RepairRoundObservation::new( + round, + &last_step_types, + &diagnosis, + )); if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { let observation = round_observations .last() diff --git a/src-tauri/src/remote_doctor/repair_loops/clawpal_server.rs b/src-tauri/src/remote_doctor/repair_loops/clawpal_server.rs index 9e002df9..ecf02599 100644 --- a/src-tauri/src/remote_doctor/repair_loops/clawpal_server.rs +++ b/src-tauri/src/remote_doctor/repair_loops/clawpal_server.rs @@ -1,10 +1,6 @@ use serde_json::json; use tauri::{AppHandle, Runtime}; -use super::shared::{ - clawpal_server_step_type_summary, repair_plan_stalled, round_limit_error_message, - stalled_plan_error_message, MAX_REMOTE_DOCTOR_ROUNDS, REPAIR_PLAN_STALL_THRESHOLD, -}; use super::super::config::{ append_diagnosis_log, build_config_excerpt_context, config_excerpt_log_summary, diagnosis_is_healthy, read_target_config_raw, restart_target_gateway, run_rescue_diagnosis, @@ -20,6 +16,10 @@ use super::super::types::{ diagnosis_issue_summaries, CommandResult, PlanKind, RemoteDoctorProtocol, RemoteDoctorRepairResult, RepairRoundObservation, TargetLocation, }; +use super::shared::{ + clawpal_server_step_type_summary, repair_plan_stalled, round_limit_error_message, + stalled_plan_error_message, 
MAX_REMOTE_DOCTOR_ROUNDS, REPAIR_PLAN_STALL_THRESHOLD, +}; use crate::node_client::NodeClient; pub(crate) async fn run_clawpal_server_repair_loop( @@ -65,8 +65,9 @@ pub(crate) async fn run_clawpal_server_repair_loop( Some(PlanKind::Repair), None, ); - let config_context = - build_config_excerpt_context(&read_target_config_raw(app, target_location, instance_id).await?); + let config_context = build_config_excerpt_context( + &read_target_config_raw(app, target_location, instance_id).await?, + ); append_session_log( session_id, json!({ @@ -149,7 +150,10 @@ pub(crate) async fn run_clawpal_server_repair_loop( result.stdout = format!("Updated {path}"); } "configUnset" => { - let path = step.path.as_deref().ok_or("configUnset step missing path")?; + let path = step + .path + .as_deref() + .ok_or("configUnset step missing path")?; emit_session_progress( Some(app), session_id, @@ -179,8 +183,11 @@ pub(crate) async fn run_clawpal_server_repair_loop( append_diagnosis_log(session_id, "post_step_rediagnose", round, &diagnosis); rediagnosed = true; result.argv = vec!["doctorRediagnose".into()]; - result.stdout = - format!("Diagnosis status={} issues={}", diagnosis.status, diagnosis.issues.len()); + result.stdout = format!( + "Diagnosis status={} issues={}", + diagnosis.status, + diagnosis.issues.len() + ); } other => { result.exit_code = Some(1); @@ -212,7 +219,8 @@ pub(crate) async fn run_clawpal_server_repair_loop( diagnosis = run_rescue_diagnosis(app, target_location, instance_id).await?; append_diagnosis_log(session_id, "post_round", round, &diagnosis); } - if super::super::agent::protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) { + if super::super::agent::protocol_runs_rescue_preflight(RemoteDoctorProtocol::ClawpalServer) + { repair_rescue_gateway_if_needed( app, session_id, @@ -224,7 +232,11 @@ pub(crate) async fn run_clawpal_server_repair_loop( .await?; } last_step_types = round_step_types.clone(); - 
round_observations.push(RepairRoundObservation::new(round, &round_step_types, &diagnosis)); + round_observations.push(RepairRoundObservation::new( + round, + &round_step_types, + &diagnosis, + )); if repair_plan_stalled(&round_observations, REPAIR_PLAN_STALL_THRESHOLD) { let observation = round_observations .last() diff --git a/src-tauri/src/remote_doctor/repair_loops/legacy_doctor.rs b/src-tauri/src/remote_doctor/repair_loops/legacy_doctor.rs index 0fd2be13..37de1e6c 100644 --- a/src-tauri/src/remote_doctor/repair_loops/legacy_doctor.rs +++ b/src-tauri/src/remote_doctor/repair_loops/legacy_doctor.rs @@ -1,10 +1,12 @@ use serde_json::json; use tauri::{AppHandle, Runtime}; -use super::shared::MAX_REMOTE_DOCTOR_ROUNDS; use super::super::plan::execute_command; use super::super::session::{append_session_log, emit_session_progress}; -use super::super::types::{CommandResult, PlanKind, PlanResponse, RemoteDoctorRepairResult, TargetLocation}; +use super::super::types::{ + CommandResult, PlanKind, PlanResponse, RemoteDoctorRepairResult, TargetLocation, +}; +use super::shared::MAX_REMOTE_DOCTOR_ROUNDS; use crate::ssh::SshConnectionPool; pub(crate) async fn run_remote_doctor_repair_loop( diff --git a/src-tauri/src/remote_doctor/repair_loops/shared.rs b/src-tauri/src/remote_doctor/repair_loops/shared.rs index 2bde1bab..d5d6780e 100644 --- a/src-tauri/src/remote_doctor/repair_loops/shared.rs +++ b/src-tauri/src/remote_doctor/repair_loops/shared.rs @@ -1,6 +1,8 @@ use serde_json::Value; -use super::super::types::{diagnosis_issue_summaries, ClawpalServerPlanStep, RepairRoundObservation}; +use super::super::types::{ + diagnosis_issue_summaries, ClawpalServerPlanStep, RepairRoundObservation, +}; pub(crate) const MAX_REMOTE_DOCTOR_ROUNDS: usize = 10; pub(crate) const REPAIR_PLAN_STALL_THRESHOLD: usize = 3; From 63224d8088b0a9e4d9c3ee7a1e322be4a6bbe1ee Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Fri, 20 Mar 2026 15:23:21 +0800 Subject: [PATCH 19/20] chore: sync github 
workflows from origin develop --- .github/workflows/coverage.yml | 12 ++++ .github/workflows/fork-pr-comment.yml | 87 +++++++++++++++++++++++++++ .github/workflows/home-perf-e2e.yml | 33 ++++++++-- .github/workflows/metrics.yml | 77 ++++++++++++++++++------ .github/workflows/screenshot.yml | 16 +++-- 5 files changed, 198 insertions(+), 27 deletions(-) create mode 100644 .github/workflows/fork-pr-comment.yml diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 2bf4368e..966847b8 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -113,11 +113,22 @@ jobs: f.write('body< + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.head_repository.full_name != github.repository + runs-on: ubuntu-latest + steps: + - name: Determine artifact name + id: meta + run: | + WF="${{ github.event.workflow_run.name }}" + if [ "$WF" = "Coverage" ]; then + echo "artifact=coverage-comment" >> "$GITHUB_OUTPUT" + echo "marker=" >> "$GITHUB_OUTPUT" + elif [ "$WF" = "Metrics Gate" ]; then + echo "artifact=metrics-comment" >> "$GITHUB_OUTPUT" + echo "marker=" >> "$GITHUB_OUTPUT" + else + echo "Unknown workflow: $WF" + exit 1 + fi + + - name: Get PR number + id: pr + uses: actions/github-script@v7 + with: + script: | + const result = await github.rest.pulls.list({ + owner: context.repo.owner, + repo: context.repo.repo, + state: 'open', + head: `${context.payload.workflow_run.head_repository.owner.login}:${context.payload.workflow_run.head_branch}`, + }); + if (result.data.length > 0) { + core.setOutput('number', result.data[0].number); + } else { + core.setFailed('Could not find PR for this workflow run'); + } + + - name: Download artifact + id: download + continue-on-error: true + uses: actions/download-artifact@v4 + with: + name: ${{ steps.meta.outputs.artifact }} + run-id: ${{ github.event.workflow_run.id }} + github-token: ${{ secrets.GITHUB_TOKEN }} + path: /tmp/comment + + - name: Find comment file + 
if: steps.download.outcome == 'success' + id: file + run: | + FILE=$(find /tmp/comment -name '*.md' | head -1) + echo "path=${FILE}" >> "$GITHUB_OUTPUT" + + - name: Find existing comment + if: steps.download.outcome == 'success' + uses: peter-evans/find-comment@v3 + id: fc + with: + issue-number: ${{ steps.pr.outputs.number }} + comment-author: 'github-actions[bot]' + body-includes: ${{ steps.meta.outputs.marker }} + + - name: Create or update comment + if: steps.download.outcome == 'success' + uses: peter-evans/create-or-update-comment@v4 + with: + comment-id: ${{ steps.fc.outputs.comment-id }} + issue-number: ${{ steps.pr.outputs.number }} + body-path: ${{ steps.file.outputs.path }} + edit-mode: replace diff --git a/.github/workflows/home-perf-e2e.yml b/.github/workflows/home-perf-e2e.yml index b0673732..119e2f61 100644 --- a/.github/workflows/home-perf-e2e.yml +++ b/.github/workflows/home-perf-e2e.yml @@ -36,16 +36,39 @@ jobs: - name: Start container run: | - docker run -d --name oc-perf -p 2299:22 clawpal-perf-e2e + docker run -d --name oc-perf -p 2299:22 -p 18789:18790 clawpal-perf-e2e for i in $(seq 1 15); do sshpass -p clawpal-perf-e2e ssh -o StrictHostKeyChecking=no -p 2299 root@localhost echo ok 2>/dev/null && break sleep 1 done + # Wait for OpenClaw gateway HTTP API (port 18789 exposed to host) + for i in $(seq 1 60); do + GW=$(curl -sf http://localhost:18789/ 2>/dev/null || true) + if [ -n "$GW" ]; then echo "Gateway HTTP ready after ${i}s"; break; fi + sleep 1 + done + # Wait for gateway API to be fully ready (not just dashboard) + for j in $(seq 1 30); do + API=$(curl -sf http://localhost:18789/api/status 2>/dev/null || true) + if [ -n "$API" ]; then echo "Gateway API ready after additional ${j}s"; break; fi + sleep 1 + done - - name: Extract fixtures from container - run: node tests/e2e/perf/extract-fixtures.mjs + - name: Start IPC bridge server + run: | + node tests/e2e/perf/ipc-bridge-server.mjs & + # Wait for bridge to be ready + for i in $(seq 1 
60); do + RESP=$(curl -s http://localhost:3399/invoke -X POST -H 'Content-Type: application/json' -d '{"cmd":"get_instance_runtime_snapshot","args":{}}' 2>/dev/null || true) + if echo "$RESP" | jq -e '.ok == true and .result != null' > /dev/null 2>&1; then break; fi + sleep 1 + done + # Verify an SSH-backed command returned real data (get_status_extra calls openclaw --version via SSH) + VERIFY=$(curl -sf http://localhost:3399/invoke -X POST -H 'Content-Type: application/json' -d '{"cmd":"get_status_extra","args":{}}') || { echo "Bridge readiness check failed: SSH-backed command errored"; exit 1; } + echo "$VERIFY" | jq -e '.ok == true and .result.openclawVersion != null and .result.openclawVersion != "unknown"' || { echo "Bridge readiness check failed: SSH did not return a valid openclaw version"; exit 1; } env: CLAWPAL_PERF_SSH_PORT: "2299" + PERF_SETTLED_GATE_MS: "500" - name: Start Vite dev server run: | @@ -58,8 +81,8 @@ jobs: - name: Run render probe E2E run: npx playwright test --config tests/e2e/perf/playwright.config.mjs env: - PERF_MOCK_LATENCY_MS: "50" - PERF_SETTLED_GATE_MS: "5000" + PERF_BRIDGE_URL: "http://localhost:3399" + PERF_SETTLED_GATE_MS: "500" - name: Ensure report exists if: always() diff --git a/.github/workflows/metrics.yml b/.github/workflows/metrics.yml index 68a234e8..f43e3d09 100644 --- a/.github/workflows/metrics.yml +++ b/.github/workflows/metrics.yml @@ -274,9 +274,21 @@ jobs: - name: Start SSH container run: | - docker run -d --name oc-remote-perf -p 2299:22 clawpal-perf-e2e + docker run -d --name oc-remote-perf -p 2298:22 clawpal-perf-e2e for i in $(seq 1 15); do - sshpass -p clawpal-perf-e2e ssh -o StrictHostKeyChecking=no -p 2299 root@localhost echo ok 2>/dev/null && break + sshpass -p clawpal-perf-e2e ssh -o StrictHostKeyChecking=no -p 2298 root@localhost echo ok 2>/dev/null && break + sleep 1 + done + # Wait for OpenClaw gateway HTTP API (port 18789 exposed to host) + for i in $(seq 1 60); do + GW=$(curl -sf 
http://localhost:18789/ 2>/dev/null || true) + if [ -n "$GW" ]; then echo "Gateway HTTP ready after ${i}s"; break; fi + sleep 1 + done + # Wait for gateway API to be fully ready (not just dashboard) + for j in $(seq 1 30); do + API=$(curl -sf http://localhost:18789/api/status 2>/dev/null || true) + if [ -n "$API" ]; then echo "Gateway API ready after additional ${j}s"; break; fi sleep 1 done @@ -287,7 +299,7 @@ jobs: SSH_FAIL=0 # SSH transport failures (exit 255) CMD_FAIL_COUNT=0 # remote commands that ran but returned non-zero TOTAL_RUNS=0 - SSH="sshpass -p clawpal-perf-e2e ssh -o StrictHostKeyChecking=no -p 2299 root@localhost" + SSH="sshpass -p clawpal-perf-e2e ssh -o StrictHostKeyChecking=no -p 2298 root@localhost" # Exercise remote OpenClaw commands and measure timing CMDS=( @@ -377,16 +389,38 @@ jobs: - name: Start container (reuses image from remote perf step) run: | - docker run -d --name oc-perf -p 2299:22 clawpal-perf-e2e + docker run -d --name oc-perf -p 2299:22 -p 18789:18790 clawpal-perf-e2e for i in $(seq 1 15); do sshpass -p clawpal-perf-e2e ssh -o StrictHostKeyChecking=no -p 2299 root@localhost echo ok 2>/dev/null && break sleep 1 done + # Wait for OpenClaw gateway HTTP API (port 18789 exposed to host) + for i in $(seq 1 60); do + GW=$(curl -sf http://localhost:18789/ 2>/dev/null || true) + if [ -n "$GW" ]; then echo "Gateway HTTP ready after ${i}s"; break; fi + sleep 1 + done + # Wait for gateway API to be fully ready (not just dashboard) + for j in $(seq 1 30); do + API=$(curl -sf http://localhost:18789/api/status 2>/dev/null || true) + if [ -n "$API" ]; then echo "Gateway API ready after additional ${j}s"; break; fi + sleep 1 + done - - name: Extract fixtures from container - run: node tests/e2e/perf/extract-fixtures.mjs + - name: Start IPC bridge server + run: | + node tests/e2e/perf/ipc-bridge-server.mjs & + for i in $(seq 1 60); do + RESP=$(curl -s http://localhost:3399/invoke -X POST -H 'Content-Type: application/json' -d 
'{"cmd":"get_instance_runtime_snapshot","args":{}}' 2>/dev/null || true) + if echo "$RESP" | jq -e '.ok == true and .result != null' > /dev/null 2>&1; then break; fi + sleep 1 + done + # Verify SSH-backed data is available + VERIFY=$(curl -s http://localhost:3399/invoke -X POST -H 'Content-Type: application/json' -d '{"cmd":"get_instance_runtime_snapshot","args":{}}' || true) + echo "$VERIFY" | jq -e '.ok == true and .result != null' || { echo "Bridge readiness failed"; exit 1; } env: CLAWPAL_PERF_SSH_PORT: "2299" + PERF_SETTLED_GATE_MS: "15000" - name: Start Vite dev server run: | @@ -426,8 +460,8 @@ jobs: echo "pass=true" >> "$GITHUB_OUTPUT" fi env: - PERF_MOCK_LATENCY_MS: "50" - PERF_SETTLED_GATE_MS: "5000" + PERF_BRIDGE_URL: "http://localhost:3399" + PERF_SETTLED_GATE_MS: "15000" - name: Cleanup container if: always() @@ -466,7 +500,7 @@ jobs: OVERALL="❌ Some gates failed"; GATE_FAIL=1 fi for PROBE_VAL in "${{ steps.home_perf.outputs.status_ms }}" "${{ steps.home_perf.outputs.version_ms }}" "${{ steps.home_perf.outputs.agents_ms }}" "${{ steps.home_perf.outputs.models_ms }}"; do - if [ "$PROBE_VAL" != "N/A" ] && [ "$PROBE_VAL" -gt 200 ] 2>/dev/null; then + if [ "$PROBE_VAL" != "N/A" ] && [ "$PROBE_VAL" -gt 500 ] 2>/dev/null; then OVERALL="❌ Some gates failed"; GATE_FAIL=1 fi done @@ -475,7 +509,7 @@ jobs: fi BUNDLE_ICON=$( [ "${{ steps.bundle_size.outputs.pass }}" = "true" ] && echo "✅" || echo "❌" ) - MOCK_LATENCY="${{ env.PERF_MOCK_LATENCY_MS || '50' }}" + MOCK_LATENCY="N/A" COMMIT_ICON=$( [ "${{ steps.commit_size.outputs.fail }}" = "0" ] && echo "✅" || echo "❌" ) cat > /tmp/metrics_comment.md << COMMENTEOF @@ -507,7 +541,7 @@ jobs: | Tests | ${{ steps.perf_tests.outputs.passed }} passed, ${{ steps.perf_tests.outputs.failed }} failed | 0 failures | $( [ "${{ steps.perf_tests.outputs.failed }}" = "0" ] && echo "✅" || echo "❌" ) | | RSS (test process) | ${{ steps.perf_tests.outputs.rss_mb }} MB | ≤ 20 MB | $( echo "${{ steps.perf_tests.outputs.rss_mb }}" | awk 
'{print ($1 <= 80) ? "✅" : "❌"}' ) | | VMS (test process) | ${{ steps.perf_tests.outputs.vms_mb }} MB | — | ℹ️ | - | Command P50 latency | ${{ steps.perf_tests.outputs.cmd_p50_us }} µs | ≤ 1000 µs | $( echo "${{ steps.perf_tests.outputs.cmd_p50_us }}" | awk '{print ($1 != "N/A" && $1 <= 1000) ? "✅" : "❌"}' ) | + | Command P50 latency | ${{ steps.perf_tests.outputs.cmd_p50_us }} µs | ≤ 1000 µs | $( echo "${{ steps.perf_tests.outputs.cmd_p50_us }}" | awk '{print ($1 != "N/A" && $1 <= 500) ? "✅" : "❌"}' ) | | Command P95 latency | ${{ steps.perf_tests.outputs.cmd_p95_us }} µs | ≤ 5000 µs | $( echo "${{ steps.perf_tests.outputs.cmd_p95_us }}" | awk '{print ($1 != "N/A" && $1 <= 5000) ? "✅" : "❌"}' ) | | Command max latency | ${{ steps.perf_tests.outputs.cmd_max_us }} µs | ≤ 50000 µs | $( echo "${{ steps.perf_tests.outputs.cmd_max_us }}" | awk '{print ($1 != "N/A" && $1 <= 50000) ? "✅" : "❌"}' ) | @@ -542,15 +576,15 @@ jobs: - ### Home Page Render Probes (mock IPC ${MOCK_LATENCY}ms, cache-first render) $( [ "${{ steps.home_perf.outputs.pass }}" = "true" ] && echo "✅" || echo "❌" ) + ### Home Page Render Probes (real IPC) $( [ "${{ steps.home_perf.outputs.pass }}" = "true" ] && echo "✅" || echo "❌" ) | Probe | Value | Limit | Status | |-------|-------|-------|--------| - | status | ${{ steps.home_perf.outputs.status_ms }} ms | ≤ 200 ms | $( echo "${{ steps.home_perf.outputs.status_ms }}" | awk '{print ($1 != "N/A" && $1 <= 200) ? "✅" : "❌"}' ) | - | version | ${{ steps.home_perf.outputs.version_ms }} ms | ≤ 200 ms | $( echo "${{ steps.home_perf.outputs.version_ms }}" | awk '{print ($1 != "N/A" && $1 <= 200) ? "✅" : "❌"}' ) | - | agents | ${{ steps.home_perf.outputs.agents_ms }} ms | ≤ 200 ms | $( echo "${{ steps.home_perf.outputs.agents_ms }}" | awk '{print ($1 != "N/A" && $1 <= 200) ? "✅" : "❌"}' ) | - | models | ${{ steps.home_perf.outputs.models_ms }} ms | ≤ 300 ms | $( echo "${{ steps.home_perf.outputs.models_ms }}" | awk '{print ($1 != "N/A" && $1 <= 300) ? 
"✅" : "❌"}' ) | - | settled | ${{ steps.home_perf.outputs.settled_ms }} ms | ≤ 1000 ms | $( echo "${{ steps.home_perf.outputs.settled_ms }}" | awk '{print ($1 != "N/A" && $1 <= 1000) ? "✅" : "❌"}' ) | + | status | ${{ steps.home_perf.outputs.status_ms }} ms | ≤ 500 ms | $( echo "${{ steps.home_perf.outputs.status_ms }}" | awk '{print ($1 != "N/A" && $1 <= 500) ? "✅" : "❌"}' ) | + | version | ${{ steps.home_perf.outputs.version_ms }} ms | ≤ 500 ms | $( echo "${{ steps.home_perf.outputs.version_ms }}" | awk '{print ($1 != "N/A" && $1 <= 500) ? "✅" : "❌"}' ) | + | agents | ${{ steps.home_perf.outputs.agents_ms }} ms | ≤ 500 ms | $( echo "${{ steps.home_perf.outputs.agents_ms }}" | awk '{print ($1 != "N/A" && $1 <= 500) ? "✅" : "❌"}' ) | + | models | ${{ steps.home_perf.outputs.models_ms }} ms | ≤ 500 ms | $( echo "${{ steps.home_perf.outputs.models_ms }}" | awk '{print ($1 != "N/A" && $1 <= 500) ? "✅" : "❌"}' ) | + | settled | ${{ steps.home_perf.outputs.settled_ms }} ms | ≤ 500 ms | $( echo "${{ steps.home_perf.outputs.settled_ms }}" | awk '{print ($1 != "N/A" && $1 <= 500) ? 
"✅" : "❌"}' ) | ### Code Readability @@ -569,9 +603,17 @@ jobs: echo "gate_fail=${GATE_FAIL}" >> "$GITHUB_OUTPUT" + - name: Save comment as artifact (for fork PRs) + uses: actions/upload-artifact@v4 + with: + name: metrics-comment + path: /tmp/metrics_comment.md + retention-days: 1 + - name: Find existing metrics comment uses: peter-evans/find-comment@v3 id: fc + if: github.event.pull_request.head.repo.full_name == github.repository with: issue-number: ${{ github.event.pull_request.number }} comment-author: 'github-actions[bot]' @@ -579,6 +621,7 @@ jobs: - name: Create or update metrics comment uses: peter-evans/create-or-update-comment@v4 + if: github.event.pull_request.head.repo.full_name == github.repository with: comment-id: ${{ steps.fc.outputs.comment-id }} issue-number: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/screenshot.yml b/.github/workflows/screenshot.yml index 310ebc5e..36274e58 100644 --- a/.github/workflows/screenshot.yml +++ b/.github/workflows/screenshot.yml @@ -22,7 +22,8 @@ jobs: steps: - uses: actions/checkout@v4 with: - ref: ${{ github.head_ref }} + repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }} + ref: ${{ github.event.pull_request.head.ref || github.ref }} fetch-depth: 0 - name: Build screenshot Docker image @@ -51,16 +52,20 @@ jobs: # Push screenshots to a ref so we can embed them in the PR comment - name: Push screenshots to ref + if: github.event.pull_request.head.repo.full_name == github.repository id: push_ref run: | REF_NAME="screenshots/pr-${{ github.event.pull_request.number || 'manual' }}" git config user.name "github-actions[bot]" git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + # Save screenshots before clearing the worktree + cp -r screenshots /tmp/pr-screenshots + # Create orphan branch with only screenshots git checkout --orphan "${REF_NAME}" git rm -rf . > /dev/null 2>&1 || true - cp -r screenshots/* . 
+ cp -r /tmp/pr-screenshots/* . git add -A git commit -m "Screenshots for ${{ github.sha }}" --allow-empty git push origin "${REF_NAME}" --force @@ -69,9 +74,10 @@ jobs: echo "sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT" # Return to PR branch - git checkout "${{ github.head_ref }}" 2>/dev/null || git checkout "${{ github.sha }}" + git checkout "${{ github.event.pull_request.head.ref }}" 2>/dev/null || git checkout "${{ github.sha }}" - name: Generate PR comment body + if: github.event.pull_request.head.repo.full_name == github.repository id: comment run: | REF="${{ steps.push_ref.outputs.ref }}" @@ -143,7 +149,7 @@ jobs: - name: Find existing screenshot comment uses: peter-evans/find-comment@v3 id: fc - if: github.event_name == 'pull_request' + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository with: issue-number: ${{ github.event.pull_request.number }} comment-author: 'github-actions[bot]' @@ -151,7 +157,7 @@ jobs: - name: Create or update screenshot comment uses: peter-evans/create-or-update-comment@v4 - if: github.event_name == 'pull_request' + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository with: comment-id: ${{ steps.fc.outputs.comment-id }} issue-number: ${{ github.event.pull_request.number }} From bb04d7e376d93ec5d24a4e06a7cddb8c6c24c48d Mon Sep 17 00:00:00 2001 From: zzhengzhuo015 Date: Tue, 24 Mar 2026 17:40:21 +0800 Subject: [PATCH 20/20] feat: improve remote doctor bootstrap and diagnostics --- agents.md | 117 +++++++++- .../2026-03-24-clawpal-server-url-design.md | 68 ++++++ .../2026-03-24-clawpal-server-url-plan.md | 100 +++++++++ ...4-remote-doctor-bootstrap-script-design.md | 94 ++++++++ ...-24-remote-doctor-bootstrap-script-plan.md | 119 ++++++++++ ...emote-doctor-connect-diagnostics-design.md | 57 +++++ ...-remote-doctor-connect-diagnostics-plan.md | 112 ++++++++++ scripts/README.md | 2 + scripts/remote-doctor-bootstrap.sh | 
208 ++++++++++++++++++ src-tauri/src/commands/app_logs.rs | 2 - src-tauri/src/commands/preferences.rs | 64 ++++++ src-tauri/src/commands/recipe_cmds.rs | 2 - src-tauri/src/commands/upgrade.rs | 2 - src-tauri/src/commands/util.rs | 2 - src-tauri/src/commands/watchdog_cmds.rs | 2 - src-tauri/src/lib.rs | 4 +- src-tauri/src/node_client.rs | 68 +++++- src-tauri/src/remote_doctor/config.rs | 26 +-- src-tauri/src/remote_doctor/repair_loops.rs | 12 +- src-tauri/src/remote_doctor/session.rs | 50 +++++ .../tests/remote_doctor_bootstrap_script.rs | 176 +++++++++++++++ src/components/SettingsAlphaFeaturesCard.tsx | 63 ++---- .../SettingsAlphaFeaturesCard.test.tsx | 14 +- src/lib/__tests__/invite-code.test.ts | 93 ++++++++ src/lib/__tests__/remote-doctor-error.test.ts | 25 +++ src/lib/__tests__/use-api-extra.test.ts | 8 + src/lib/api.ts | 4 + src/lib/invite-code.ts | 83 +++++++ src/lib/remote-doctor-error.ts | 24 ++ src/lib/use-api.ts | 3 + src/locales/en.json | 15 +- src/locales/zh.json | 15 +- src/pages/Doctor.tsx | 48 +--- src/pages/Settings.tsx | 108 ++++++--- 34 files changed, 1613 insertions(+), 177 deletions(-) create mode 100644 docs/plans/2026-03-24-clawpal-server-url-design.md create mode 100644 docs/plans/2026-03-24-clawpal-server-url-plan.md create mode 100644 docs/plans/2026-03-24-remote-doctor-bootstrap-script-design.md create mode 100644 docs/plans/2026-03-24-remote-doctor-bootstrap-script-plan.md create mode 100644 docs/plans/2026-03-24-remote-doctor-connect-diagnostics-design.md create mode 100644 docs/plans/2026-03-24-remote-doctor-connect-diagnostics-plan.md create mode 100755 scripts/remote-doctor-bootstrap.sh create mode 100644 src-tauri/tests/remote_doctor_bootstrap_script.rs create mode 100644 src/lib/__tests__/invite-code.test.ts create mode 100644 src/lib/__tests__/remote-doctor-error.test.ts create mode 100644 src/lib/invite-code.ts create mode 100644 src/lib/remote-doctor-error.ts diff --git a/agents.md b/agents.md index f061a817..822c690a 100644 --- 
a/agents.md +++ b/agents.md @@ -1,2 +1,115 @@ - -Moved to [`AGENTS.md`](AGENTS.md). +# AGENTS.md + +ClawPal 是基于 Tauri 的 OpenClaw 桌面伴侣应用,覆盖安装、配置、Doctor 诊断、版本回滚、远程 SSH 管理和多平台打包发布。 + +技术栈:Tauri v2 + Rust + React + TypeScript + Bun + +## 目录说明 + +``` +src/ # 前端(React/TypeScript) +src/lib/api.ts # 前端对 Tauri command 的统一封装 +src-tauri/src/commands/ # Tauri command 层(参数校验、权限检查、错误映射) +src-tauri/src/commands/mod.rs # Command 路由与公共逻辑 +clawpal-core/ # 核心业务逻辑(与 Tauri 解耦) +clawpal-cli/ # CLI 接口 +docs/architecture/ # 模块边界、分层原则、核心数据流 +docs/decisions/ # 关键设计决策(ADR) +docs/plans/ # 任务计划与实施方案 +docs/runbooks/ # 启动、调试、发布、回滚、故障处理 +docs/testing/ # 测试矩阵与验证策略 +harness/fixtures/ # 最小稳定测试数据 +harness/artifacts/ # 日志、截图、trace、失败产物收集 +Makefile # 统一命令入口 +``` + +## 启动命令 + +本项目使用 `Makefile` 作为统一命令入口(无需额外安装,macOS/Linux 自带 `make`): + +```bash +make install # 安装前端依赖 +make dev # 启动开发模式(前端 + Tauri) +make dev-frontend # 仅启动前端 +make test-unit # 运行所有单元测试(前端 + Rust) +make lint # 运行所有 lint(TypeScript + Rust fmt + clippy) +make fmt # 自动修复 Rust 格式 +make build # 构建 Tauri 应用(debug) +make ci # 本地运行完整 CI 检查 +make doctor # 检查开发环境依赖 +``` + +完整命令列表:`make help` + +底层命令(不使用 make 时): + +```bash +bun install # 安装前端依赖 +bun run dev:tauri # 启动开发模式(前端 + Tauri) +bun run dev # 仅启动前端 +cargo test --workspace # Rust 单元测试 +bun test # 前端单元测试 +bun run typecheck # TypeScript 类型检查 +cargo fmt --check # Rust 格式检查 +cargo clippy # Rust lint +``` + +## 代码分层约束 + +### UI 层 (`src/`) +- 不直接在组件中使用 `invoke("xxx")`,通过 `src/lib/api.ts` 封装调用 +- 不直接访问原生能力 +- 不拼接 command 名称和错误字符串 + +### Command 层 (`src-tauri/src/commands/`) +- 保持薄层:参数校验、权限检查、错误映射、事件分发 +- 不堆积业务编排逻辑 +- 不直接写文件系统或数据库 + +### Domain 层 (`clawpal-core/`) +- 核心业务规则和用例编排 +- 尽量不依赖 `tauri::*` +- 输入输出保持普通 Rust 类型 + +### Adapter 层 +- 所有原生副作用(文件系统、shell、通知、剪贴板、updater)从 adapter 层进入 +- 须提供测试替身(mock/fake) + +## 提交与 PR 要求 + +- Conventional Commits: `feat:` / `fix:` / `docs:` / `refactor:` / `chore:` +- 分支命名: `feat/*` / `fix/*` / `chore/*` +- PR 变更建议 ≤ 500 行(不含自动生成文件) +- PR 必须通过所有 CI gate +- 涉及 UI 改动须附截图 
+- 涉及权限/安全改动须附 capability 变更说明 + +## 新增 Command 检查清单 + +- [ ] Command 定义在 `src-tauri/src/commands/` 对应模块 +- [ ] 参数校验和错误映射完整 +- [ ] 已在 `lib.rs` 的 `invoke_handler!` 中注册 +- [ ] 前端 API 封装已更新 +- [ ] 相关文档已更新 + +## 安全约束 + +- 禁止提交明文密钥或配置路径泄露 +- Command 白名单制,新增原生能力必须补文档和验证 +- 对 `~/.openclaw` 的读写需包含异常回退和用户可见提示 +- 默认最小权限原则 + +## 常见排查路径 + +- **Command 调用失败** → 见 `docs/runbooks/command-debugging.md` +- **本地开发启动** → 见 `docs/runbooks/local-development.md` +- **版本发布** → 见 `docs/runbooks/release-process.md` +- **打包后行为与 dev 不一致** → 检查资源路径、权限配置、签名、窗口事件 +- **跨平台差异** → 检查 adapter 层平台分支和 CI 构建日志 + +## 参考文档 + +- [Harness Engineering 标准](https://github.com/lay2dev/clawpal/issues/123) +- [落地计划](docs/plans/2026-03-16-harness-engineering-standard.md) +- [架构设计](docs/architecture/design.md) +- [测试矩阵](docs/testing/business-flow-test-matrix.md) diff --git a/docs/plans/2026-03-24-clawpal-server-url-design.md b/docs/plans/2026-03-24-clawpal-server-url-design.md new file mode 100644 index 00000000..dee8407b --- /dev/null +++ b/docs/plans/2026-03-24-clawpal-server-url-design.md @@ -0,0 +1,68 @@ +# ClawPal Server URL Default Update Design + +**Date:** 2026-03-24 + +**Goal:** Update ClawPal's default clawpal-server endpoints from `127.0.0.1:3000` to `65.21.45.43:3040` without changing how user-saved overrides work. + +## Scope + +- Update the default Remote Doctor websocket gateway URL to `ws://65.21.45.43:3040/ws`. +- Update the default invite-code exchange base URL to `http://65.21.45.43:3040`. +- Update frontend fallback logic, UI placeholder text, and related tests so the app surface stays consistent. + +## Non-Goals + +- No new settings fields. +- No runtime detection or environment-based switching. +- No refactor to shared cross-language constants in this change. + +## Approach Options + +### Option 1: Backend-only hardcoded update + +Change only the Rust defaults used by Remote Doctor and invite exchange. + +**Pros:** Smallest code diff. 
+**Cons:** Frontend placeholders, fallback URL derivation, and tests would still point at the old address. + +### Option 2: Unified default update across backend and frontend + +Change every default/fallback/reference that currently treats `127.0.0.1:3000` as the clawpal-server default. + +**Pros:** UI text, fallback behavior, logs, and tests all stay aligned. +**Cons:** Slightly broader edit set. + +### Option 3: Shared config abstraction + +Introduce a shared constant/config layer for the default URL family. + +**Pros:** Cleaner long-term maintenance. +**Cons:** Unnecessary refactor for a one-address change. + +## Recommended Design + +Use **Option 2**. + +The Rust Remote Doctor config should keep ignoring any saved gateway URL override for the current fixed server path behavior, but the fixed websocket constant should move to `ws://65.21.45.43:3040/ws`. + +The invite exchange flow should move its fixed HTTP endpoint to `http://65.21.45.43:3040/api-keys/exchange`, and the frontend fallback helper should derive `http://65.21.45.43:3040` whenever no gateway URL is provided or parsing fails. + +Settings copy and placeholder text should show the new websocket endpoint so users see the same default the app actually uses. Any logging payloads that embed the old default URL should be updated too. + +## Data Flow + +1. Remote Doctor repair loads the fixed gateway URL from Rust config. +2. Invite-code exchange posts to the fixed HTTP exchange endpoint in Rust. +3. Frontend fallback logic uses the new HTTP base URL when the gateway URL is blank or invalid. +4. Settings screen examples and logging reflect the same websocket default. + +## Error Handling + +- Existing blank-input and invalid invite-code handling stays unchanged. +- Invalid custom gateway URLs in the frontend should continue to fall back to the default HTTP base URL, now pointing at `65.21.45.43:3040`. + +## Testing + +- Update the Rust unit test that asserts the fixed Remote Doctor gateway URL. 
+- Update the frontend invite-code tests to assert the new default HTTP base URL and exchange endpoint. +- Run the focused frontend and Rust tests that cover the changed defaults. diff --git a/docs/plans/2026-03-24-clawpal-server-url-plan.md b/docs/plans/2026-03-24-clawpal-server-url-plan.md new file mode 100644 index 00000000..36de0a55 --- /dev/null +++ b/docs/plans/2026-03-24-clawpal-server-url-plan.md @@ -0,0 +1,100 @@ +# ClawPal Server URL Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Change ClawPal's default clawpal-server endpoints from `127.0.0.1:3000` to `65.21.45.43:3040` while keeping frontend copy, fallback behavior, and tests consistent. + +**Architecture:** Keep the current fixed-default design. Update the Rust websocket and HTTP constants, update the frontend HTTP fallback constant, and align UI copy plus focused tests with the new address. Do not change user-saved preference behavior or add new configuration layers. + +**Tech Stack:** Rust, Tauri v2, React, TypeScript, Bun + +--- + +### Task 1: Update frontend fallback expectations first + +**Files:** +- Modify: `src/lib/__tests__/invite-code.test.ts` +- Test: `src/lib/__tests__/invite-code.test.ts` + +**Step 1: Write the failing test** + +Update the test expectations so blank gateway URLs and localhost websocket defaults now resolve to `http://65.21.45.43:3040`, and invite exchange posts to `http://65.21.45.43:3040/api-keys/exchange`. + +**Step 2: Run test to verify it fails** + +Run: `bun test src/lib/__tests__/invite-code.test.ts` +Expected: FAIL because the implementation still returns `http://127.0.0.1:3000`. + +**Step 3: Write minimal implementation** + +Update `src/lib/invite-code.ts` to use `http://65.21.45.43:3040` as the default base URL. 
+ +**Step 4: Run test to verify it passes** + +Run: `bun test src/lib/__tests__/invite-code.test.ts` +Expected: PASS + +### Task 2: Update Rust fixed gateway default + +**Files:** +- Modify: `src-tauri/src/remote_doctor/config.rs` +- Test: `src-tauri/src/remote_doctor/config.rs` + +**Step 1: Write the failing test** + +Update the existing fixed-gateway test so it expects `ws://65.21.45.43:3040/ws`. + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::config::tests::load_gateway_config_uses_fixed_clawpal_server_url -- --nocapture` +Expected: FAIL because the implementation still returns `ws://127.0.0.1:3000/ws`. + +**Step 3: Write minimal implementation** + +Update the fixed websocket constant in `src-tauri/src/remote_doctor/config.rs`. + +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::config::tests::load_gateway_config_uses_fixed_clawpal_server_url -- --nocapture` +Expected: PASS + +### Task 3: Update Rust invite exchange endpoint and frontend copy + +**Files:** +- Modify: `src-tauri/src/commands/preferences.rs` +- Modify: `src/pages/Settings.tsx` +- Modify: `src/locales/en.json` +- Modify: `src/locales/zh.json` + +**Step 1: Write the change** + +Update the fixed invite exchange URL to `http://65.21.45.43:3040/api-keys/exchange`. Update settings placeholder/hint text and the invite-exchange error log payload to reference `ws://65.21.45.43:3040/ws`. 
+ +**Step 2: Run focused verification** + +Run: `bun test src/lib/__tests__/invite-code.test.ts` +Expected: PASS + +Run: `cargo test remote_doctor::config::tests::load_gateway_config_uses_fixed_clawpal_server_url -- --nocapture` +Expected: PASS + +### Task 4: Run final verification + +**Files:** +- Modify: `docs/plans/2026-03-24-clawpal-server-url-design.md` +- Modify: `docs/plans/2026-03-24-clawpal-server-url-plan.md` + +**Step 1: Run frontend verification** + +Run: `bun test src/lib/__tests__/invite-code.test.ts` +Expected: PASS + +**Step 2: Run Rust verification** + +Run: `cargo test remote_doctor::config::tests::load_gateway_config_uses_fixed_clawpal_server_url -- --nocapture` +Expected: PASS + +**Step 3: Review diff** + +Run: `git diff -- docs/plans/2026-03-24-clawpal-server-url-design.md docs/plans/2026-03-24-clawpal-server-url-plan.md src/lib/invite-code.ts src/lib/__tests__/invite-code.test.ts src-tauri/src/remote_doctor/config.rs src-tauri/src/commands/preferences.rs src/pages/Settings.tsx src/locales/en.json src/locales/zh.json` +Expected: Only the default ClawPal server URL references and aligned docs/copy should change. diff --git a/docs/plans/2026-03-24-remote-doctor-bootstrap-script-design.md b/docs/plans/2026-03-24-remote-doctor-bootstrap-script-design.md new file mode 100644 index 00000000..f8e294c0 --- /dev/null +++ b/docs/plans/2026-03-24-remote-doctor-bootstrap-script-design.md @@ -0,0 +1,94 @@ +# Remote Doctor Bootstrap Script Design + +**Date:** 2026-03-24 + +**Goal:** Provide a one-off remote-host script that prepares the dedicated `clawpal-remote-doctor` agent, workspace, and bootstrap files needed by the Remote Doctor planner flow. + +## Scope + +- Run directly on the remote host with `bash`. +- Require `python3` for config mutation. +- Back up the existing `~/.openclaw/openclaw.json` before changing it. +- Ensure `agents.list` contains a `clawpal-remote-doctor` entry with an explicit workspace. 
+- Create the dedicated workspace and write the expected bootstrap files. +- Be safe to run more than once. + +## Non-Goals + +- No ClawPal/Tauri command dispatch. +- No SSH orchestration from the desktop app. +- No automatic execution from the Remote Doctor connection path. +- No full OpenClaw config repair beyond the dedicated agent/workspace setup. + +## Approach Options + +### Option 1: Pure bash/sed + +Use shell string operations to patch `openclaw.json` in place. + +**Pros:** Lowest dependency footprint. +**Cons:** Too easy to corrupt JSON, especially on repeat runs or partially configured hosts. + +### Option 2: Bash plus `python3` JSON mutation + +Use `bash` for file/dir orchestration and `python3` for config parsing, mutation, and pretty-printing. + +**Pros:** Good balance of portability and safety. Keeps the script self-contained. +**Cons:** Depends on `python3`. Native `json` parsing needs a small normalization layer for common JSON5-style comments and trailing commas. + +### Option 3: Depend on OpenClaw CLI write commands + +Use remote `openclaw agents add` / `openclaw config set` commands instead of editing the config file directly. + +**Pros:** Avoids manual file mutation if the CLI behavior is perfectly known. +**Cons:** Current repository evidence is not strong enough to rely on exact CLI write semantics for this one-off recovery script. + +## Recommended Design + +Use **Option 2**. + +Add a standalone script at `scripts/remote-doctor-bootstrap.sh`. The script should default to: + +- config path: `~/.openclaw/openclaw.json` +- agent id: `clawpal-remote-doctor` +- agent display name: `ClawPal Remote Doctor` +- workspace: `~/.openclaw/workspaces/clawpal-remote-doctor` + +The script should: + +1. Validate that `bash` and `python3` are available. +2. Create `~/.openclaw` and the config file if they do not exist. +3. Create a timestamped backup of the current config before mutation. +4. 
Use an embedded `python3` block to: + - parse the config as JSON + - retry with a lightweight normalization pass for common JSON5-style comments and trailing commas + - ensure `/agents/list` exists as an array + - add or update the dedicated `clawpal-remote-doctor` entry + - preserve unrelated agent entries + - write pretty JSON back to disk +5. Create the workspace directory. +6. Write: + - `IDENTITY.md` + - `AGENTS.md` + - `BOOTSTRAP.md` + - `USER.md` + - `HEARTBEAT.md` +7. Print a short summary including the config path, backup path, workspace path, and whether the agent entry already existed. + +The script should be idempotent: + +- If the agent already exists, do not append a duplicate. +- If the workspace already exists, keep it and refresh the bootstrap files. +- If `agents` or `agents.list` are missing, create them. + +## Testing + +- Add an integration test that runs the script against a temporary `HOME`. +- Start from a minimal config that only contains the main agent. +- Verify the script: + - exits successfully + - creates a backup file + - writes the dedicated `clawpal-remote-doctor` entry into `openclaw.json` + - creates the workspace + - writes all expected bootstrap files +- Add coverage for a config input that includes comments or trailing commas so the JSON normalization path is exercised. diff --git a/docs/plans/2026-03-24-remote-doctor-bootstrap-script-plan.md b/docs/plans/2026-03-24-remote-doctor-bootstrap-script-plan.md new file mode 100644 index 00000000..3c0ac154 --- /dev/null +++ b/docs/plans/2026-03-24-remote-doctor-bootstrap-script-plan.md @@ -0,0 +1,119 @@ +# Remote Doctor Bootstrap Script Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Add a one-off remote-host bootstrap script that safely prepares the dedicated `clawpal-remote-doctor` agent entry, workspace, and bootstrap files required by the Remote Doctor planner flow. 
+ +**Architecture:** Keep the implementation fully self-contained in a repository script. Use `bash` for orchestration and file setup, and use a small embedded `python3` block for config parsing and mutation so the script stays idempotent and avoids brittle shell JSON edits. Verify behavior through an integration test that runs the real script in a temporary `HOME`. + +**Tech Stack:** Bash, Python 3, Rust integration tests, Cargo + +--- + +### Task 1: Write the approved design artifact + +**Files:** +- Create: `docs/plans/2026-03-24-remote-doctor-bootstrap-script-design.md` +- Create: `docs/plans/2026-03-24-remote-doctor-bootstrap-script-plan.md` + +**Step 1: Save the design** + +Write the approved script design into `docs/plans/2026-03-24-remote-doctor-bootstrap-script-design.md`. + +**Step 2: Save the plan** + +Write this implementation plan into `docs/plans/2026-03-24-remote-doctor-bootstrap-script-plan.md`. + +### Task 2: Add a failing integration test for the bootstrap script + +**Files:** +- Create: `src-tauri/tests/remote_doctor_bootstrap_script.rs` +- Test: `src-tauri/tests/remote_doctor_bootstrap_script.rs` + +**Step 1: Write the failing test** + +Add an integration test that: +- creates a temporary `HOME` +- seeds `~/.openclaw/openclaw.json` with a minimal config containing only the main agent +- runs `bash ../scripts/remote-doctor-bootstrap.sh` +- asserts: + - exit code is `0` + - a config backup file exists + - `clawpal-remote-doctor` is present in `agents.list` + - the agent workspace equals `~/.openclaw/workspaces/clawpal-remote-doctor` + - `IDENTITY.md`, `AGENTS.md`, `BOOTSTRAP.md`, `USER.md`, and `HEARTBEAT.md` exist + +Add a second test that seeds a config with comments and trailing commas so the JSON normalization path is covered. + +**Step 2: Run test to verify it fails** + +Run: `cargo test -p clawpal --test remote_doctor_bootstrap_script -- --nocapture` +Expected: FAIL because the script does not exist yet. 
+ +**Step 3: Commit** + +```bash +git add docs/plans/2026-03-24-remote-doctor-bootstrap-script-design.md docs/plans/2026-03-24-remote-doctor-bootstrap-script-plan.md src-tauri/tests/remote_doctor_bootstrap_script.rs +git commit -m "test: add remote doctor bootstrap script coverage" +``` + +### Task 3: Implement the standalone bootstrap script + +**Files:** +- Create: `scripts/remote-doctor-bootstrap.sh` +- Modify: `scripts/README.md` + +**Step 1: Write minimal implementation** + +Create `scripts/remote-doctor-bootstrap.sh` that: +- uses `#!/usr/bin/env bash` +- enables `set -euo pipefail` +- validates `python3` +- resolves: + - `OPENCLAW_HOME` defaulting to `$HOME/.openclaw` + - `CONFIG_PATH="$OPENCLAW_HOME/openclaw.json"` + - `AGENT_ID="clawpal-remote-doctor"` + - `WORKSPACE_PATH="$OPENCLAW_HOME/workspaces/$AGENT_ID"` +- creates the config dir and config file if missing +- creates a timestamped backup before mutation +- uses embedded `python3` to parse/update/write the config +- writes the five bootstrap files with the expected Remote Doctor content +- prints a readable summary of the applied paths + +Document the new script briefly in `scripts/README.md`. 
+ +**Step 2: Run test to verify it passes** + +Run: `cargo test -p clawpal --test remote_doctor_bootstrap_script -- --nocapture` +Expected: PASS + +**Step 3: Commit** + +```bash +git add scripts/remote-doctor-bootstrap.sh scripts/README.md src-tauri/tests/remote_doctor_bootstrap_script.rs +git commit -m "feat: add remote doctor bootstrap script" +``` + +### Task 4: Final focused verification + +**Files:** +- Modify: `docs/plans/2026-03-24-remote-doctor-bootstrap-script-design.md` +- Modify: `docs/plans/2026-03-24-remote-doctor-bootstrap-script-plan.md` +- Create: `scripts/remote-doctor-bootstrap.sh` +- Create: `src-tauri/tests/remote_doctor_bootstrap_script.rs` +- Modify: `scripts/README.md` + +**Step 1: Run focused Rust verification** + +Run: `cargo test -p clawpal --test remote_doctor_bootstrap_script -- --nocapture` +Expected: PASS + +**Step 2: Run focused script lint check** + +Run: `bash -n scripts/remote-doctor-bootstrap.sh` +Expected: PASS + +**Step 3: Review diff** + +Run: `git diff -- docs/plans/2026-03-24-remote-doctor-bootstrap-script-design.md docs/plans/2026-03-24-remote-doctor-bootstrap-script-plan.md scripts/README.md scripts/remote-doctor-bootstrap.sh src-tauri/tests/remote_doctor_bootstrap_script.rs` +Expected: Only the new bootstrap docs, script, and focused test changes should appear. diff --git a/docs/plans/2026-03-24-remote-doctor-connect-diagnostics-design.md b/docs/plans/2026-03-24-remote-doctor-connect-diagnostics-design.md new file mode 100644 index 00000000..5c947e4d --- /dev/null +++ b/docs/plans/2026-03-24-remote-doctor-connect-diagnostics-design.md @@ -0,0 +1,57 @@ +# Remote Doctor Connect Diagnostics Design + +**Date:** 2026-03-24 + +**Goal:** Make Remote Doctor websocket handshake failures actionable by exposing the close/error reason in backend errors, session logs, and Doctor UI messaging. + +## Scope + +- Preserve the latest websocket disconnect reason inside `NodeClient`. 
+- Surface that reason in request failures such as `Connection lost while waiting for response`. +- Write Remote Doctor gateway connect failures into the per-session JSONL log. +- Show a more specific Doctor page error when the websocket is accepted but closed before the server replies. + +## Non-Goals + +- No protocol changes to clawpal-server. +- No new persistent settings. +- No large telemetry/event schema redesign. + +## Approach Options + +### Option 1: Session log only + +Add a `gateway_connect_failed` event to the Remote Doctor session log and keep the current UI text. + +**Pros:** Smallest code change. +**Cons:** Users still see the same vague error until someone opens logs. + +### Option 2: Backend diagnostics plus UI hint + +Capture the latest websocket close/error reason in `NodeClient`, include it in handshake/request errors, write the failure to the Remote Doctor session log, and map the Doctor UI to a more actionable message. + +**Pros:** Best balance of debuggability and user clarity. +**Cons:** Slightly broader surface area. + +### Option 3: Full structured websocket tracing + +Emit challenge/connect/close frames and handshake state transitions as dedicated debug events. + +**Pros:** Richest diagnostics. +**Cons:** Too much scope for the current need. + +## Recommended Design + +Use **Option 2**. + +`NodeClient` should keep an in-memory `last_disconnect_reason` string. When the websocket reader receives a close frame, it should record a message with the close code and optional reason. When it receives a websocket transport error, it should record that error text. Any pending `send_request()` call that loses its response channel should include this stored reason in the returned error string. + +Remote Doctor should log gateway connect failures immediately after `client.connect(...)` fails. The log entry should include the session id, gateway URL, whether an auth token override was present, and the specific error text. 
This makes the JSONL artifact useful even when the connection fails before any plan request is sent. + +The Doctor page should turn the raw `Connection lost while waiting for response: ...` family into a more actionable message that tells the user the websocket was accepted but the server closed before replying, and that the invite-code-derived token or saved Remote Doctor auth token should be checked first. The original low-level detail should still be kept in the displayed message. + +## Testing + +- Add Rust unit tests for the disconnect-reason formatting helper(s). +- Add a Rust unit test for the session log helper that writes gateway connect failures. +- Add a frontend unit test for the Doctor-facing error formatter. diff --git a/docs/plans/2026-03-24-remote-doctor-connect-diagnostics-plan.md b/docs/plans/2026-03-24-remote-doctor-connect-diagnostics-plan.md new file mode 100644 index 00000000..83fa85c9 --- /dev/null +++ b/docs/plans/2026-03-24-remote-doctor-connect-diagnostics-plan.md @@ -0,0 +1,112 @@ +# Remote Doctor Connect Diagnostics Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Expose actionable Remote Doctor websocket handshake failure details in backend errors, session logs, and Doctor UI messaging. + +**Architecture:** Keep the existing Remote Doctor connection flow, but add one small state slot in `NodeClient` for the latest disconnect reason. Reuse that state to improve returned errors, log connect failures into the existing session JSONL stream, and format a more helpful Doctor page error string without changing the underlying protocol. 
+ +**Tech Stack:** Rust, Tauri v2, React, TypeScript, Bun + +--- + +### Task 1: Lock backend disconnect wording with failing tests + +**Files:** +- Modify: `src-tauri/src/node_client.rs` +- Test: `src-tauri/src/node_client.rs` + +**Step 1: Write the failing test** + +Add focused unit tests for: +- formatting a websocket close frame into a readable reason string +- building the `Connection lost while waiting for response: ...` message when a disconnect reason exists + +**Step 2: Run test to verify it fails** + +Run: `cargo test node_client::tests -- --nocapture` +Expected: FAIL because the helper functions and richer message formatting do not exist yet. + +**Step 3: Write minimal implementation** + +Add small helper functions and `last_disconnect_reason` storage to `NodeClient`, and use them when the reader task receives a close/error and when `send_request()` loses its response channel. + +**Step 4: Run test to verify it passes** + +Run: `cargo test node_client::tests -- --nocapture` +Expected: PASS + +### Task 2: Lock Remote Doctor session logging with a failing test + +**Files:** +- Modify: `src-tauri/src/remote_doctor/session.rs` +- Modify: `src-tauri/src/remote_doctor/repair_loops.rs` +- Test: `src-tauri/src/remote_doctor/session.rs` + +**Step 1: Write the failing test** + +Add a unit test that writes a gateway connect failure event and asserts the JSONL line contains: +- `event = "gateway_connect_failed"` +- the gateway URL +- whether a gateway auth token override was present +- the specific error string + +**Step 2: Run test to verify it fails** + +Run: `cargo test remote_doctor::session::tests -- --nocapture` +Expected: FAIL because the helper and event do not exist yet. + +**Step 3: Write minimal implementation** + +Add a small session logging helper and call it from `start_remote_doctor_repair_impl(...)` when `client.connect(...)` returns an error. 
+ +**Step 4: Run test to verify it passes** + +Run: `cargo test remote_doctor::session::tests -- --nocapture` +Expected: PASS + +### Task 3: Lock Doctor UI wording with a failing test + +**Files:** +- Create: `src/lib/remote-doctor-error.ts` +- Create: `src/lib/__tests__/remote-doctor-error.test.ts` +- Modify: `src/pages/Doctor.tsx` + +**Step 1: Write the failing test** + +Add a frontend unit test that verifies `Connection lost while waiting for response: ...` becomes a more actionable Doctor error message that mentions the websocket was accepted and that the invite-code-derived token or saved Remote Doctor token should be checked. + +**Step 2: Run test to verify it fails** + +Run: `bun test src/lib/__tests__/remote-doctor-error.test.ts` +Expected: FAIL because the formatter helper does not exist yet. + +**Step 3: Write minimal implementation** + +Implement the formatter helper and use it in the Remote Doctor repair catch block in `Doctor.tsx`. + +**Step 4: Run test to verify it passes** + +Run: `bun test src/lib/__tests__/remote-doctor-error.test.ts` +Expected: PASS + +### Task 4: Final focused verification + +**Files:** +- Modify: `docs/plans/2026-03-24-remote-doctor-connect-diagnostics-design.md` +- Modify: `docs/plans/2026-03-24-remote-doctor-connect-diagnostics-plan.md` + +**Step 1: Run Rust verification** + +Run: `cargo test node_client::tests remote_doctor::session::tests -- --nocapture` +Expected: PASS + +**Step 2: Run frontend verification** + +Run: `bun test src/lib/__tests__/remote-doctor-error.test.ts` +Expected: PASS + +**Step 3: Review diff** + +Run: `git diff -- docs/plans/2026-03-24-remote-doctor-connect-diagnostics-design.md docs/plans/2026-03-24-remote-doctor-connect-diagnostics-plan.md src-tauri/src/node_client.rs src-tauri/src/remote_doctor/session.rs src-tauri/src/remote_doctor/repair_loops.rs src/lib/remote-doctor-error.ts src/lib/__tests__/remote-doctor-error.test.ts src/pages/Doctor.tsx` +Expected: Only Remote Doctor diagnostics and UI 
wording should change. diff --git a/scripts/README.md b/scripts/README.md index 655b9cd7..4758ef97 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -20,6 +20,8 @@ These scripts mirror the repository CI checks locally without installing system - `scripts/precommit.sh` All-in-one script to run the pre-commit checks manually. Supports `--staged` flag. Runs frontend CI, Rust CI, and metrics CI before each commit. +- `scripts/remote-doctor-bootstrap.sh` + One-off remote-host bootstrap script that creates the `clawpal-remote-doctor` agent entry, dedicated workspace, and planner bootstrap files under `~/.openclaw`. All scripts resolve the repo root from their own path and can be run from anywhere inside the worktree. diff --git a/scripts/remote-doctor-bootstrap.sh b/scripts/remote-doctor-bootstrap.sh new file mode 100755 index 00000000..d1694a48 --- /dev/null +++ b/scripts/remote-doctor-bootstrap.sh @@ -0,0 +1,208 @@ +#!/usr/bin/env bash + +set -euo pipefail + +AGENT_ID="${AGENT_ID:-clawpal-remote-doctor}" +AGENT_NAME="${AGENT_NAME:-ClawPal Remote Doctor}" +OPENCLAW_HOME="${OPENCLAW_HOME:-$HOME/.openclaw}" +CONFIG_PATH="${CONFIG_PATH:-$OPENCLAW_HOME/openclaw.json}" +WORKSPACE_CONFIG_PATH="${WORKSPACE_CONFIG_PATH:-~/.openclaw/workspaces/$AGENT_ID}" +WORKSPACE_DIR="${WORKSPACE_DIR:-$OPENCLAW_HOME/workspaces/$AGENT_ID}" + +require_command() { + local missing=0 + local cmd + for cmd in "$@"; do + if ! command -v "$cmd" >/dev/null 2>&1; then + printf 'Missing required command: %s\n' "$cmd" >&2 + missing=1 + fi + done + + if [ "$missing" -ne 0 ]; then + exit 127 + fi +} + +write_file() { + local path="$1" + local content="$2" + mkdir -p "$(dirname "$path")" + printf '%s' "$content" >"$path" +} + +require_command python3 + +mkdir -p "$(dirname "$CONFIG_PATH")" +if [ ! 
-f "$CONFIG_PATH" ]; then + printf '{}\n' >"$CONFIG_PATH" +fi + +BACKUP_PATH="${CONFIG_PATH}.bak-$(date +%Y%m%d-%H%M%S)-$$" +cp "$CONFIG_PATH" "$BACKUP_PATH" + +PYTHON_SUMMARY="$( + python3 - "$CONFIG_PATH" "$AGENT_ID" "$WORKSPACE_CONFIG_PATH" <<'PY' +import json +import pathlib +import sys + + +def strip_comments(text: str) -> str: + result = [] + in_string = False + string_quote = "" + escaped = False + i = 0 + while i < len(text): + ch = text[i] + if in_string: + result.append(ch) + if escaped: + escaped = False + elif ch == "\\": + escaped = True + elif ch == string_quote: + in_string = False + i += 1 + continue + if ch in ('"', "'"): + in_string = True + string_quote = ch + result.append(ch) + i += 1 + continue + if ch == "/" and i + 1 < len(text): + nxt = text[i + 1] + if nxt == "/": + i += 2 + while i < len(text) and text[i] not in "\r\n": + i += 1 + continue + if nxt == "*": + i += 2 + while i + 1 < len(text) and not (text[i] == "*" and text[i + 1] == "/"): + i += 1 + i = min(i + 2, len(text)) + continue + result.append(ch) + i += 1 + return "".join(result) + + +def strip_trailing_commas(text: str) -> str: + result = [] + in_string = False + string_quote = "" + escaped = False + i = 0 + while i < len(text): + ch = text[i] + if in_string: + result.append(ch) + if escaped: + escaped = False + elif ch == "\\": + escaped = True + elif ch == string_quote: + in_string = False + i += 1 + continue + if ch in ('"', "'"): + in_string = True + string_quote = ch + result.append(ch) + i += 1 + continue + if ch == ",": + j = i + 1 + while j < len(text) and text[j] in " \t\r\n": + j += 1 + if j < len(text) and text[j] in "}]": + i += 1 + continue + result.append(ch) + i += 1 + return "".join(result) + + +def load_config(raw: str): + text = raw.strip() + if not text: + return {} + try: + return json.loads(text) + except json.JSONDecodeError: + normalized = strip_trailing_commas(strip_comments(text)) + return json.loads(normalized) + + +config_path = 
pathlib.Path(sys.argv[1]) +agent_id = sys.argv[2] +workspace = sys.argv[3] + +raw = config_path.read_text(encoding="utf-8") +config = load_config(raw) +if not isinstance(config, dict): + raise SystemExit("openclaw.json must contain a top-level object") + +agents = config.get("agents") +if agents is None: + agents = {} + config["agents"] = agents +if not isinstance(agents, dict): + raise SystemExit("config field 'agents' must be an object") + +agents_list = agents.get("list") +if agents_list is None: + agents_list = [] + agents["list"] = agents_list +if not isinstance(agents_list, list): + raise SystemExit("config field 'agents.list' must be an array") + +existing = None +for item in agents_list: + if isinstance(item, dict) and str(item.get("id", "")).strip() == agent_id: + existing = item + break + +agent_existed = existing is not None +if existing is None: + existing = {"id": agent_id} + agents_list.append(existing) + +existing["id"] = agent_id +existing["workspace"] = workspace + +config_path.write_text(json.dumps(config, indent=2) + "\n", encoding="utf-8") +print(json.dumps({ + "agentExisted": agent_existed, + "agentCount": len(agents_list), +})) +PY +)" + +mkdir -p "$WORKSPACE_DIR" + +write_file "$WORKSPACE_DIR/IDENTITY.md" "- Name: $AGENT_NAME +" +write_file "$WORKSPACE_DIR/AGENTS.md" "# Remote Doctor +Use this workspace only for ClawPal remote doctor planning sessions. +Return structured, operational answers. +" +write_file "$WORKSPACE_DIR/BOOTSTRAP.md" "Bootstrap is already complete for this workspace. +Do not ask who you are or who the user is. +Use IDENTITY.md and USER.md as the canonical identity context. +" +write_file "$WORKSPACE_DIR/USER.md" "- Name: ClawPal Desktop +- Role: desktop repair orchestrator +- Preferences: concise, operational, no bootstrap chatter +" +write_file "$WORKSPACE_DIR/HEARTBEAT.md" "Status: active remote-doctor planning workspace. 
+" + +printf 'Remote Doctor bootstrap complete.\n' +printf 'config=%s\n' "$CONFIG_PATH" +printf 'backup=%s\n' "$BACKUP_PATH" +printf 'workspace=%s\n' "$WORKSPACE_DIR" +printf 'summary=%s\n' "$PYTHON_SUMMARY" diff --git a/src-tauri/src/commands/app_logs.rs b/src-tauri/src/commands/app_logs.rs index e65797f2..5abcd5aa 100644 --- a/src-tauri/src/commands/app_logs.rs +++ b/src-tauri/src/commands/app_logs.rs @@ -1,5 +1,3 @@ -use super::*; - const MAX_LOG_TAIL_LINES: usize = 400; fn clamp_log_lines(lines: Option) -> usize { diff --git a/src-tauri/src/commands/preferences.rs b/src-tauri/src/commands/preferences.rs index 7d345628..49b02232 100644 --- a/src-tauri/src/commands/preferences.rs +++ b/src-tauri/src/commands/preferences.rs @@ -2,6 +2,7 @@ use std::collections::HashMap; use std::sync::{Mutex, OnceLock}; use serde::{Deserialize, Serialize}; +use serde_json::Value; use crate::bug_report::settings::{ normalize_settings as normalize_bug_report_settings, BugReportSettings, @@ -67,6 +68,32 @@ fn normalize_remote_doctor_gateway_auth_token(value: Option) -> Option Result { + let payload: Option = serde_json::from_str(body_text).ok(); + if status == 200 { + let api_key = payload + .as_ref() + .and_then(|obj| obj.get("apiKey")) + .and_then(Value::as_str) + .map(str::trim) + .filter(|value| !value.is_empty()) + .ok_or_else(|| "apiKey missing in exchange response".to_string())?; + return Ok(api_key.to_string()); + } + let error_text = payload + .as_ref() + .and_then(|obj| obj.get("error")) + .and_then(Value::as_str) + .map(str::trim) + .filter(|value| !value.is_empty()) + .map(str::to_string) + .unwrap_or_else(|| format!("HTTP {status}")); + Err(error_text) +} + fn load_stored_preferences_from_paths(paths: &OpenClawPaths) -> StoredAppPreferences { let path = app_preferences_path(paths); let mut prefs = read_json::(&path).unwrap_or_default(); @@ -176,6 +203,29 @@ pub fn set_remote_doctor_gateway_auth_token_preference( Ok(prefs) } +#[tauri::command] +pub fn 
exchange_remote_doctor_invite_code(invite_code: String) -> Result { + let trimmed = invite_code.trim(); + if trimmed.is_empty() { + return Err("inviteCode is required".to_string()); + } + + let client = reqwest::blocking::Client::builder() + .timeout(std::time::Duration::from_secs(8)) + .build() + .map_err(|error| format!("request client init failed: {error}"))?; + let response = client + .post(FIXED_REMOTE_DOCTOR_INVITE_EXCHANGE_URL) + .json(&serde_json::json!({ "inviteCode": trimmed })) + .send() + .map_err(|error| format!("request failed: {error}"))?; + let status = response.status().as_u16(); + let body_text = response + .text() + .map_err(|error| format!("response read failed: {error}"))?; + parse_invite_exchange_response(status, &body_text) +} + // --------------------------------------------------------------------------- // Per-session model overrides (in-memory only) // --------------------------------------------------------------------------- @@ -358,6 +408,20 @@ mod tests { let _ = std::fs::remove_dir_all(root); } + #[test] + fn parse_invite_exchange_response_returns_api_key_on_success() { + let api_key = parse_invite_exchange_response(200, r#"{"apiKey":"abc-123"}"#) + .expect("api key should parse"); + assert_eq!(api_key, "abc-123"); + } + + #[test] + fn parse_invite_exchange_response_returns_error_message() { + let error = parse_invite_exchange_response(400, r#"{"error":"invalid invite code"}"#) + .expect_err("should parse error"); + assert_eq!(error, "invalid invite code"); + } + #[test] fn legacy_zeroclaw_preference_fields_are_ignored() { let (paths, root) = test_paths(); diff --git a/src-tauri/src/commands/recipe_cmds.rs b/src-tauri/src/commands/recipe_cmds.rs index 38780798..1bbd349c 100644 --- a/src-tauri/src/commands/recipe_cmds.rs +++ b/src-tauri/src/commands/recipe_cmds.rs @@ -1,5 +1,3 @@ -use super::*; - use crate::models::resolve_paths; use crate::recipe::load_recipes_with_fallback; diff --git a/src-tauri/src/commands/upgrade.rs 
b/src-tauri/src/commands/upgrade.rs index 84d144ea..6e36dd78 100644 --- a/src-tauri/src/commands/upgrade.rs +++ b/src-tauri/src/commands/upgrade.rs @@ -1,5 +1,3 @@ -use super::*; - use std::process::Command; #[tauri::command] diff --git a/src-tauri/src/commands/util.rs b/src-tauri/src/commands/util.rs index de3963a3..59905971 100644 --- a/src-tauri/src/commands/util.rs +++ b/src-tauri/src/commands/util.rs @@ -1,5 +1,3 @@ -use super::*; - use std::process::Command; #[tauri::command] diff --git a/src-tauri/src/commands/watchdog_cmds.rs b/src-tauri/src/commands/watchdog_cmds.rs index fde3ea9e..091d1c33 100644 --- a/src-tauri/src/commands/watchdog_cmds.rs +++ b/src-tauri/src/commands/watchdog_cmds.rs @@ -1,5 +1,3 @@ -use super::*; - use serde_json::Value; use tauri::Manager; diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 58c9fa91..2ec927d1 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -14,7 +14,8 @@ use crate::commands::{ connect_local_instance, connect_ssh_instance, create_agent, delete_agent, delete_backup, delete_cron_job, delete_local_instance_home, delete_model_profile, delete_registered_instance, delete_sessions_by_ids, delete_ssh_host, deploy_watchdog, diagnose_doctor_assistant, - diagnose_primary_via_rescue, diagnose_ssh, discover_local_instances, ensure_access_profile, + diagnose_primary_via_rescue, diagnose_ssh, discover_local_instances, + exchange_remote_doctor_invite_code, ensure_access_profile, extract_model_profiles_from_config, fix_issues, get_app_preferences, get_bug_report_settings, get_cached_model_catalog, get_channels_config_snapshot, get_channels_runtime_snapshot, get_cron_config_snapshot, get_cron_runs, get_cron_runtime_snapshot, @@ -192,6 +193,7 @@ pub fn run() { repair_primary_via_rescue, set_global_model, set_agent_model, + exchange_remote_doctor_invite_code, set_remote_doctor_gateway_auth_token_preference, set_remote_doctor_gateway_url_preference, set_ssh_transfer_speed_ui_preference, diff --git 
a/src-tauri/src/node_client.rs b/src-tauri/src/node_client.rs index b2288cb4..9a5b52d7 100644 --- a/src-tauri/src/node_client.rs +++ b/src-tauri/src/node_client.rs @@ -50,6 +50,7 @@ pub struct NodeClient { inner: Arc>>, credentials: Arc>>, pending_chat_final: Arc>>>, + last_disconnect_reason: Arc>>, } impl NodeClient { @@ -58,6 +59,7 @@ impl NodeClient { inner: Arc::new(Mutex::new(None)), credentials: Arc::new(Mutex::new(None)), pending_chat_final: Arc::new(Mutex::new(None)), + last_disconnect_reason: Arc::new(Mutex::new(None)), } } @@ -72,6 +74,7 @@ impl NodeClient { // Store credentials for use in handshake *self.credentials.lock().await = creds; + *self.last_disconnect_reason.lock().await = None; let (ws_stream, _) = connect_async(url) .await @@ -95,6 +98,7 @@ impl NodeClient { let inner_ref = Arc::clone(&self.inner); let app_clone = app.clone(); let chat_ref = Arc::clone(&self.pending_chat_final); + let disconnect_reason_ref = Arc::clone(&self.last_disconnect_reason); tokio::spawn(async move { while let Some(msg) = rx.next().await { @@ -112,20 +116,24 @@ impl NodeClient { Self::handle_message_payload(&bytes, &inner_ref, &chat_ref, &app_clone) .await; } - Ok(Message::Close(_)) => { + Ok(Message::Close(frame)) => { + let reason = format_close_reason(frame.as_ref()); + *disconnect_reason_ref.lock().await = Some(reason.clone()); let _ = app_clone - .emit("doctor:disconnected", json!({"reason": "server closed"})); + .emit("doctor:disconnected", json!({"reason": reason})); let mut guard = inner_ref.lock().await; *guard = None; break; } Err(e) => { + let reason = format!("websocket error: {e}"); + *disconnect_reason_ref.lock().await = Some(reason.clone()); let _ = app_clone.emit( "doctor:error", json!({"message": format!("WebSocket error: {e}")}), ); let _ = app_clone - .emit("doctor:disconnected", json!({"reason": format!("{e}")})); + .emit("doctor:disconnected", json!({"reason": reason})); let mut guard = inner_ref.lock().await; *guard = None; break; @@ -203,7 
+211,8 @@ impl NodeClient { if let Some(inner) = guard.as_mut() { inner.pending.remove(&id); } - Err("Connection lost while waiting for response".into()) + let reason = self.last_disconnect_reason.lock().await.clone(); + Err(connection_lost_error_message(reason.as_deref())) } Err(_) => { let mut guard = self.inner.lock().await; @@ -540,3 +549,54 @@ impl Default for NodeClient { Self::new() } } + +fn format_close_reason( + frame: Option<&tokio_tungstenite::tungstenite::protocol::CloseFrame<'_>>, +) -> String { + let Some(frame) = frame else { + return "server closed".to_string(); + }; + let code = u16::from(frame.code); + let reason = frame.reason.trim(); + if reason.is_empty() { + format!("server closed (close code {code})") + } else { + format!("server closed (close code {code}: {reason})") + } +} + +fn connection_lost_error_message(last_disconnect_reason: Option<&str>) -> String { + match last_disconnect_reason.map(str::trim).filter(|value| !value.is_empty()) { + Some(reason) => format!("Connection lost while waiting for response: {reason}"), + None => "Connection lost while waiting for response".to_string(), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tokio_tungstenite::tungstenite::protocol::{frame::coding::CloseCode, CloseFrame}; + + #[test] + fn format_close_reason_includes_code_and_text() { + let frame = CloseFrame { + code: CloseCode::Policy, + reason: "invalid token".into(), + }; + + assert_eq!( + format_close_reason(Some(&frame)), + "server closed (close code 1008: invalid token)" + ); + } + + #[test] + fn connection_lost_error_message_includes_disconnect_reason() { + assert_eq!( + connection_lost_error_message(Some( + "server closed (close code 1008: invalid token)" + )), + "Connection lost while waiting for response: server closed (close code 1008: invalid token)" + ); + } +} diff --git a/src-tauri/src/remote_doctor/config.rs b/src-tauri/src/remote_doctor/config.rs index a2017fba..863e04eb 100644 --- a/src-tauri/src/remote_doctor/config.rs 
+++ b/src-tauri/src/remote_doctor/config.rs @@ -22,8 +22,7 @@ use crate::models::resolve_paths; use crate::node_client::GatewayCredentials; use crate::ssh::SshConnectionPool; -const DEFAULT_GATEWAY_HOST: &str = "127.0.0.1"; -const DEFAULT_GATEWAY_PORT: u16 = 18789; +const FIXED_REMOTE_DOCTOR_GATEWAY_URL: &str = "ws://65.21.45.43:3040/ws"; #[derive(Debug, Clone)] pub(crate) struct RemoteDoctorGatewayConfig { @@ -34,25 +33,8 @@ pub(crate) struct RemoteDoctorGatewayConfig { pub(crate) fn load_gateway_config() -> Result { let paths = resolve_paths(); let app_preferences = load_app_preferences_from_paths(&paths); - if let Some(url) = app_preferences.remote_doctor_gateway_url { - return Ok(RemoteDoctorGatewayConfig { - url, - auth_token_override: app_preferences.remote_doctor_gateway_auth_token, - }); - } - let configured_port = std::fs::read_to_string(&paths.config_path) - .ok() - .and_then(|text| serde_json::from_str::(&text).ok()) - .and_then(|config| { - config - .get("gateway") - .and_then(|gateway| gateway.get("port")) - .and_then(|value| value.as_u64()) - }) - .map(|value| value as u16) - .unwrap_or(DEFAULT_GATEWAY_PORT); Ok(RemoteDoctorGatewayConfig { - url: format!("ws://{DEFAULT_GATEWAY_HOST}:{configured_port}"), + url: FIXED_REMOTE_DOCTOR_GATEWAY_URL.to_string(), auth_token_override: app_preferences.remote_doctor_gateway_auth_token, }) } @@ -399,7 +381,7 @@ mod tests { } #[test] - fn load_gateway_config_prefers_app_preferences() { + fn load_gateway_config_uses_fixed_clawpal_server_url() { let _guard = override_lock().lock().expect("lock override state"); let temp_root = std::env::temp_dir().join(format!( "clawpal-remote-doctor-config-pref-test-{}", @@ -423,7 +405,7 @@ mod tests { .expect("write prefs"); let config = load_gateway_config().expect("load gateway config"); - assert_eq!(config.url, "ws://example.test:9999"); + assert_eq!(config.url, "ws://65.21.45.43:3040/ws"); assert_eq!(config.auth_token_override.as_deref(), Some("abc")); 
set_active_clawpal_data_override(None).expect("clear clawpal override"); diff --git a/src-tauri/src/remote_doctor/repair_loops.rs b/src-tauri/src/remote_doctor/repair_loops.rs index 44c41ec9..725957f5 100644 --- a/src-tauri/src/remote_doctor/repair_loops.rs +++ b/src-tauri/src/remote_doctor/repair_loops.rs @@ -25,7 +25,7 @@ use super::legacy::{ run_agent_request_with_bridge, }; use super::plan::request_plan; -use super::session::append_session_log; +use super::session::{append_gateway_connect_failure_log, append_session_log}; use super::types::{ parse_target_location, PlanKind, RemoteDoctorProtocol, RemoteDoctorRepairResult, TargetLocation, }; @@ -67,7 +67,15 @@ pub(crate) async fn start_remote_doctor_repair_impl( ); let client = NodeClient::new(); - client.connect(&gateway.url, app.clone(), creds).await?; + if let Err(error) = client.connect(&gateway.url, app.clone(), creds).await { + append_gateway_connect_failure_log( + &session_id, + &gateway.url, + gateway.auth_token_override.is_some(), + &error, + ); + return Err(format!("Remote Doctor gateway connect failed: {error}")); + } let bridge = BridgeClient::new(); let forced_protocol = configured_remote_doctor_protocol(); diff --git a/src-tauri/src/remote_doctor/session.rs b/src-tauri/src/remote_doctor/session.rs index 189597a4..37e45f6d 100644 --- a/src-tauri/src/remote_doctor/session.rs +++ b/src-tauri/src/remote_doctor/session.rs @@ -24,6 +24,23 @@ pub(crate) fn append_session_log(session_id: &str, payload: Value) { let _ = writeln!(file, "{}", payload); } +pub(crate) fn append_gateway_connect_failure_log( + session_id: &str, + gateway_url: &str, + gateway_auth_token_override: bool, + error: &str, +) { + append_session_log( + session_id, + serde_json::json!({ + "event": "gateway_connect_failed", + "gatewayUrl": gateway_url, + "gatewayAuthTokenOverride": gateway_auth_token_override, + "error": error, + }), + ); +} + pub(crate) fn emit_session_progress( app: Option<&AppHandle>, session_id: &str, @@ -166,4 +183,37 
@@ mod tests { assert_eq!(warning.last_plan_kind, "repair"); assert!(!warning.latest_diagnosis_healthy); } + + #[test] + fn append_gateway_connect_failure_log_writes_context() { + let temp_root = std::env::temp_dir().join(format!( + "clawpal-remote-doctor-connect-failure-log-test-{}", + uuid::Uuid::new_v4() + )); + let clawpal_dir = temp_root.join(".clawpal"); + std::fs::create_dir_all(&clawpal_dir).expect("create clawpal dir"); + set_active_clawpal_data_override(Some(clawpal_dir.to_string_lossy().to_string())) + .expect("set clawpal override"); + + append_gateway_connect_failure_log( + "sess-connect", + "ws://65.21.45.43:3040/ws", + true, + "Connection lost while waiting for response: server closed (close code 1008: invalid token)", + ); + + set_active_clawpal_data_override(None).expect("clear clawpal override"); + + let log_path = clawpal_dir + .join("doctor") + .join("remote") + .join("sess-connect.jsonl"); + let log_text = std::fs::read_to_string(&log_path).expect("read session log"); + assert!(log_text.contains("\"event\":\"gateway_connect_failed\"")); + assert!(log_text.contains("\"gatewayUrl\":\"ws://65.21.45.43:3040/ws\"")); + assert!(log_text.contains("\"gatewayAuthTokenOverride\":true")); + assert!(log_text.contains("invalid token")); + + let _ = std::fs::remove_dir_all(&temp_root); + } } diff --git a/src-tauri/tests/remote_doctor_bootstrap_script.rs b/src-tauri/tests/remote_doctor_bootstrap_script.rs new file mode 100644 index 00000000..97c011b3 --- /dev/null +++ b/src-tauri/tests/remote_doctor_bootstrap_script.rs @@ -0,0 +1,176 @@ +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::Command; + +use serde_json::Value; +use uuid::Uuid; + +fn temp_home_dir(label: &str) -> PathBuf { + std::env::temp_dir().join(format!( + "clawpal-remote-doctor-bootstrap-script-{label}-{}", + Uuid::new_v4() + )) +} + +fn script_path() -> PathBuf { + Path::new(env!("CARGO_MANIFEST_DIR")) + .parent() + .expect("repo root") + .join("scripts") + 
.join("remote-doctor-bootstrap.sh") +} + +fn seed_openclaw_config(home_dir: &Path, config_text: &str) -> PathBuf { + let openclaw_dir = home_dir.join(".openclaw"); + fs::create_dir_all(&openclaw_dir).expect("create openclaw dir"); + let config_path = openclaw_dir.join("openclaw.json"); + fs::write(&config_path, config_text).expect("write config"); + config_path +} + +fn run_bootstrap_script(home_dir: &Path) -> std::process::Output { + Command::new("bash") + .arg(script_path()) + .env("HOME", home_dir) + .output() + .expect("run bootstrap script") +} + +fn backup_files(config_path: &Path) -> Vec { + let parent = config_path.parent().expect("config parent"); + let prefix = format!( + "{}.bak-", + config_path + .file_name() + .and_then(|name| name.to_str()) + .expect("config file name") + ); + fs::read_dir(parent) + .expect("read config dir") + .filter_map(|entry| entry.ok().map(|item| item.path())) + .filter(|path| { + path.file_name() + .and_then(|name| name.to_str()) + .map(|name| name.starts_with(&prefix)) + .unwrap_or(false) + }) + .collect() +} + +fn load_config(config_path: &Path) -> Value { + serde_json::from_str(&fs::read_to_string(config_path).expect("read config")) + .expect("parse config") +} + +fn assert_bootstrap_workspace(home_dir: &Path) { + let workspace = home_dir + .join(".openclaw") + .join("workspaces") + .join("clawpal-remote-doctor"); + for file_name in [ + "IDENTITY.md", + "AGENTS.md", + "BOOTSTRAP.md", + "USER.md", + "HEARTBEAT.md", + ] { + let path = workspace.join(file_name); + let text = fs::read_to_string(&path) + .unwrap_or_else(|error| panic!("read {}: {error}", path.display())); + assert!( + !text.trim().is_empty(), + "{file_name} should not be empty in {}", + workspace.display() + ); + } +} + +#[test] +fn bootstrap_script_adds_remote_doctor_agent_and_workspace() { + let home_dir = temp_home_dir("basic"); + fs::create_dir_all(&home_dir).expect("create temp home"); + let config_path = seed_openclaw_config( + &home_dir, + r#"{ + "agents": 
{ + "list": [ + { "id": "main", "workspace": "~/.openclaw/workspaces/main" } + ] + } +} +"#, + ); + + let output = run_bootstrap_script(&home_dir); + + assert_eq!( + output.status.code(), + Some(0), + "stdout:\n{}\n\nstderr:\n{}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + + assert_eq!(backup_files(&config_path).len(), 1, "expected one config backup"); + + let config = load_config(&config_path); + let agents = config + .pointer("/agents/list") + .and_then(Value::as_array) + .expect("agents.list array"); + let remote_doctor = agents + .iter() + .find(|entry| entry.get("id").and_then(Value::as_str) == Some("clawpal-remote-doctor")) + .expect("remote doctor agent entry"); + assert_eq!( + remote_doctor.get("workspace").and_then(Value::as_str), + Some("~/.openclaw/workspaces/clawpal-remote-doctor") + ); + + assert_bootstrap_workspace(&home_dir); + + let _ = fs::remove_dir_all(&home_dir); +} + +#[test] +fn bootstrap_script_accepts_comment_and_trailing_comma_config() { + let home_dir = temp_home_dir("json5"); + fs::create_dir_all(&home_dir).expect("create temp home"); + let config_path = seed_openclaw_config( + &home_dir, + r#"{ + // existing agent + "agents": { + "list": [ + { "id": "main", "workspace": "~/.openclaw/workspaces/main", }, + ], + }, +} +"#, + ); + + let output = run_bootstrap_script(&home_dir); + + assert_eq!( + output.status.code(), + Some(0), + "stdout:\n{}\n\nstderr:\n{}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + + let config = load_config(&config_path); + let agents = config + .pointer("/agents/list") + .and_then(Value::as_array) + .expect("agents.list array"); + let matches = agents + .iter() + .filter(|entry| entry.get("id").and_then(Value::as_str) == Some("clawpal-remote-doctor")) + .count(); + assert_eq!(matches, 1, "remote doctor agent should be added exactly once"); + + assert_bootstrap_workspace(&home_dir); + + let _ = 
fs::remove_dir_all(&home_dir); +} diff --git a/src/components/SettingsAlphaFeaturesCard.tsx b/src/components/SettingsAlphaFeaturesCard.tsx index 380a62d1..814bb658 100644 --- a/src/components/SettingsAlphaFeaturesCard.tsx +++ b/src/components/SettingsAlphaFeaturesCard.tsx @@ -8,26 +8,20 @@ import { DisclosureCard } from "@/components/DisclosureCard"; interface SettingsAlphaFeaturesCardProps { showSshTransferSpeedUi: boolean; - remoteDoctorGatewayUrl: string; - remoteDoctorGatewayAuthToken: string; - remoteDoctorGatewayUrlInputRef?: Ref; + remoteDoctorInviteCode: string; + remoteDoctorInviteCodeInputRef?: Ref; onSshTransferSpeedUiToggle: (checked: boolean) => void; - onRemoteDoctorGatewayUrlChange: (value: string) => void; - onRemoteDoctorGatewayUrlSave: () => void; - onRemoteDoctorGatewayAuthTokenChange: (value: string) => void; - onRemoteDoctorGatewayAuthTokenSave: () => void; + onRemoteDoctorInviteCodeChange: (value: string) => void; + onRemoteDoctorInviteCodeSave: () => void; } export function SettingsAlphaFeaturesCard({ showSshTransferSpeedUi, - remoteDoctorGatewayUrl, - remoteDoctorGatewayAuthToken, - remoteDoctorGatewayUrlInputRef, + remoteDoctorInviteCode, + remoteDoctorInviteCodeInputRef, onSshTransferSpeedUiToggle, - onRemoteDoctorGatewayUrlChange, - onRemoteDoctorGatewayUrlSave, - onRemoteDoctorGatewayAuthTokenChange, - onRemoteDoctorGatewayAuthTokenSave, + onRemoteDoctorInviteCodeChange, + onRemoteDoctorInviteCodeSave, }: SettingsAlphaFeaturesCardProps) { const { t } = useTranslation(); @@ -49,51 +43,28 @@ export function SettingsAlphaFeaturesCard({ {t("settings.alphaEnableSshTransferSpeedUiHint")}

-
-
- -
- onRemoteDoctorGatewayAuthTokenChange(event.target.value)} - placeholder={t("settings.remoteDoctorGatewayAuthTokenPlaceholder")} + ref={remoteDoctorInviteCodeInputRef} + value={remoteDoctorInviteCode} + onChange={(event) => onRemoteDoctorInviteCodeChange(event.target.value)} + placeholder={t("settings.remoteDoctorInviteCodePlaceholder")} />

- {t("settings.remoteDoctorGatewayAuthTokenHint")} + {t("settings.remoteDoctorInviteCodeHint")}

diff --git a/src/components/__tests__/SettingsAlphaFeaturesCard.test.tsx b/src/components/__tests__/SettingsAlphaFeaturesCard.test.tsx index 85bff0ee..db147a9e 100644 --- a/src/components/__tests__/SettingsAlphaFeaturesCard.test.tsx +++ b/src/components/__tests__/SettingsAlphaFeaturesCard.test.tsx @@ -15,20 +15,18 @@ describe("SettingsAlphaFeaturesCard", () => { i18n, children: React.createElement(SettingsAlphaFeaturesCard, { showSshTransferSpeedUi: false, - remoteDoctorGatewayUrl: "", - remoteDoctorGatewayAuthToken: "", + remoteDoctorInviteCode: "", onSshTransferSpeedUiToggle: () => {}, - onRemoteDoctorGatewayUrlChange: () => {}, - onRemoteDoctorGatewayUrlSave: () => {}, - onRemoteDoctorGatewayAuthTokenChange: () => {}, - onRemoteDoctorGatewayAuthTokenSave: () => {}, + onRemoteDoctorInviteCodeChange: () => {}, + onRemoteDoctorInviteCodeSave: () => {}, }), }), ); expect(html).toContain("SSH transfer speed"); - expect(html).toContain("Remote Doctor Gateway URL"); - expect(html).toContain("Remote Doctor Gateway Auth Token"); + expect(html).toContain("Remote Doctor Invite Code"); + expect(html).not.toContain("Remote Doctor Gateway URL"); + expect(html).not.toContain("Remote Doctor Gateway Auth Token"); expect(html).not.toContain("ClawPal Logs"); expect(html).not.toContain("OpenClaw Gateway Logs"); expect(html).not.toContain("OpenClaw Context"); diff --git a/src/lib/__tests__/invite-code.test.ts b/src/lib/__tests__/invite-code.test.ts new file mode 100644 index 00000000..dc68b8f3 --- /dev/null +++ b/src/lib/__tests__/invite-code.test.ts @@ -0,0 +1,93 @@ +import { afterEach, beforeEach, describe, expect, test } from "bun:test"; + +import { + deriveServerBaseUrlFromGatewayUrl, + exchangeInviteCodeForApiKey, + InviteCodeExchangeError, +} from "../invite-code"; + +describe("deriveServerBaseUrlFromGatewayUrl", () => { + test("returns clawpal server default when gateway url is empty", () => { + expect(deriveServerBaseUrlFromGatewayUrl("")).toBe("http://65.21.45.43:3040"); + 
}); + + test("converts websocket gateway url to http origin", () => { + expect(deriveServerBaseUrlFromGatewayUrl("ws://65.21.45.43:3040/ws")).toBe("http://65.21.45.43:3040"); + expect(deriveServerBaseUrlFromGatewayUrl("wss://server.example.com/ws")).toBe("https://server.example.com"); + }); + + test("keeps http/https url origin", () => { + expect(deriveServerBaseUrlFromGatewayUrl("https://server.example.com/path")).toBe("https://server.example.com"); + }); +}); + +describe("exchangeInviteCodeForApiKey", () => { + const originalFetch = globalThis.fetch; + + beforeEach(() => { + globalThis.fetch = (async () => new Response("unexpected call", { status: 500 })) as typeof fetch; + }); + + afterEach(() => { + globalThis.fetch = originalFetch; + }); + + test("returns api key on successful exchange", async () => { + globalThis.fetch = (async (input: RequestInfo | URL, init?: RequestInit) => { + expect(String(input)).toBe("http://65.21.45.43:3040/api-keys/exchange"); + expect(init?.method).toBe("POST"); + expect(init?.headers).toEqual({ "content-type": "application/json" }); + expect(init?.body).toBe(JSON.stringify({ inviteCode: "invite-001" })); + return new Response(JSON.stringify({ apiKey: "new-api-key" }), { + status: 200, + headers: { "content-type": "application/json" }, + }); + }) as typeof fetch; + + await expect(exchangeInviteCodeForApiKey("invite-001", "")).resolves.toBe("new-api-key"); + }); + + test("throws INVITE_CODE_REQUIRED for empty invite code", async () => { + await expect(exchangeInviteCodeForApiKey(" ", "")).rejects.toMatchObject({ + code: "INVITE_CODE_REQUIRED", + }); + }); + + test("maps invalid invite code from server", async () => { + globalThis.fetch = (async () => + new Response(JSON.stringify({ error: "invalid invite code" }), { + status: 400, + headers: { "content-type": "application/json" }, + })) as typeof fetch; + + await expect(exchangeInviteCodeForApiKey("bad-code", "")).rejects.toMatchObject({ + code: "INVALID_INVITE_CODE", + }); + }); + + 
test("maps internal server error from server", async () => { + globalThis.fetch = (async () => + new Response(JSON.stringify({ error: "internal server error" }), { + status: 500, + headers: { "content-type": "application/json" }, + })) as typeof fetch; + + await expect(exchangeInviteCodeForApiKey("invite-001", "")).rejects.toMatchObject({ + code: "EXCHANGE_FAILED", + }); + }); + + test("wraps network failures", async () => { + globalThis.fetch = (async () => { + throw new Error("socket hang up"); + }) as typeof fetch; + + try { + await exchangeInviteCodeForApiKey("invite-001", ""); + throw new Error("expected exchange to fail"); + } catch (error) { + expect(error).toBeInstanceOf(InviteCodeExchangeError); + expect((error as InviteCodeExchangeError).code).toBe("NETWORK_ERROR"); + } + }); +}); diff --git a/src/lib/__tests__/remote-doctor-error.test.ts b/src/lib/__tests__/remote-doctor-error.test.ts new file mode 100644 index 00000000..ed9b45c9 --- /dev/null +++ b/src/lib/__tests__/remote-doctor-error.test.ts @@ -0,0 +1,25 @@ +import { describe, expect, test } from "bun:test"; + +import { formatRemoteDoctorErrorMessage } from "../remote-doctor-error"; + +describe("formatRemoteDoctorErrorMessage", () => { + test("adds an api-key-focused hint for invalid-token handshake failures", () => { + expect( + formatRemoteDoctorErrorMessage( + "Connection lost while waiting for response: server closed (close code 1008: invalid token)", + ), + ).toContain("Remote Doctor API key"); + }); + + test("surfaces invalid api key handshake failures explicitly", () => { + expect( + formatRemoteDoctorErrorMessage( + "Remote Doctor gateway connect failed: Connection lost while waiting for response: server closed (close code 1008: invalid api key)", + ), + ).toContain("Re-save the invite code in Settings"); + }); + + test("keeps unrelated errors unchanged", () => { + expect(formatRemoteDoctorErrorMessage("Request timed out")).toBe("Request timed out"); + }); +}); diff --git 
a/src/lib/__tests__/use-api-extra.test.ts b/src/lib/__tests__/use-api-extra.test.ts index 131bbb11..7af095dd 100644 --- a/src/lib/__tests__/use-api-extra.test.ts +++ b/src/lib/__tests__/use-api-extra.test.ts @@ -180,4 +180,12 @@ describe("remote doctor api bindings", () => { test("exposes remote doctor gateway auth token preference binding", () => { expect(typeof api.setRemoteDoctorGatewayAuthTokenPreference).toBe("function"); }); + + test("exposes invite exchange binding", () => { + expect(typeof api.exchangeRemoteDoctorInviteCode).toBe("function"); + }); + + test("exposes app event logging binding", () => { + expect(typeof api.logAppEvent).toBe("function"); + }); }); diff --git a/src/lib/api.ts b/src/lib/api.ts index 2d8baa48..31954da7 100644 --- a/src/lib/api.ts +++ b/src/lib/api.ts @@ -18,12 +18,16 @@ export const api = { invoke("test_bug_report_connection", {}), captureFrontendError: (message: string, stack?: string, level?: string) => invoke("capture_frontend_error", { message, stack, level }), + logAppEvent: (message: string): Promise => + invoke("log_app_event", { message }), setSshTransferSpeedUiPreference: (showUi: boolean): Promise => invoke("set_ssh_transfer_speed_ui_preference", { showUi }), setRemoteDoctorGatewayUrlPreference: (gatewayUrl: string | null): Promise => invoke("set_remote_doctor_gateway_url_preference", { gatewayUrl }), setRemoteDoctorGatewayAuthTokenPreference: (authToken: string | null): Promise => invoke("set_remote_doctor_gateway_auth_token_preference", { authToken }), + exchangeRemoteDoctorInviteCode: (inviteCode: string): Promise => + invoke("exchange_remote_doctor_invite_code", { inviteCode }), explainOperationError: ( instanceId: string, operation: string, diff --git a/src/lib/invite-code.ts b/src/lib/invite-code.ts new file mode 100644 index 00000000..9940157f --- /dev/null +++ b/src/lib/invite-code.ts @@ -0,0 +1,83 @@ +const DEFAULT_CLAWPAL_SERVER_BASE_URL = "http://65.21.45.43:3040"; + +export type InviteCodeExchangeErrorCode = 
+ | "INVITE_CODE_REQUIRED" + | "INVALID_INVITE_CODE" + | "NETWORK_ERROR" + | "EXCHANGE_FAILED"; + +export class InviteCodeExchangeError extends Error { + code: InviteCodeExchangeErrorCode; + + constructor(code: InviteCodeExchangeErrorCode, message: string) { + super(message); + this.name = "InviteCodeExchangeError"; + this.code = code; + } +} + +export function deriveServerBaseUrlFromGatewayUrl(gatewayUrl: string): string { + const trimmed = gatewayUrl.trim(); + if (!trimmed) return DEFAULT_CLAWPAL_SERVER_BASE_URL; + + try { + const url = new URL(trimmed); + if (url.protocol === "ws:") url.protocol = "http:"; + if (url.protocol === "wss:") url.protocol = "https:"; + return url.origin; + } catch { + return DEFAULT_CLAWPAL_SERVER_BASE_URL; + } +} + +export async function exchangeInviteCodeForApiKey( + inviteCode: string, + gatewayUrl: string, +): Promise { + const normalizedInviteCode = inviteCode.trim(); + if (!normalizedInviteCode) { + throw new InviteCodeExchangeError("INVITE_CODE_REQUIRED", "inviteCode is required"); + } + + const serverBaseUrl = deriveServerBaseUrlFromGatewayUrl(gatewayUrl); + const endpoint = `${serverBaseUrl}/api-keys/exchange`; + let response: Response; + try { + response = await fetch(endpoint, { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ inviteCode: normalizedInviteCode }), + }); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new InviteCodeExchangeError("NETWORK_ERROR", message); + } + + let payload: unknown = null; + try { + payload = await response.json(); + } catch { + payload = null; + } + + if (!response.ok) { + const errorText = payload && typeof payload === "object" && "error" in payload + ? 
String((payload as { error: unknown }).error) + : `HTTP ${response.status}`; + if (response.status === 400 && errorText === "inviteCode is required") { + throw new InviteCodeExchangeError("INVITE_CODE_REQUIRED", errorText); + } + if (response.status === 400 && errorText === "invalid invite code") { + throw new InviteCodeExchangeError("INVALID_INVITE_CODE", errorText); + } + throw new InviteCodeExchangeError("EXCHANGE_FAILED", errorText); + } + + const apiKey = payload && typeof payload === "object" && "apiKey" in payload + ? String((payload as { apiKey: unknown }).apiKey ?? "") + : ""; + if (!apiKey) { + throw new InviteCodeExchangeError("EXCHANGE_FAILED", "apiKey missing in exchange response"); + } + return apiKey; +} diff --git a/src/lib/remote-doctor-error.ts b/src/lib/remote-doctor-error.ts new file mode 100644 index 00000000..41e9d2ed --- /dev/null +++ b/src/lib/remote-doctor-error.ts @@ -0,0 +1,24 @@ +const CONNECTION_LOST_PREFIX = "Connection lost while waiting for response"; +const INVALID_REMOTE_DOCTOR_AUTH_MARKERS = ["invalid token", "invalid api key"]; + +export function formatRemoteDoctorErrorMessage(message: string): string { + const trimmed = message.trim(); + if (!trimmed.includes(CONNECTION_LOST_PREFIX)) { + return trimmed; + } + + const lower = trimmed.toLowerCase(); + if (INVALID_REMOTE_DOCTOR_AUTH_MARKERS.some((marker) => lower.includes(marker))) { + return [ + "Remote Doctor gateway rejected the saved Remote Doctor API key.", + "Re-save the invite code in Settings to refresh it, then try again.", + `Details: ${trimmed}`, + ].join(" "); + } + + return [ + "Remote Doctor server accepted the WebSocket but closed before replying.", + "Check the invite code or saved Remote Doctor token first.", + `Details: ${trimmed}`, + ].join(" "); +} diff --git a/src/lib/use-api.ts b/src/lib/use-api.ts index 89f5917c..d3964bba 100644 --- a/src/lib/use-api.ts +++ b/src/lib/use-api.ts @@ -623,6 +623,8 @@ export function useApi() { api.testBugReportConnection, 
["getBugReportStats"], ), + captureFrontendError: api.captureFrontendError, + logAppEvent: api.logAppEvent, setSshTransferSpeedUiPreference: withGlobalInvalidation( api.setSshTransferSpeedUiPreference, ["getAppPreferences"], @@ -635,6 +637,7 @@ export function useApi() { api.setRemoteDoctorGatewayAuthTokenPreference, ["getAppPreferences"], ), + exchangeRemoteDoctorInviteCode: api.exchangeRemoteDoctorInviteCode, ensureAccessProfile: api.ensureAccessProfile, recordInstallExperience: api.recordInstallExperience, openUrl: api.openUrl, diff --git a/src/locales/en.json b/src/locales/en.json index ef9edd16..8601d5f7 100644 --- a/src/locales/en.json +++ b/src/locales/en.json @@ -205,6 +205,7 @@ "settings.credentialKind.manual": "Manual Key", "settings.credentialKind.unset": "Unset", "settings.apiKey": "API Key", + "settings.inviteCode": "Invite Code", "settings.oauthProviderHint": "Provider {{provider}} uses OAuth. Log in first, then add/save the profile.", "settings.oauthAuthRefHint": "If empty, ClawPal will use the default credential source (openai-codex:default).", "settings.oauthStart": "Start OAuth", @@ -222,6 +223,9 @@ "settings.apiKeyUnchanged": "(unchanged if empty)", "settings.apiKeyOptional": "(optional — key already available)", "settings.apiKeyPlaceholder": "sk-...", + "settings.inviteCodeUnchanged": "(unchanged if empty)", + "settings.inviteCodeOptional": "(optional — credential already available)", + "settings.inviteCodePlaceholder": "invite-001", "settings.keyAvailable": "Usable credential detected (source: {{source}}). 
Leave empty to reuse.", "settings.customBaseUrl": "Custom Base URL", "settings.baseUrl": "Base URL", @@ -236,6 +240,9 @@ "settings.deleteFailed": "Delete failed: {{error}}", "settings.providerModelRequired": "Provider and Model are required", "settings.apiKeyRequired": "API Key is required", + "settings.inviteCodeRequired": "Invite code is required", + "settings.inviteCodeInvalid": "Invalid invite code or already used", + "settings.inviteCodeExchangeFailed": "Failed to exchange invite code for API key: {{error}}", "settings.currentVersion": "Current Version", "settings.checkForUpdates": "Check for Updates", "settings.checkingUpdates": "Checking...", @@ -317,10 +324,14 @@ "settings.alphaEnableOpenclawContextUi": "Show OpenClaw Context (Alpha)", "settings.alphaEnableOpenclawContextUiHint": "When enabled, add a Context page to the sidebar with collapsible Sessions and Backups.", "settings.remoteDoctorGatewayUrl": "Remote Doctor Gateway URL", - "settings.remoteDoctorGatewayUrlPlaceholder": "ws://127.0.0.1:3000/ws", - "settings.remoteDoctorGatewayUrlHint": "Used by Remote Doctor Repair. For clawpal-server, use a websocket endpoint like ws://127.0.0.1:3000/ws. Leave empty to fall back to the local gateway port from openclaw.json.", + "settings.remoteDoctorGatewayUrlPlaceholder": "ws://65.21.45.43:3040/ws", + "settings.remoteDoctorGatewayUrlHint": "Used by Remote Doctor Repair. For clawpal-server, use a websocket endpoint like ws://65.21.45.43:3040/ws. Leave empty to fall back to the local gateway port from openclaw.json.", "settings.remoteDoctorGatewayUrlSaved": "Remote Doctor gateway URL saved", "settings.remoteDoctorGatewayUrlSaveFailed": "Failed to save Remote Doctor gateway URL: {{error}}", + "settings.remoteDoctorInviteCode": "Remote Doctor Invite Code", + "settings.remoteDoctorInviteCodePlaceholder": "invite-001", + "settings.remoteDoctorInviteCodeHint": "Enter invite code and save. 
ClawPal exchanges it via /api-keys/exchange and stores the returned key for Remote Doctor.", + "settings.remoteDoctorInviteCodeSaved": "Remote Doctor invite code exchanged and saved", "settings.remoteDoctorGatewayAuthToken": "Remote Doctor Gateway Auth Token", "settings.remoteDoctorGatewayAuthTokenPlaceholder": "gateway auth token", "settings.remoteDoctorGatewayAuthTokenHint": "Optional override token for Remote Doctor Repair. Leave empty to reuse the local gateway auth token.", diff --git a/src/locales/zh.json b/src/locales/zh.json index cfb76f00..f023ac19 100644 --- a/src/locales/zh.json +++ b/src/locales/zh.json @@ -204,6 +204,7 @@ "settings.credentialKind.manual": "手动密钥", "settings.credentialKind.unset": "未配置", "settings.apiKey": "API 密钥", + "settings.inviteCode": "邀请码", "settings.oauthProviderHint": "提供商 {{provider}} 使用 OAuth。请先登录,再新增/保存该 profile。", "settings.oauthAuthRefHint": "若留空,ClawPal 会自动使用默认凭证来源(openai-codex:default)。", "settings.oauthStart": "开始 OAuth 授权", @@ -221,6 +222,9 @@ "settings.apiKeyUnchanged": "(留空则不修改)", "settings.apiKeyOptional": "(可选 — 密钥已可用)", "settings.apiKeyPlaceholder": "sk-...", + "settings.inviteCodeUnchanged": "(留空则不修改)", + "settings.inviteCodeOptional": "(可选 — 已有可用凭证)", + "settings.inviteCodePlaceholder": "invite-001", "settings.keyAvailable": "已检测到可用凭证(来源:{{source}})。留空即可复用。", "settings.customBaseUrl": "自定义 Base URL", "settings.baseUrl": "Base URL", @@ -235,6 +239,9 @@ "settings.deleteFailed": "删除失败:{{error}}", "settings.providerModelRequired": "提供商和模型为必填项", "settings.apiKeyRequired": "API 密钥为必填项", + "settings.inviteCodeRequired": "邀请码为必填项", + "settings.inviteCodeInvalid": "邀请码无效或已使用", + "settings.inviteCodeExchangeFailed": "邀请码兑换 API 密钥失败:{{error}}", "settings.currentVersion": "当前版本", "settings.checkForUpdates": "检查更新", "settings.checkingUpdates": "检查中...", @@ -316,10 +323,14 @@ "settings.alphaEnableOpenclawContextUi": "显示 OpenClaw Context(实验)", "settings.alphaEnableOpenclawContextUiHint": "开启后,在侧边栏显示 Context 页面,里面包含可展开的 Sessions 
和 Backups。", "settings.remoteDoctorGatewayUrl": "远程 Doctor Gateway 地址", - "settings.remoteDoctorGatewayUrlPlaceholder": "ws://127.0.0.1:3000/ws", - "settings.remoteDoctorGatewayUrlHint": "用于“远程 Doctor 修复”。如果接 clawpal-server,请填写类似 ws://127.0.0.1:3000/ws 的 websocket 地址。留空时回退到 openclaw.json 中的本地 gateway 端口。", + "settings.remoteDoctorGatewayUrlPlaceholder": "ws://65.21.45.43:3040/ws", + "settings.remoteDoctorGatewayUrlHint": "用于“远程 Doctor 修复”。如果接 clawpal-server,请填写类似 ws://65.21.45.43:3040/ws 的 websocket 地址。留空时回退到 openclaw.json 中的本地 gateway 端口。", "settings.remoteDoctorGatewayUrlSaved": "远程 Doctor Gateway 地址已保存", "settings.remoteDoctorGatewayUrlSaveFailed": "保存远程 Doctor Gateway 地址失败:{{error}}", + "settings.remoteDoctorInviteCode": "远程 Doctor 邀请码", + "settings.remoteDoctorInviteCodePlaceholder": "invite-001", + "settings.remoteDoctorInviteCodeHint": "输入邀请码并保存。ClawPal 会通过 /api-keys/exchange 兑换,并将返回的密钥保存给远程 Doctor 使用。", + "settings.remoteDoctorInviteCodeSaved": "远程 Doctor 邀请码兑换并保存成功", "settings.remoteDoctorGatewayAuthToken": "远程 Doctor Gateway Auth Token", "settings.remoteDoctorGatewayAuthTokenPlaceholder": "gateway auth token", "settings.remoteDoctorGatewayAuthTokenHint": "用于“远程 Doctor 修复”的可选 token 覆盖。留空时复用本地 gateway auth token。", diff --git a/src/pages/Doctor.tsx b/src/pages/Doctor.tsx index c8aca054..0a83f825 100644 --- a/src/pages/Doctor.tsx +++ b/src/pages/Doctor.tsx @@ -12,21 +12,11 @@ import { DoctorLogsDialog } from "@/components/DoctorLogsDialog"; import { DoctorRecoveryOverview } from "@/components/DoctorRecoveryOverview"; import { DoctorTempProviderDialog } from "@/components/DoctorTempProviderDialog"; import { RescueAsciiHeader } from "@/components/RescueAsciiHeader"; -import { - AlertDialog, - AlertDialogAction, - AlertDialogCancel, - AlertDialogContent, - AlertDialogDescription, - AlertDialogFooter, - AlertDialogHeader, - AlertDialogTitle, -} from "@/components/ui/alert-dialog"; import { Button } from "@/components/ui/button"; import { Card, CardContent, 
CardHeader } from "@/components/ui/card"; import { useInstance } from "@/lib/instance-context"; import { localizeDoctorReportText } from "@/lib/doctor-report-i18n"; -import { requestRemoteDoctorSettingsFocus } from "@/lib/remote-doctor-navigation"; +import { formatRemoteDoctorErrorMessage } from "@/lib/remote-doctor-error"; import { createDataLoadRequestId, emitDataLoadMetric, @@ -91,7 +81,6 @@ export function Doctor(_: DoctorProps) { const [tempProviderDialogOpen, setTempProviderDialogOpen] = useState(false); const [tempProviderProfileId, setTempProviderProfileId] = useState(null); const [activeRepairMode, setActiveRepairMode] = useState<"localRepair" | "remoteDoctor" | null>(null); - const [remoteDoctorConfigPromptOpen, setRemoteDoctorConfigPromptOpen] = useState(false); const busy = diagnosisLoading || repairing; const liveReadsReady = ua.instanceToken !== 0; @@ -340,11 +329,6 @@ export function Doctor(_: DoctorProps) { setError(t("doctor.rescueBotConnectRequired", { defaultValue: "Connect to SSH first." })); return; } - const prefs = await ua.getAppPreferences(); - if (!prefs.remoteDoctorGatewayUrl?.trim()) { - setRemoteDoctorConfigPromptOpen(true); - return; - } setRepairing(true); setActiveRepairMode("remoteDoctor"); setError(null); @@ -359,19 +343,15 @@ export function Doctor(_: DoctorProps) { } } catch (cause) { const text = cause instanceof Error ? cause.message : String(cause); - setError(text); - setStatusLine(text); + const formatted = formatRemoteDoctorErrorMessage(text); + setError(formatted); + setStatusLine(formatted); } finally { setRepairing(false); setActiveRepairMode(null); } }, [busy, diagnosis, isConnected, isRemote, liveReadsReady, runDiagnosis, t, ua]); - const handleOpenRemoteDoctorSettings = useCallback(() => { - setRemoteDoctorConfigPromptOpen(false); - requestRemoteDoctorSettingsFocus(); - }, []); - const buttonLabel = useMemo(() => { if (diagnosisLoading) { return t("doctor.analyzing", { defaultValue: "Diagnosing..." 
}); @@ -551,26 +531,6 @@ export function Doctor(_: DoctorProps) { initialProfileId={tempProviderProfileId} onSaved={handleTempProviderSaved} /> - - - - - {t("doctor.remoteDoctorGatewayRequiredTitle")} - - - {t("doctor.remoteDoctorGatewayRequiredDescription")} - - - - - {t("doctor.cancel")} - - - {t("doctor.openRemoteDoctorSettings")} - - - - ); } diff --git a/src/pages/Settings.tsx b/src/pages/Settings.tsx index 9ee67f8e..04d3d053 100644 --- a/src/pages/Settings.tsx +++ b/src/pages/Settings.tsx @@ -109,9 +109,8 @@ export function Settings({ const [authSuggestion, setAuthSuggestion] = useState(null); const [testingProfileId, setTestingProfileId] = useState(null); const [showSshTransferSpeedUi, setShowSshTransferSpeedUi] = useState(false); - const [remoteDoctorGatewayUrl, setRemoteDoctorGatewayUrl] = useState(""); - const [remoteDoctorGatewayAuthToken, setRemoteDoctorGatewayAuthToken] = useState(""); - const remoteDoctorGatewayUrlInputRef = useRef(null); + const [remoteDoctorInviteCode, setRemoteDoctorInviteCode] = useState(""); + const remoteDoctorInviteCodeInputRef = useRef(null); const [catalogRefreshed, setCatalogRefreshed] = useState(false); @@ -119,7 +118,7 @@ export function Settings({ const { appVersion, appUpdate, appUpdateChecking, appUpdating, appUpdateProgress, handleCheckForUpdates, handleAppUpdate } = useAppUpdate(hasAppUpdate, onAppUpdateSeen); const focusRemoteDoctorGatewayUrlInput = useCallback(() => { - const input = remoteDoctorGatewayUrlInputRef.current; + const input = remoteDoctorInviteCodeInputRef.current; if (!input) return; input.scrollIntoView({ behavior: "smooth", block: "center" }); window.setTimeout(() => { @@ -186,8 +185,6 @@ export function Settings({ ua.getAppPreferences() .then((prefs) => { setShowSshTransferSpeedUi(Boolean(prefs.showSshTransferSpeedUi)); - setRemoteDoctorGatewayUrl(prefs.remoteDoctorGatewayUrl ?? ""); - setRemoteDoctorGatewayAuthToken(prefs.remoteDoctorGatewayAuthToken ?? 
""); }) .catch((e) => console.error("Failed to load app preferences:", e)); }, [ua]); @@ -481,31 +478,76 @@ export function Settings({ }); }, [t, ua]); - const handleRemoteDoctorGatewayUrlSave = useCallback(() => { - const nextValue = remoteDoctorGatewayUrl.trim(); - ua.setRemoteDoctorGatewayUrlPreference(nextValue || null) - .then((prefs) => { - setRemoteDoctorGatewayUrl(prefs.remoteDoctorGatewayUrl ?? ""); - toast.success(t("settings.remoteDoctorGatewayUrlSaved")); - }) - .catch((e) => { - const errorText = e instanceof Error ? e.message : String(e); - toast.error(t("settings.remoteDoctorGatewayUrlSaveFailed", { error: errorText })); + const logRemoteDoctorInviteCodeFailure = useCallback( + (errorCode: string, errorMessage: string, inviteCodeLength: number) => { + const payload = { + event: "remote_doctor_invite_code_exchange_failed", + errorCode, + errorMessage, + inviteCodeLength, + gatewayUrl: "ws://65.21.45.43:3040/ws", + }; + console.error("Remote doctor invite code exchange failed:", payload); + void ua.logAppEvent(`[invite_exchange][error] ${JSON.stringify(payload)}`).catch((logError) => { + if (import.meta.env.DEV) { + console.warn("[dev ignored error] logRemoteDoctorInviteCodeFailure", logError); + } }); - }, [remoteDoctorGatewayUrl, t, ua]); + void ua.captureFrontendError( + `Remote doctor invite code exchange failed: ${errorCode}`, + JSON.stringify(payload), + "error", + ).catch((reportError: unknown) => { + if (import.meta.env.DEV) { + console.warn("[dev ignored error] captureFrontendError invite exchange", reportError); + } + }); + }, + [ua], + ); - const handleRemoteDoctorGatewayAuthTokenSave = useCallback(() => { - const nextValue = remoteDoctorGatewayAuthToken.trim(); - ua.setRemoteDoctorGatewayAuthTokenPreference(nextValue || null) - .then((prefs) => { - setRemoteDoctorGatewayAuthToken(prefs.remoteDoctorGatewayAuthToken ?? 
""); - toast.success(t("settings.remoteDoctorGatewayAuthTokenSaved")); + const classifyInviteExchangeFailure = useCallback((error: unknown): { code: string; message: string } => { + const text = error instanceof Error ? error.message : String(error); + const lower = text.toLowerCase(); + if (lower.includes("invitecode is required")) { + return { code: "INVITE_CODE_REQUIRED", message: "inviteCode is required" }; + } + if (lower.includes("invalid invite code")) { + return { code: "INVALID_INVITE_CODE", message: "invalid invite code" }; + } + if (lower.includes("request failed") || lower.includes("timed out") || lower.includes("connect")) { + return { code: "NETWORK_ERROR", message: text }; + } + return { code: "EXCHANGE_FAILED", message: text }; + }, []); + + const handleRemoteDoctorInviteCodeSave = useCallback(() => { + const inviteCode = remoteDoctorInviteCode.trim(); + if (!inviteCode) { + logRemoteDoctorInviteCodeFailure("INVITE_CODE_REQUIRED", "inviteCode is required", 0); + toast.error(t("settings.inviteCodeRequired")); + return; + } + ua.exchangeRemoteDoctorInviteCode(inviteCode) + .then((apiKey) => ua.setRemoteDoctorGatewayAuthTokenPreference(apiKey)) + .then(() => { + setRemoteDoctorInviteCode(""); + toast.success(t("settings.remoteDoctorInviteCodeSaved")); }) - .catch((e) => { - const errorText = e instanceof Error ? 
e.message : String(e); - toast.error(t("settings.remoteDoctorGatewayAuthTokenSaveFailed", { error: errorText })); + .catch((error) => { + const classified = classifyInviteExchangeFailure(error); + logRemoteDoctorInviteCodeFailure(classified.code, classified.message, inviteCode.length); + if (classified.code === "INVITE_CODE_REQUIRED") { + toast.error(t("settings.inviteCodeRequired")); + return; + } + if (classified.code === "INVALID_INVITE_CODE") { + toast.error(t("settings.inviteCodeInvalid")); + return; + } + toast.error(t("settings.inviteCodeExchangeFailed", { error: classified.message })); }); - }, [remoteDoctorGatewayAuthToken, t, ua]); + }, [classifyInviteExchangeFailure, logRemoteDoctorInviteCodeFailure, remoteDoctorInviteCode, t, ua]); return (
@@ -737,14 +779,11 @@ export function Settings({ {showPreferences && ( )} @@ -838,6 +877,7 @@ export function Settings({ const currentRef = p.authRef.trim(); return { ...p, + apiKey: "", authRef: currentRef || defaultEnvAuthRef(p.provider), }; }