From 4fcb475afbef092ed6d4ceb9356d01b91f7adac8 Mon Sep 17 00:00:00 2001 From: quangdang46 Date: Tue, 12 May 2026 06:26:30 +0000 Subject: [PATCH] =?UTF-8?q?feat(cli):=20M5=20tooling=20integration=20?= =?UTF-8?q?=E2=80=94=20mitm/tunnel/tool/translator/media=20(42=20cmds)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implement the M5 milestone from todo-openproxy.md — ~42 runtime CLI commands that talk to the running server's HTTP API. Each command supports both human and `--robot` (`openproxy.v1.*` envelope) output, and returns exit code 6 when the server is unreachable. New modules: - src/cli/mitm.rs (~8 cmds): status, start, stop, cert generate|path, config get|set|apply - src/cli/tunnel_rt.rs (~7 cmds, extends M1 stub): enable|disable cloudflare|tailscale, tailscale install|login|check|enable|disable - src/cli/tool.rs (~8 cmds): list, show, apply (with --dry-run), revert, execute, run, doc, antigravity-mitm enable|disable - src/cli/translator.rs (~6 cmds): formats, translate, send, preset list|save|load - src/cli/media.rs (~13 cmds): providers list|add|edit|delete, combo list|create, tts voices|speak (binary stdout), stt transcribe, embed, image generate, search, web fetch Runtime helpers added: put_json, patch_json, delete_json, post_json_bytes in src/cli/runtime.rs. Tests: tests/cli_m5_robot_envelopes.rs (15 integration tests covering the happy path for each command group via wiremock). 
--- src/cli/media.rs | 692 ++++++++++++++++++++++++++++++++ src/cli/mitm.rs | 339 ++++++++++++++++ src/cli/mod.rs | 130 +++++- src/cli/runtime.rs | 57 +++ src/cli/tool.rs | 500 +++++++++++++++++++++++ src/cli/translator.rs | 303 ++++++++++++++ src/cli/tunnel_rt.rs | 184 +++++++++ src/main.rs | 90 ++++- tests/cli_m5_robot_envelopes.rs | 470 ++++++++++++++++++++++ 9 files changed, 2751 insertions(+), 14 deletions(-) create mode 100644 src/cli/media.rs create mode 100644 src/cli/mitm.rs create mode 100644 src/cli/tool.rs create mode 100644 src/cli/translator.rs create mode 100644 src/cli/tunnel_rt.rs create mode 100644 tests/cli_m5_robot_envelopes.rs diff --git a/src/cli/media.rs b/src/cli/media.rs new file mode 100644 index 00000000..1d25c589 --- /dev/null +++ b/src/cli/media.rs @@ -0,0 +1,692 @@ +//! `openproxy media *` — media provider + media endpoint helpers (PLAN v3 mục +//! 4.15). Wraps `/api/media-providers/*` for CRUD on TTS/STT/embed/image/web +//! providers and the synchronous `/v1/audio/*`, `/v1/embeddings`, +//! `/v1/images/generations`, `/v1/search`, `/v1/web/fetch` endpoints. +//! +//! `media tts speak` is the only command that writes raw bytes (audio) to +//! stdout — everything else emits JSON envelopes via `--robot`. + +use std::io::Write; + +use clap::Subcommand; +use serde_json::{json, Map, Value}; + +use crate::cli::config::ResolvedConfig; +use crate::cli::output::{emit_robot, humanln, OutputCtx}; +use crate::cli::runtime::{read_input, require_runtime, rt_error_to_exit, Runtime}; + +#[derive(Debug, Clone, Subcommand)] +pub enum MediaCmd { + /// Manage media providers (TTS, STT, embed, image, web). + Providers { + #[command(subcommand)] + cmd: ProvidersCmd, + }, + /// Manage media combos (chained provider fallbacks per kind). + Combo { + #[command(subcommand)] + cmd: ComboCmd, + }, + /// Text-to-speech commands. + Tts { + #[command(subcommand)] + cmd: TtsCmd, + }, + /// Speech-to-text commands. 
+ Stt { + #[command(subcommand)] + cmd: SttCmd, + }, + /// Generate embeddings. + Embed { + #[arg(long)] + provider: String, + /// Embedding model id. + #[arg(long, default_value = "")] + model: String, + /// Text input or `-` for stdin. + #[arg(long, default_value = "-")] + text: String, + }, + /// Image generation. + Image { + #[command(subcommand)] + cmd: ImageCmd, + }, + /// Generic web search via `/v1/search`. + Search { + #[arg(long)] + provider: String, + /// Query string or `-` for stdin. + #[arg(long, default_value = "-")] + query: String, + }, + /// Web fetch (extracted page content). + Web { + #[command(subcommand)] + cmd: WebCmd, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum ProvidersCmd { + /// List media providers, optionally filtered by kind. + List { + /// One of `tts|stt|embed|image|web` (legacy: `embedding`, `search`). + #[arg(long)] + kind: Option, + }, + /// Add a media provider via POST `/api/media-providers`. + Add { + /// Provider id (e.g. `elevenlabs`, `openai`, `cohere`, `firecrawl`). + #[arg(long)] + provider: String, + /// Kind: `tts|stt|embedding|image|search|webSearch|webFetch`. + #[arg(long)] + kind: String, + /// Display name for the provider entry. + #[arg(long)] + name: String, + /// JSON body or `-` for stdin (merged on top of `{provider, kind, name}`). + #[arg(long = "from-file")] + from_file: Option, + }, + /// Edit a media provider (PUT via `/api/media-providers/`). + Edit { + /// Provider connection id. + id: String, + /// JSON body of fields to update, or `-` for stdin. + #[arg(long = "from-file", default_value = "-")] + from_file: String, + }, + /// Delete a media provider. + Delete { + /// Provider connection id. + id: String, + /// Kind path segment (defaults to `tts`). + #[arg(long, default_value = "tts")] + kind: String, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum ComboCmd { + /// List media combos. + List, + /// Create a media combo (chained providers per kind). 
+ Create { + /// Combo kind: `tts|stt|embedding|image|search`. + #[arg(long)] + kind: String, + /// Combo display name. + #[arg(long)] + name: String, + /// Comma-separated list of provider ids. + #[arg(long, value_delimiter = ',')] + members: Vec, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum TtsCmd { + /// List available voices, optionally filtered to one provider. + Voices { + #[arg(long)] + provider: Option, + /// Optional language filter (e.g. `en`). + #[arg(long)] + lang: Option, + }, + /// Synthesize speech to stdout (raw audio bytes). + Speak { + #[arg(long)] + provider: String, + #[arg(long, default_value = "")] + model: String, + #[arg(long)] + voice: String, + /// Text input or `-` for stdin. + #[arg(long, default_value = "-")] + text: String, + /// Output format hint (mp3, wav, ...). Default: `mp3`. + #[arg(long, default_value = "mp3")] + format: String, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum SttCmd { + /// Transcribe an audio file. + Transcribe { + #[arg(long)] + provider: String, + #[arg(long, default_value = "")] + model: String, + /// Path to the audio file on disk (base64-encoded into the request). + #[arg(long)] + file: String, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum ImageCmd { + /// Generate an image and print the JSON response. + Generate { + #[arg(long)] + provider: String, + #[arg(long, default_value = "")] + model: String, + /// Prompt text or `-` for stdin. + #[arg(long, default_value = "-")] + prompt: String, + /// Image size (e.g. `1024x1024`). + #[arg(long, default_value = "1024x1024")] + size: String, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum WebCmd { + /// Fetch a URL via `/v1/web/fetch`. + Fetch { + /// Page URL to fetch (positional to avoid colliding with the + /// global `--url` server-override flag). + page: String, + #[arg(long)] + provider: String, + /// Output format: markdown (default), html, text. 
+ #[arg(long, default_value = "markdown")] + format: String, + /// Truncate the output to N characters. + #[arg(long)] + max_chars: Option, + }, +} + +pub async fn run(cmd: MediaCmd, cfg: &ResolvedConfig, ctx: OutputCtx) -> anyhow::Result { + let rt = match require_runtime(cfg).await { + Ok(rt) => rt, + Err(e) => return rt_error_to_exit(ctx, e), + }; + match cmd { + MediaCmd::Providers { cmd } => match cmd { + ProvidersCmd::List { kind } => run_providers_list(&rt, ctx, kind).await, + ProvidersCmd::Add { + provider, + kind, + name, + from_file, + } => run_providers_add(&rt, ctx, provider, kind, name, from_file).await, + ProvidersCmd::Edit { id, from_file } => { + run_providers_edit(&rt, ctx, id, from_file).await + } + ProvidersCmd::Delete { id, kind } => run_providers_delete(&rt, ctx, id, kind).await, + }, + MediaCmd::Combo { cmd } => match cmd { + ComboCmd::List => run_combo_list(&rt, ctx).await, + ComboCmd::Create { + kind, + name, + members, + } => run_combo_create(&rt, ctx, kind, name, members).await, + }, + MediaCmd::Tts { cmd } => match cmd { + TtsCmd::Voices { provider, lang } => run_tts_voices(&rt, ctx, provider, lang).await, + TtsCmd::Speak { + provider, + model, + voice, + text, + format, + } => run_tts_speak(&rt, ctx, provider, model, voice, text, format).await, + }, + MediaCmd::Stt { cmd } => match cmd { + SttCmd::Transcribe { + provider, + model, + file, + } => run_stt_transcribe(&rt, ctx, provider, model, file).await, + }, + MediaCmd::Embed { + provider, + model, + text, + } => run_embed(&rt, ctx, provider, model, text).await, + MediaCmd::Image { cmd } => match cmd { + ImageCmd::Generate { + provider, + model, + prompt, + size, + } => run_image_generate(&rt, ctx, provider, model, prompt, size).await, + }, + MediaCmd::Search { provider, query } => run_search(&rt, ctx, provider, query).await, + MediaCmd::Web { cmd } => match cmd { + WebCmd::Fetch { + provider, + page, + format, + max_chars, + } => run_web_fetch(&rt, ctx, provider, page, format, 
max_chars).await, + }, + } +} + +async fn run_providers_list( + rt: &Runtime, + ctx: OutputCtx, + kind: Option, +) -> anyhow::Result { + let path = match &kind { + Some(k) => format!("/api/media-providers/{}", encode_kind(k)), + None => "/api/media-providers".to_string(), + }; + match rt.get_json(&path).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.providers.list", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_providers_add( + rt: &Runtime, + ctx: OutputCtx, + provider: String, + kind: String, + name: String, + from_file: Option, +) -> anyhow::Result { + let mut body = if let Some(path) = from_file { + let raw = read_input(&path)?; + serde_json::from_str(raw.trim()).map_err(|e| anyhow::anyhow!("--from-file JSON: {e}"))? + } else { + Value::Object(Map::new()) + }; + if let Some(obj) = body.as_object_mut() { + obj.insert("provider".to_string(), Value::String(provider)); + obj.insert("mediaType".to_string(), Value::String(server_kind(&kind))); + obj.insert("name".to_string(), Value::String(name)); + } + match rt.post_json("/api/media-providers", &body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.providers.add", payload)?; + } else { + humanln( + ctx, + format!( + "Added media provider id={}", + payload.get("id").and_then(Value::as_str).unwrap_or("?") + ), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_providers_edit( + rt: &Runtime, + ctx: OutputCtx, + id: String, + from_file: String, +) -> anyhow::Result { + let raw = read_input(&from_file)?; + let body: Value = + serde_json::from_str(raw.trim()).map_err(|e| anyhow::anyhow!("--from-file JSON: {e}"))?; + let path = format!("/api/media-providers/{}", urlencoding::encode(&id)); + match rt.put_json(&path, &body).await { + Ok(payload) => { + if ctx.is_robot() { + 
emit_robot("openproxy.v1.media.providers.edit", payload)?; + } else { + humanln(ctx, format!("Edited media provider id={id}")); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_providers_delete( + rt: &Runtime, + ctx: OutputCtx, + id: String, + kind: String, +) -> anyhow::Result { + // The server route is `/api/media-providers/{kind}` with `?id=` or the + // kind path acts as the id when no kind is provided. We mirror the + // shape used by the dashboard delete button. + let path = format!( + "/api/media-providers/{}?id={}", + encode_kind(&kind), + urlencoding::encode(&id) + ); + match rt.delete_json(&path).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.providers.delete", payload)?; + } else { + humanln(ctx, format!("Deleted media provider id={id}")); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_combo_list(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + // No dedicated list endpoint; use `/api/combos` filtered to media kinds. 
+ match rt.get_json("/api/combos").await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.combo.list", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_combo_create( + rt: &Runtime, + ctx: OutputCtx, + kind: String, + name: String, + members: Vec, +) -> anyhow::Result { + let body = json!({ + "name": name, + "kind": server_kind(&kind), + "providers": members, + "strategy": "fallback", + }); + match rt.post_json("/api/combos", &body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.combo.create", payload)?; + } else { + humanln( + ctx, + format!( + "Created media combo id={}", + payload.get("id").and_then(Value::as_str).unwrap_or("?") + ), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_tts_voices( + rt: &Runtime, + ctx: OutputCtx, + provider: Option, + lang: Option, +) -> anyhow::Result { + let mut path = "/api/media-providers/tts/voices".to_string(); + let mut query = Vec::new(); + if let Some(p) = provider { + query.push(format!("provider={}", urlencoding::encode(&p))); + } + if let Some(l) = lang { + query.push(format!("lang={}", urlencoding::encode(&l))); + } + if !query.is_empty() { + path = format!("{}?{}", path, query.join("&")); + } + match rt.get_json(&path).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.tts.voices", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_tts_speak( + rt: &Runtime, + ctx: OutputCtx, + provider: String, + model: String, + voice: String, + text: String, + format: String, +) -> anyhow::Result { + let input = read_input(&text)?; + let body = json!({ + "model": if model.is_empty() { provider.clone() } else { model.clone() }, + "voice": voice, 
+ "input": input.trim_end(), + "response_format": format, + "provider": provider, + }); + match rt.post_json_bytes("/v1/audio/speech", &body).await { + Ok((bytes, content_type)) => { + if ctx.is_robot() { + emit_robot( + "openproxy.v1.media.tts.speak", + json!({ + "bytes": bytes.len(), + "content_type": content_type, + }), + )?; + } + // Write raw audio to stdout, even in --robot mode (the envelope + // gives metadata; the bytes are the payload). Agents reading + // both should split stdout into two channels. + let mut stdout = std::io::stdout().lock(); + stdout.write_all(&bytes)?; + stdout.flush()?; + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_stt_transcribe( + rt: &Runtime, + ctx: OutputCtx, + provider: String, + model: String, + file: String, +) -> anyhow::Result { + let raw = std::fs::read(&file).map_err(|e| anyhow::anyhow!("read --file {file}: {e}"))?; + use base64::{engine::general_purpose::STANDARD as B64, Engine}; + let encoded = B64.encode(&raw); + let body = json!({ + "provider": provider, + "model": if model.is_empty() { "whisper-1".to_string() } else { model }, + "file_b64": encoded, + "file_name": std::path::Path::new(&file) + .file_name() + .map(|s| s.to_string_lossy().to_string()) + .unwrap_or_else(|| "audio".to_string()), + }); + match rt.post_json("/v1/audio/transcriptions", &body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.stt.transcribe", payload)?; + } else { + let text = payload.get("text").and_then(Value::as_str).unwrap_or(""); + println!("{text}"); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_embed( + rt: &Runtime, + ctx: OutputCtx, + provider: String, + model: String, + text: String, +) -> anyhow::Result { + let input = read_input(&text)?; + let body = json!({ + "provider": provider, + "model": if model.is_empty() { "text-embedding-3-small".to_string() } else { model }, + "input": input.trim_end(), + }); + match rt.post_json("/v1/embeddings", 
&body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.embed", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_image_generate( + rt: &Runtime, + ctx: OutputCtx, + provider: String, + model: String, + prompt: String, + size: String, +) -> anyhow::Result { + let prompt_text = read_input(&prompt)?; + let body = json!({ + "provider": provider, + "model": if model.is_empty() { "gpt-image-1".to_string() } else { model }, + "prompt": prompt_text.trim(), + "size": size, + }); + match rt.post_json("/v1/images/generations", &body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.image.generate", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_search( + rt: &Runtime, + ctx: OutputCtx, + provider: String, + query: String, +) -> anyhow::Result { + let q = read_input(&query)?; + let body = json!({ + "provider": provider, + "query": q.trim(), + }); + match rt.post_json("/v1/search", &body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.media.search", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_web_fetch( + rt: &Runtime, + ctx: OutputCtx, + provider: String, + url: String, + format: String, + max_chars: Option, +) -> anyhow::Result { + let mut body = json!({ + "provider": provider, + "url": url, + "format": format, + }); + if let Some(m) = max_chars { + if let Some(obj) = body.as_object_mut() { + obj.insert("maxCharacters".to_string(), json!(m)); + } + } + match rt.post_json("/v1/web/fetch", &body).await { + Ok(payload) => { + if ctx.is_robot() { + 
emit_robot("openproxy.v1.media.web.fetch", payload)?; + } else { + let content = payload.get("content").and_then(Value::as_str).unwrap_or(""); + println!("{content}"); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +/// Map CLI-friendly kind names to what the server route expects. +fn server_kind(kind: &str) -> String { + match kind { + "embed" => "embedding".into(), + "web-search" | "websearch" => "webSearch".into(), + "web-fetch" | "webfetch" => "webFetch".into(), + other => other.to_string(), + } +} + +fn encode_kind(kind: &str) -> String { + urlencoding::encode(&server_kind(kind)).into_owned() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn server_kind_normalizes_aliases() { + assert_eq!(server_kind("embed"), "embedding"); + assert_eq!(server_kind("web-search"), "webSearch"); + assert_eq!(server_kind("tts"), "tts"); + } + + #[test] + fn encode_kind_keeps_camel_case() { + assert_eq!(encode_kind("webSearch"), "webSearch"); + } +} diff --git a/src/cli/mitm.rs b/src/cli/mitm.rs new file mode 100644 index 00000000..c4822d1a --- /dev/null +++ b/src/cli/mitm.rs @@ -0,0 +1,339 @@ +//! `openproxy mitm *` — manage the embedded MITM router (PLAN v3 mục 4.10). +//! +//! All commands talk to the running server's `/api/mitm/*` and +//! `/api/mitm-config` endpoints. The server is the source of truth for MITM +//! state (cert metadata, active routes); the CLI just adds the agent-friendly +//! envelope and bulk-apply ergonomics. + +use std::path::PathBuf; + +use clap::Subcommand; +use serde_json::{json, Map, Value}; + +use crate::cli::config::ResolvedConfig; +use crate::cli::output::{emit_error, emit_robot, humanln, OutputCtx}; +use crate::cli::runtime::{read_input, require_runtime, rt_error_to_exit, Runtime}; + +#[derive(Debug, Clone, Subcommand)] +pub enum MitmCmd { + /// Show whether MITM is enabled, route count, and cert status. + Status, + /// Activate every configured MITM route (server-side reload). 
+ Start { + /// Currently ignored — the server uses the configured router port. + #[arg(long, default_value_t = 8080)] + port: u16, + }, + /// Deactivate all MITM routes (drops `mitm_alias`). + Stop, + /// Certificate management. + Cert { + #[command(subcommand)] + cmd: CertCmd, + }, + /// Read/write MITM configuration (routes + per-tool settings). + Config { + #[command(subcommand)] + cmd: ConfigCmd, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum CertCmd { + /// Generate a fresh CA certificate stored on the server. + Generate, + /// Print the local path where the CLI stores its cert copy. + Path, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum ConfigCmd { + /// Fetch the current MITM config (router + routes + per-tool). + Get, + /// Set a single key on a single route. `key` is a dotted path under the + /// route, e.g. `routes.claude.upstreamUrl`. + Set { + #[arg(long)] + key: String, + #[arg(long)] + value: String, + }, + /// Bulk-apply a JSON document matching the PUT `/api/mitm-config` shape. + Apply { + /// Path to a JSON file, or `-` to read stdin. + #[arg(long = "from-file", default_value = "-")] + from_file: String, + }, +} + +pub async fn run(cmd: MitmCmd, cfg: &ResolvedConfig, ctx: OutputCtx) -> anyhow::Result { + let rt = match require_runtime(cfg).await { + Ok(rt) => rt, + Err(e) => return rt_error_to_exit(ctx, e), + }; + match cmd { + MitmCmd::Status => run_status(&rt, ctx).await, + MitmCmd::Start { .. 
} => run_start(&rt, ctx).await, + MitmCmd::Stop => run_stop(&rt, ctx).await, + MitmCmd::Cert { cmd } => match cmd { + CertCmd::Generate => run_cert_generate(&rt, ctx).await, + CertCmd::Path => run_cert_path(cfg, ctx), + }, + MitmCmd::Config { cmd } => match cmd { + ConfigCmd::Get => run_config_get(&rt, ctx).await, + ConfigCmd::Set { key, value } => run_config_set(&rt, ctx, key, value).await, + ConfigCmd::Apply { from_file } => run_config_apply(&rt, ctx, &from_file).await, + }, + } +} + +async fn run_status(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + match rt.get_json("/api/mitm-config").await { + Ok(payload) => { + let enabled = payload + .get("enabled") + .and_then(Value::as_bool) + .unwrap_or(false); + let routes = payload + .get("routes") + .and_then(Value::as_object) + .map(|m| m.len()) + .unwrap_or(0); + let cert = payload + .get("certStatus") + .or_else(|| payload.get("cert_status")) + .cloned() + .unwrap_or(Value::Null); + if ctx.is_robot() { + emit_robot( + "openproxy.v1.mitm.status", + json!({ + "enabled": enabled, + "routes": routes, + "cert": cert, + }), + )?; + } else { + humanln( + ctx, + format!( + "MITM: {} ({} route{})", + if enabled { "enabled" } else { "disabled" }, + routes, + if routes == 1 { "" } else { "s" } + ), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_start(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + match rt.post_empty("/api/mitm/start").await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.mitm.start", payload)?; + } else { + humanln(ctx, "MITM proxy started."); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_stop(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + match rt.post_empty("/api/mitm/stop").await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.mitm.stop", payload)?; + } else { + humanln(ctx, "MITM proxy stopped."); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn 
run_cert_generate(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + match rt.post_empty("/api/mitm/cert/generate").await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.mitm.cert.generate", payload)?; + } else { + let fp = payload + .get("fingerprint") + .and_then(Value::as_str) + .unwrap_or("?"); + humanln(ctx, format!("New MITM cert generated. fingerprint={fp}")); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +fn run_cert_path(cfg: &ResolvedConfig, ctx: OutputCtx) -> anyhow::Result { + let path = local_cert_path(&cfg.data_dir); + if ctx.is_robot() { + emit_robot( + "openproxy.v1.mitm.cert.path", + json!({"path": path.to_string_lossy()}), + )?; + } else { + println!("{}", path.display()); + } + Ok(0) +} + +/// Local path where `openproxy mitm cert` reads/writes the CA bundle copy. +/// The server owns the canonical cert; this is the agent-friendly export +/// location. +fn local_cert_path(data_dir: &std::path::Path) -> PathBuf { + data_dir.join("mitm-ca.pem") +} + +async fn run_config_get(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + match rt.get_json("/api/mitm-config").await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.mitm.config", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_config_set( + rt: &Runtime, + ctx: OutputCtx, + key: String, + value: String, +) -> anyhow::Result { + // The server PUT shape is `{routerBaseUrl?, routes?: {name: route}}`. + // We expose two kinds of writes: + // --key routerBaseUrl --value http://... (top-level) + // --key routes.. --value ... 
(per-route field) + let parts: Vec<&str> = key.split('.').collect(); + let body = if parts.len() == 1 && parts[0] == "routerBaseUrl" { + json!({"routerBaseUrl": value}) + } else if parts.len() == 3 && parts[0] == "routes" { + let name = parts[1].to_string(); + let field = parts[2].to_string(); + let mut entry = Map::new(); + // upstreamUrl is required by the server PUT — accept it as the + // primary write and forward any other field as-is. + if field == "upstreamUrl" { + entry.insert("upstreamUrl".to_string(), Value::String(value)); + } else { + // The server requires upstreamUrl on every route entry; load + // the current one first to avoid clearing it. + let current = rt.get_json("/api/mitm-config").await; + let upstream = match ¤t { + Ok(v) => v + .get("routes") + .and_then(|r| r.get(&name)) + .and_then(|r| r.get("upstreamUrl").or_else(|| r.get("upstream_url"))) + .and_then(Value::as_str) + .unwrap_or("") + .to_string(), + Err(_) => String::new(), + }; + entry.insert("upstreamUrl".to_string(), Value::String(upstream)); + let coerced = coerce_value(&field, &value); + entry.insert(field, coerced); + } + let mut routes = Map::new(); + routes.insert(name, Value::Object(entry)); + json!({ "routes": routes }) + } else { + return Ok(emit_error( + ctx, + "usage", + "key must be `routerBaseUrl` or `routes..`", + )?); + }; + + match rt.put_json("/api/mitm-config", &body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.mitm.config.set", payload)?; + } else { + humanln(ctx, format!("Updated `{key}`.")); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_config_apply(rt: &Runtime, ctx: OutputCtx, from: &str) -> anyhow::Result { + let text = read_input(from)?; + let body: Value = serde_json::from_str(text.trim()).map_err(|e| { + anyhow::anyhow!("--from-file must be JSON matching the PUT /api/mitm-config shape: {e}") + })?; + match rt.put_json("/api/mitm-config", &body).await { + Ok(payload) => { + if ctx.is_robot() { + 
emit_robot("openproxy.v1.mitm.config.apply", payload)?; + } else { + humanln(ctx, "MITM config applied."); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +/// Best-effort coercion of `--value` strings into typed JSON values for the +/// fields the server understands (`enabled`, `requestTransform`, +/// `responseTransform` are booleans). +fn coerce_value(field: &str, value: &str) -> Value { + let lower = value.to_ascii_lowercase(); + let is_bool_field = matches!( + field, + "enabled" + | "requestTransform" + | "responseTransform" + | "request_transform" + | "response_transform" + ); + if is_bool_field { + match lower.as_str() { + "true" | "yes" | "1" | "on" => return Value::Bool(true), + "false" | "no" | "0" | "off" => return Value::Bool(false), + _ => {} + } + } + Value::String(value.to_string()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn coerce_value_handles_booleans() { + assert_eq!(coerce_value("enabled", "true"), Value::Bool(true)); + assert_eq!(coerce_value("requestTransform", "0"), Value::Bool(false)); + assert_eq!( + coerce_value("pathPrefix", "/api"), + Value::String("/api".to_string()) + ); + } + + #[test] + fn local_cert_path_lives_in_data_dir() { + let p = local_cert_path(std::path::Path::new("/tmp/op")); + assert!(p.ends_with("mitm-ca.pem")); + } +} diff --git a/src/cli/mod.rs b/src/cli/mod.rs index 2c285203..22636cc1 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -27,6 +27,8 @@ pub mod config; pub mod doctor; pub mod key_ext; pub mod logs; +pub mod media; +pub mod mitm; pub mod models; pub mod output; pub mod pool_ext; @@ -38,6 +40,9 @@ pub mod quota; pub mod runtime; pub mod schema; pub mod server; +pub mod tool; +pub mod translator; +pub mod tunnel_rt; pub mod usage; #[cfg(test)] @@ -165,6 +170,26 @@ pub enum Command { #[command(subcommand)] cmd: TunnelCmd, }, + /// Manage the in-process MITM router (PLAN v3 §4.10). 
+ Mitm { + #[command(subcommand)] + cmd: mitm::MitmCmd, + }, + /// Manage CLI-tool integrations (claude, codex, copilot, ...). + Tool { + #[command(subcommand)] + cmd: tool::ToolCmd, + }, + /// Translate or pass requests through the format translator. + Translator { + #[command(subcommand)] + cmd: translator::TranslatorCmd, + }, + /// Media providers + TTS / STT / embed / image / web helpers. + Media { + #[command(subcommand)] + cmd: media::MediaCmd, + }, Route { /// Model ID (e.g. openai/gpt-4o-mini) #[arg(long)] @@ -472,14 +497,30 @@ pub enum PoolCmd { #[derive(Debug, Clone, Subcommand)] pub enum TunnelCmd { + /// Local-in-process tunnel start (M1 stub). Start { #[arg(long, default_value = "cloudflare")] provider: String, #[arg(long, default_value_t = 4623)] port: u16, }, + /// Local-in-process tunnel stop (M1 stub). Stop, + /// Local-in-process tunnel status (M1 stub). Status, + /// Enable a tunnel provider via the running server's `/api/tunnel/*`. + Enable { + provider: String, + #[arg(long)] + port: Option, + }, + /// Disable a tunnel provider via the running server's `/api/tunnel/*`. + Disable { provider: String }, + /// Tailscale-specific helpers (install / login / check / enable / disable). + Tailscale { + #[command(subcommand)] + cmd: tunnel_rt::TailscaleCmd, + }, } impl Cli { @@ -528,12 +569,53 @@ impl Cli { let rt = tokio::runtime::Runtime::new()?; rt.block_on(models::run(cmd, &db, ctx)) } - Command::Tunnel { cmd } => { - let db = rt.block_on(Db::load())?; - let db = std::sync::Arc::new(db); - let rt = tokio::runtime::Runtime::new()?; - rt.block_on(run_tunnel(cmd, db.clone(), ctx)) - } + Command::Tunnel { cmd } => match cmd { + TunnelCmd::Start { .. 
} | TunnelCmd::Stop | TunnelCmd::Status => { + let db = rt.block_on(Db::load())?; + let db = std::sync::Arc::new(db); + let rt = tokio::runtime::Runtime::new()?; + rt.block_on(run_tunnel(cmd, db.clone(), ctx)) + } + TunnelCmd::Enable { provider, port } => { + let resolved = config::ResolvedConfig::resolve(overrides)?; + let rt = tokio::runtime::Runtime::new()?; + let exit = rt.block_on(tunnel_rt::run( + tunnel_rt::TunnelRtCmd::Enable { provider, port }, + &resolved, + ctx, + ))?; + if exit != 0 { + std::process::exit(exit); + } + Ok(()) + } + TunnelCmd::Disable { provider } => { + let resolved = config::ResolvedConfig::resolve(overrides)?; + let rt = tokio::runtime::Runtime::new()?; + let exit = rt.block_on(tunnel_rt::run( + tunnel_rt::TunnelRtCmd::Disable { provider }, + &resolved, + ctx, + ))?; + if exit != 0 { + std::process::exit(exit); + } + Ok(()) + } + TunnelCmd::Tailscale { cmd } => { + let resolved = config::ResolvedConfig::resolve(overrides)?; + let rt = tokio::runtime::Runtime::new()?; + let exit = rt.block_on(tunnel_rt::run( + tunnel_rt::TunnelRtCmd::Tailscale { cmd }, + &resolved, + ctx, + ))?; + if exit != 0 { + std::process::exit(exit); + } + Ok(()) + } + }, Command::Route { model, combo, @@ -661,6 +743,38 @@ impl Cli { } Ok(()) } + Command::Mitm { cmd } => { + let resolved = config::ResolvedConfig::resolve(overrides)?; + let exit = rt.block_on(mitm::run(cmd, &resolved, ctx))?; + if exit != 0 { + std::process::exit(exit); + } + Ok(()) + } + Command::Tool { cmd } => { + let resolved = config::ResolvedConfig::resolve(overrides)?; + let exit = rt.block_on(tool::run(cmd, &resolved, ctx))?; + if exit != 0 { + std::process::exit(exit); + } + Ok(()) + } + Command::Translator { cmd } => { + let resolved = config::ResolvedConfig::resolve(overrides)?; + let exit = rt.block_on(translator::run(cmd, &resolved, ctx))?; + if exit != 0 { + std::process::exit(exit); + } + Ok(()) + } + Command::Media { cmd } => { + let resolved = 
config::ResolvedConfig::resolve(overrides)?; + let exit = rt.block_on(media::run(cmd, &resolved, ctx))?; + if exit != 0 { + std::process::exit(exit); + } + Ok(()) + } } } else { Ok(()) @@ -933,6 +1047,10 @@ pub async fn run_tunnel( output::humanln(ctx, "Tunnel is stopped"); } } + TunnelCmd::Enable { .. } | TunnelCmd::Disable { .. } | TunnelCmd::Tailscale { .. } => { + // Routed via `tunnel_rt` in `Cli::run`; unreachable here. + unreachable!("runtime tunnel commands dispatched separately"); + } } Ok(()) } diff --git a/src/cli/runtime.rs b/src/cli/runtime.rs index 777d06dd..1c7592ec 100644 --- a/src/cli/runtime.rs +++ b/src/cli/runtime.rs @@ -166,6 +166,63 @@ impl Runtime { decode_json(res).await } + /// PUT a JSON body and decode the JSON response. + pub async fn put_json(&self, path: &str, body: &Value) -> Result { + let res = self + .request(Method::PUT, path) + .json(body) + .send() + .await + .map_err(map_err)?; + decode_json(res).await + } + + /// PATCH a JSON body and decode the JSON response. + pub async fn patch_json(&self, path: &str, body: &Value) -> Result { + let res = self + .request(Method::PATCH, path) + .json(body) + .send() + .await + .map_err(map_err)?; + decode_json(res).await + } + + /// DELETE and decode the JSON response (empty body → `Value::Null`). + pub async fn delete_json(&self, path: &str) -> Result { + let res = self + .request(Method::DELETE, path) + .send() + .await + .map_err(map_err)?; + decode_json(res).await + } + + /// POST a JSON body and return the raw response bytes + content-type. + /// Used by `media tts speak`, which writes audio bytes to stdout. 
+ pub async fn post_json_bytes( + &self, + path: &str, + body: &Value, + ) -> Result<(Bytes, Option), RuntimeError> { + let res = self + .request(Method::POST, path) + .json(body) + .send() + .await + .map_err(map_err)?; + if !res.status().is_success() { + return Err(map_http_status(res).await); + } + let ct = res + .headers() + .get(reqwest::header::CONTENT_TYPE) + .and_then(|v| v.to_str().ok()) + .map(str::to_string); + let bytes = res.bytes().await.map_err(map_err)?; + Ok((bytes, ct)) + } + /// Open an SSE / NDJSON stream against `path`. Each `data: ...` chunk is /// yielded as raw bytes; comment / ping lines (`: ...`) are skipped. /// The stream runs until the server closes or the caller drops it. diff --git a/src/cli/tool.rs b/src/cli/tool.rs new file mode 100644 index 00000000..21ee593f --- /dev/null +++ b/src/cli/tool.rs @@ -0,0 +1,500 @@ +//! `openproxy tool *` — manage CLI-tool integrations (claude, codex, copilot, +//! openclaw, hermes, cowork) via `/api/cli-tools/*` (PLAN v3 mục 4.12). +//! +//! `apply ` writes a tool's settings (model/api-key/base-url) by POSTing +//! to the per-tool `/-settings` endpoint. `revert ` resets it via +//! DELETE on the same endpoint. `execute` runs an arbitrary shell command via +//! the server. `run` invokes one of the named tools (`provider-list`, etc.). + +use clap::Subcommand; +use serde_json::{json, Map, Value}; + +use crate::cli::config::ResolvedConfig; +use crate::cli::output::{emit_error, emit_robot, humanln, OutputCtx}; +use crate::cli::runtime::{require_runtime, rt_error_to_exit, Runtime}; + +#[derive(Debug, Clone, Subcommand)] +pub enum ToolCmd { + /// List available built-in CLI tools (provider-list, key-list, ...). + List, + /// Show the saved settings for a per-tool integration. + Show { + /// One of `claude`, `codex`, `copilot`, `openclaw`, `hermes`, `cowork`. + name: String, + }, + /// Apply settings to a per-tool integration. Use `--dry-run` to preview + /// the JSON body without sending it. 
+ Apply { + /// One of `claude`, `codex`, `copilot`, `openclaw`, `hermes`, `cowork`. + name: String, + /// Model id to set (passed as `model` to the server). + #[arg(long)] + model: Option, + /// API key to write (often the OpenProxy key, not a provider key). + #[arg(long, hide_env_values = true)] + api_key: Option, + /// Base URL (defaults to the running server's URL). + #[arg(long)] + endpoint: Option, + /// Print the JSON we would POST without actually sending it. + #[arg(long)] + dry_run: bool, + }, + /// Reset / revert a per-tool integration (DELETE the saved settings). + Revert { name: String }, + /// Execute an arbitrary command via `/api/cli-tools/execute`. + Execute { + /// Command and arguments. Pass `--` before args containing flags. + #[arg(trailing_var_arg = true, allow_hyphen_values = true)] + argv: Vec, + }, + /// Run a named built-in tool (`provider-list`, `key-list`, ...). + Run { + name: String, + /// Trailing arguments forwarded to the tool. + #[arg(trailing_var_arg = true, allow_hyphen_values = true)] + argv: Vec, + }, + /// Show help index from the server. + Doc, + /// Antigravity MITM helpers. + AntigravityMitm { + #[command(subcommand)] + cmd: AntigravityCmd, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum AntigravityCmd { + /// Enable antigravity MITM (optionally aliased). + Enable { + /// Optional alias name to register. + #[arg(long)] + alias: Option, + }, + /// Disable antigravity MITM (and any alias). + Disable { + /// Optional alias name to remove. 
+ #[arg(long)] + alias: Option, + }, +} + +pub async fn run(cmd: ToolCmd, cfg: &ResolvedConfig, ctx: OutputCtx) -> anyhow::Result { + let rt = match require_runtime(cfg).await { + Ok(rt) => rt, + Err(e) => return rt_error_to_exit(ctx, e), + }; + match cmd { + ToolCmd::List => run_list(&rt, ctx).await, + ToolCmd::Show { name } => run_show(&rt, ctx, &name).await, + ToolCmd::Apply { + name, + model, + api_key, + endpoint, + dry_run, + } => run_apply(&rt, ctx, &name, model, api_key, endpoint, dry_run).await, + ToolCmd::Revert { name } => run_revert(&rt, ctx, &name).await, + ToolCmd::Execute { argv } => run_execute(&rt, ctx, argv).await, + ToolCmd::Run { name, argv } => run_run(&rt, ctx, &name, argv).await, + ToolCmd::Doc => run_help(&rt, ctx).await, + ToolCmd::AntigravityMitm { cmd } => match cmd { + AntigravityCmd::Enable { alias } => run_antigravity(&rt, ctx, true, alias).await, + AntigravityCmd::Disable { alias } => run_antigravity(&rt, ctx, false, alias).await, + }, + } +} + +/// Supported per-tool integration names. The server endpoint is +/// `/api/cli-tools/-settings`. 
+const SUPPORTED_TOOLS: &[&str] = &[ + "claude", "codex", "copilot", "openclaw", "hermes", "cowork", "opencode", "droid", +]; + +fn settings_path(name: &str) -> Option { + let lowered = name.to_ascii_lowercase(); + if SUPPORTED_TOOLS.contains(&lowered.as_str()) { + Some(format!("/api/cli-tools/{}-settings", lowered)) + } else { + None + } +} + +async fn run_list(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + match rt.get_json("/api/cli-tools").await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.tool.list", payload)?; + } else { + let tools = payload + .get("tools") + .and_then(Value::as_array) + .cloned() + .unwrap_or_default(); + for tool in &tools { + humanln( + ctx, + format!( + "{:24} [{}] {}", + tool.get("name").and_then(Value::as_str).unwrap_or("?"), + tool.get("category").and_then(Value::as_str).unwrap_or("-"), + tool.get("description") + .and_then(Value::as_str) + .unwrap_or("") + ), + ); + } + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_show(rt: &Runtime, ctx: OutputCtx, name: &str) -> anyhow::Result { + let Some(path) = settings_path(name) else { + return Ok(emit_error( + ctx, + "usage", + &format!( + "unknown tool `{name}` — expected one of: {}", + SUPPORTED_TOOLS.join(", ") + ), + )?); + }; + match rt.get_json(&path).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.tool.show", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_apply( + rt: &Runtime, + ctx: OutputCtx, + name: &str, + model: Option, + api_key: Option, + endpoint: Option, + dry_run: bool, +) -> anyhow::Result { + let Some(path) = settings_path(name) else { + return Ok(emit_error( + ctx, + "usage", + &format!( + "unknown tool `{name}` — expected one of: {}", + SUPPORTED_TOOLS.join(", ") + ), + )?); + }; + let body = build_apply_body(name, &model, api_key.as_deref(), 
endpoint.as_deref()); + + if dry_run { + let preview = json!({"path": path, "body": body}); + if ctx.is_robot() { + emit_robot("openproxy.v1.tool.apply.dry_run", preview)?; + } else { + humanln(ctx, "[dry-run] POST {path}".replace("{path}", &path)); + humanln(ctx, serde_json::to_string_pretty(&body).unwrap_or_default()); + } + return Ok(0); + } + + match rt.post_json(&path, &body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.tool.apply", payload)?; + } else { + humanln(ctx, format!("Applied settings for `{name}`.")); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +/// Build the per-tool POST body. Each tool has a slightly different shape; +/// we cover the agent-friendly common case (model + api_key + base_url). +fn build_apply_body( + name: &str, + model: &Option, + api_key: Option<&str>, + endpoint: Option<&str>, +) -> Value { + let lower = name.to_ascii_lowercase(); + let default_url = "http://127.0.0.1:4623"; + let base_url = endpoint.unwrap_or(default_url); + match lower.as_str() { + "claude" => { + // /api/cli-tools/claude-settings expects {env: {ANTHROPIC_BASE_URL,...}}. + let mut env = Map::new(); + env.insert( + "ANTHROPIC_BASE_URL".to_string(), + Value::String(base_url.to_string()), + ); + if let Some(key) = api_key { + env.insert( + "ANTHROPIC_AUTH_TOKEN".to_string(), + Value::String(key.to_string()), + ); + } + if let Some(m) = model.as_deref() { + env.insert("ANTHROPIC_MODEL".to_string(), Value::String(m.to_string())); + } + json!({"env": Value::Object(env)}) + } + "codex" | "opencode" | "droid" => json!({ + "baseUrl": base_url, + "apiKey": api_key.unwrap_or(""), + "model": model.clone().unwrap_or_default(), + }), + "copilot" => json!({ + "baseUrl": base_url, + "apiKey": api_key, + "models": model + .as_ref() + .map(|m| vec![m.clone()]) + .unwrap_or_default(), + }), + // hermes / cowork / openclaw — `{baseUrl, apiKey, model|models}`. 
+ _ => { + let mut obj = Map::new(); + obj.insert("baseUrl".to_string(), Value::String(base_url.to_string())); + if let Some(k) = api_key { + obj.insert("apiKey".to_string(), Value::String(k.to_string())); + } + if let Some(m) = model.as_deref() { + obj.insert("model".to_string(), Value::String(m.to_string())); + obj.insert( + "models".to_string(), + Value::Array(vec![Value::String(m.to_string())]), + ); + } + Value::Object(obj) + } + } +} + +async fn run_revert(rt: &Runtime, ctx: OutputCtx, name: &str) -> anyhow::Result { + let Some(path) = settings_path(name) else { + return Ok(emit_error(ctx, "usage", &format!("unknown tool `{name}`"))?); + }; + match rt.delete_json(&path).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.tool.revert", payload)?; + } else { + humanln(ctx, format!("Reverted `{name}` settings.")); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_execute(rt: &Runtime, ctx: OutputCtx, argv: Vec) -> anyhow::Result { + if argv.is_empty() { + return Ok(emit_error(ctx, "usage", "execute requires a command")?); + } + let mut iter = argv.into_iter(); + let command = iter.next().unwrap_or_default(); + let rest: Vec = iter.collect(); + let body = json!({ + "command": command, + "args": rest, + }); + match rt.post_json("/api/cli-tools/execute", &body).await { + Ok(payload) => emit_command_result(ctx, "openproxy.v1.tool.execute", payload), + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_run( + rt: &Runtime, + ctx: OutputCtx, + name: &str, + argv: Vec, +) -> anyhow::Result { + let path = format!("/api/cli-tools/run/{}", urlencoding::encode(name)); + let body = json!({ + "command": name, + "args": argv, + }); + match rt.post_json(&path, &body).await { + Ok(payload) => emit_command_result(ctx, "openproxy.v1.tool.run", payload), + Err(e) => rt_error_to_exit(ctx, e), + } +} + +fn emit_command_result(ctx: OutputCtx, schema: &str, payload: Value) -> anyhow::Result { + let exit = payload + 
.get("exit_code") + .or_else(|| payload.get("exitCode")) + .and_then(Value::as_i64) + .map(|c| c as i32) + .unwrap_or(0); + if ctx.is_robot() { + emit_robot(schema, payload)?; + } else { + let stdout = payload.get("stdout").and_then(Value::as_str).unwrap_or(""); + let stderr = payload.get("stderr").and_then(Value::as_str).unwrap_or(""); + if !stdout.is_empty() { + println!("{stdout}"); + } + if !stderr.is_empty() { + eprintln!("{stderr}"); + } + } + Ok(exit) +} + +async fn run_help(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + match rt.get_json("/api/cli-tools/help").await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.tool.help", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_antigravity( + rt: &Runtime, + ctx: OutputCtx, + enable: bool, + alias: Option, +) -> anyhow::Result { + let (alias_method, alias_body) = match (&alias, enable) { + (Some(name), true) => (Some("put"), json!({"alias": name})), + (Some(_), false) => (Some("delete"), Value::Null), + (None, _) => (None, Value::Null), + }; + + // First, toggle the underlying integration. + let core_result = if enable { + rt.post_empty("/api/cli-tools/antigravity-mitm").await + } else { + rt.delete_json("/api/cli-tools/antigravity-mitm").await + }; + let core_payload = match core_result { + Ok(v) => v, + Err(e) => return rt_error_to_exit(ctx, e), + }; + + // Then, optionally adjust the alias. 
+ let alias_payload = match alias_method { + Some("put") => match rt + .put_json("/api/cli-tools/antigravity-mitm/alias", &alias_body) + .await + { + Ok(v) => Some(v), + Err(e) => return rt_error_to_exit(ctx, e), + }, + Some("delete") => match rt + .delete_json("/api/cli-tools/antigravity-mitm/alias") + .await + { + Ok(v) => Some(v), + Err(e) => return rt_error_to_exit(ctx, e), + }, + _ => None, + }; + + let combined = json!({ + "core": core_payload, + "alias": alias_payload, + "enabled": enable, + }); + let schema = if enable { + "openproxy.v1.tool.antigravity.enable" + } else { + "openproxy.v1.tool.antigravity.disable" + }; + if ctx.is_robot() { + emit_robot(schema, combined)?; + } else { + humanln( + ctx, + format!( + "Antigravity MITM {}{}.", + if enable { "enabled" } else { "disabled" }, + alias.map(|a| format!(" (alias `{a}`)")).unwrap_or_default(), + ), + ); + } + Ok(0) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn settings_path_known_tools() { + assert_eq!( + settings_path("Claude"), + Some("/api/cli-tools/claude-settings".to_string()) + ); + assert_eq!( + settings_path("hermes"), + Some("/api/cli-tools/hermes-settings".to_string()) + ); + assert!(settings_path("bogus").is_none()); + } + + #[test] + fn build_apply_body_claude_uses_env_block() { + let body = build_apply_body( + "claude", + &Some("sonnet-4".to_string()), + Some("op_key"), + Some("http://localhost:1234"), + ); + let env = body.get("env").unwrap(); + assert_eq!( + env.get("ANTHROPIC_BASE_URL").and_then(Value::as_str), + Some("http://localhost:1234") + ); + assert_eq!( + env.get("ANTHROPIC_AUTH_TOKEN").and_then(Value::as_str), + Some("op_key") + ); + } + + #[test] + fn build_apply_body_codex_uses_flat_shape() { + let body = build_apply_body( + "codex", + &Some("gpt-4o-mini".to_string()), + Some("op_key"), + None, + ); + assert_eq!( + body.get("model").and_then(Value::as_str), + Some("gpt-4o-mini") + ); + assert_eq!( + body.get("baseUrl").and_then(Value::as_str), + 
Some("http://127.0.0.1:4623") + ); + assert_eq!(body.get("apiKey").and_then(Value::as_str), Some("op_key")); + } +} diff --git a/src/cli/translator.rs b/src/cli/translator.rs new file mode 100644 index 00000000..f773bf29 --- /dev/null +++ b/src/cli/translator.rs @@ -0,0 +1,303 @@ +//! `openproxy translator *` — request translator pipeline (PLAN v3 mục 4.13). +//! +//! Talks to `/api/translator/*`. The server exposes a 3-step pipeline +//! (`/api/translator/translate?step=1..3`): step 1 detects source/target +//! format, step 2 translates source → OpenAI intermediate, step 3 translates +//! OpenAI intermediate → target plus builds the URL/headers. We expose: +//! +//! - `translator formats` → `/api/translator/formats` +//! - `translator translate --from --to ` → step 2 + step 3 combo. +//! - `translator send --provider

` → `/api/translator/send`. +//! - `translator preset list|save|load` — built on top of `/api/translator/save` +//! + `/api/translator/load` (translations dictionary). + +use clap::Subcommand; +use serde_json::{json, Map, Value}; + +use crate::cli::config::ResolvedConfig; +use crate::cli::output::{emit_error, emit_robot, humanln, OutputCtx}; +use crate::cli::runtime::{read_input, require_runtime, rt_error_to_exit, Runtime}; + +#[derive(Debug, Clone, Subcommand)] +pub enum TranslatorCmd { + /// List source/target formats the server supports. + Formats, + /// Translate a request body from one format to another. + Translate { + /// Source format (e.g. `openai`, `claude`, `gemini`). + #[arg(long)] + from: String, + /// Target format (e.g. `openai`, `claude`, `gemini`). + #[arg(long)] + to: String, + /// Path to a JSON request body, or `-` for stdin. + #[arg(long = "from-file", default_value = "-")] + from_file: String, + /// Model id to pass through to the translator. + #[arg(long, default_value = "")] + model: String, + }, + /// Send a translated request to a provider via `/api/translator/send`. + Send { + #[arg(long)] + provider: String, + /// Model id for the request. + #[arg(long, default_value = "")] + model: String, + /// Path to a JSON request body (already in target format), or `-`. + #[arg(long = "from-file", default_value = "-")] + from_file: String, + }, + /// Manage named translator presets persisted on the server. + Preset { + #[command(subcommand)] + cmd: PresetCmd, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum PresetCmd { + /// List saved presets. + List, + /// Save a preset under ``. Body is read from `--from-file` (`-` = stdin). + Save { + name: String, + #[arg(long = "from-file", default_value = "-")] + from_file: String, + }, + /// Load a preset by name. 
+ Load { name: String }, +} + +pub async fn run(cmd: TranslatorCmd, cfg: &ResolvedConfig, ctx: OutputCtx) -> anyhow::Result { + let rt = match require_runtime(cfg).await { + Ok(rt) => rt, + Err(e) => return rt_error_to_exit(ctx, e), + }; + match cmd { + TranslatorCmd::Formats => run_formats(&rt, ctx).await, + TranslatorCmd::Translate { + from, + to, + from_file, + model, + } => run_translate(&rt, ctx, from, to, from_file, model).await, + TranslatorCmd::Send { + provider, + model, + from_file, + } => run_send(&rt, ctx, provider, model, from_file).await, + TranslatorCmd::Preset { cmd } => match cmd { + PresetCmd::List => run_preset_list(&rt, ctx).await, + PresetCmd::Save { name, from_file } => run_preset_save(&rt, ctx, name, from_file).await, + PresetCmd::Load { name } => run_preset_load(&rt, ctx, name).await, + }, + } +} + +async fn run_formats(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + match rt.get_json("/api/translator/formats").await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.translator.formats", payload)?; + } else { + let arr = payload.as_array().cloned().unwrap_or_default(); + for f in &arr { + humanln( + ctx, + format!( + "{:20} {}", + f.get("id").and_then(Value::as_str).unwrap_or("?"), + f.get("description").and_then(Value::as_str).unwrap_or(""), + ), + ); + } + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_translate( + rt: &Runtime, + ctx: OutputCtx, + from: String, + to: String, + from_file: String, + model: String, +) -> anyhow::Result { + let raw = read_input(&from_file)?; + let mut body: Value = serde_json::from_str(raw.trim()) + .map_err(|e| anyhow::anyhow!("--from-file must be JSON: {e}"))?; + + // Ensure the model is on the body so the server can route it. + if !model.is_empty() { + if let Some(obj) = body.as_object_mut() { + obj.insert("model".to_string(), Value::String(model.clone())); + } + } + + // Step 2: translate → OpenAI intermediate. 
+ let step2 = json!({"step": 2, "from": from, "to": "openai", "body": body}); + let openai_body = match rt.post_json("/api/translator/translate", &step2).await { + Ok(v) => v + .get("result") + .and_then(|r| r.get("body")) + .cloned() + .unwrap_or(Value::Null), + Err(e) => return rt_error_to_exit(ctx, e), + }; + + // Step 3: translate OpenAI intermediate → . + let step3 = json!({ + "step": 3, + "provider": &to, + "model": model, + "body": openai_body, + }); + match rt.post_json("/api/translator/translate", &step3).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.translator.translate", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_send( + rt: &Runtime, + ctx: OutputCtx, + provider: String, + model: String, + from_file: String, +) -> anyhow::Result { + let raw = read_input(&from_file)?; + let body: Value = serde_json::from_str(raw.trim()) + .map_err(|e| anyhow::anyhow!("--from-file must be JSON: {e}"))?; + let send_body = json!({ + "provider": provider, + "model": model, + "body": body, + }); + match rt.post_json("/api/translator/send", &send_body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.translator.send", payload)?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&payload).unwrap_or_default(), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_preset_list(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + // Server lacks a list endpoint, but `/api/translator/load` returns all + // translations when called with an empty body. 
+ match rt.post_json("/api/translator/load", &json!({})).await { + Ok(payload) => { + let presets = payload + .get("translations") + .cloned() + .unwrap_or_else(|| json!({})); + if ctx.is_robot() { + emit_robot( + "openproxy.v1.translator.preset.list", + json!({"presets": presets}), + )?; + } else if let Some(obj) = presets.as_object() { + for name in obj.keys() { + humanln(ctx, name); + } + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_preset_save( + rt: &Runtime, + ctx: OutputCtx, + name: String, + from_file: String, +) -> anyhow::Result { + let raw = read_input(&from_file)?; + // The translator save endpoint stores opaque strings, so we encode the + // JSON document as a string keyed by `name`. + let mut map: Map = Map::new(); + map.insert(name.clone(), Value::String(raw.trim().to_string())); + let body = json!({"translations": Value::Object(map)}); + match rt.post_json("/api/translator/save", &body).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.translator.preset.save", payload)?; + } else { + humanln(ctx, format!("Saved preset `{name}`.")); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_preset_load(rt: &Runtime, ctx: OutputCtx, name: String) -> anyhow::Result { + match rt.post_json("/api/translator/load", &json!({})).await { + Ok(payload) => { + let presets = payload + .get("translations") + .cloned() + .unwrap_or_else(|| json!({})); + let raw = presets.get(&name).cloned(); + match raw { + Some(Value::String(s)) => { + let parsed: Value = serde_json::from_str(&s).unwrap_or(Value::String(s)); + if ctx.is_robot() { + emit_robot( + "openproxy.v1.translator.preset.load", + json!({"name": name, "body": parsed}), + )?; + } else { + humanln( + ctx, + serde_json::to_string_pretty(&parsed).unwrap_or_default(), + ); + } + Ok(0) + } + Some(other) => { + if ctx.is_robot() { + emit_robot( + "openproxy.v1.translator.preset.load", + json!({"name": name, "body": other}), + )?; + } else { + 
humanln( + ctx, + serde_json::to_string_pretty(&other).unwrap_or_default(), + ); + } + Ok(0) + } + None => Ok(emit_error( + ctx, + "not_found", + &format!("no preset named `{name}`"), + )?), + } + } + Err(e) => rt_error_to_exit(ctx, e), + } +} diff --git a/src/cli/tunnel_rt.rs b/src/cli/tunnel_rt.rs new file mode 100644 index 00000000..772d8f89 --- /dev/null +++ b/src/cli/tunnel_rt.rs @@ -0,0 +1,184 @@ +//! `openproxy tunnel *` — runtime tunnel commands (PLAN v3 mục 4.11). +//! +//! Distinct from the in-process `core::tunnel::TunnelManager` used by +//! `Command::Tunnel` (Start/Stop/Status), these commands hit the live +//! server's `/api/tunnel/*` endpoints so they can flip persisted settings +//! (cloudflare on/off, tailscale install/login/check, etc.). They are the +//! M5 extension of the M1 stub. + +use clap::Subcommand; +use serde_json::{json, Value}; + +use crate::cli::config::ResolvedConfig; +use crate::cli::output::{emit_error, emit_robot, humanln, OutputCtx}; +use crate::cli::runtime::{require_runtime, rt_error_to_exit, Runtime}; + +#[derive(Debug, Clone, Subcommand)] +pub enum TunnelRtCmd { + /// Turn on a tunnel provider (`cloudflare` is the default). + Enable { + /// Provider name. One of `cloudflare`, `tailscale`. + provider: String, + /// Bind port to forward (default: server port). + #[arg(long)] + port: Option, + }, + /// Turn off a tunnel provider. + Disable { + /// Provider name. One of `cloudflare`, `tailscale`. + provider: String, + }, + /// Tailscale-specific helpers. + Tailscale { + #[command(subcommand)] + cmd: TailscaleCmd, + }, +} + +#[derive(Debug, Clone, Subcommand)] +pub enum TailscaleCmd { + /// Install the tailscale binary (prints the script the server expects + /// the operator to run). + Install, + /// Open `tailscale login` on the server host. + Login, + /// Report whether tailscale is installed + logged in. + Check, + /// Enable the tailscale tunnel. + Enable { + /// Bind port to forward (default: server port). 
+ #[arg(long)] + port: Option, + }, + /// Disable the tailscale tunnel. + Disable, +} + +pub async fn run(cmd: TunnelRtCmd, cfg: &ResolvedConfig, ctx: OutputCtx) -> anyhow::Result { + let rt = match require_runtime(cfg).await { + Ok(rt) => rt, + Err(e) => return rt_error_to_exit(ctx, e), + }; + match cmd { + TunnelRtCmd::Enable { provider, port } => { + run_enable_disable(&rt, ctx, &provider, port, true).await + } + TunnelRtCmd::Disable { provider } => { + run_enable_disable(&rt, ctx, &provider, None, false).await + } + TunnelRtCmd::Tailscale { cmd } => match cmd { + TailscaleCmd::Install => { + simple(&rt, ctx, "/api/tunnel/tailscale-install", "install").await + } + TailscaleCmd::Login => simple(&rt, ctx, "/api/tunnel/tailscale-login", "login").await, + TailscaleCmd::Check => run_check(&rt, ctx).await, + TailscaleCmd::Enable { port } => { + run_enable_disable(&rt, ctx, "tailscale", port, true).await + } + TailscaleCmd::Disable => run_enable_disable(&rt, ctx, "tailscale", None, false).await, + }, + } +} + +async fn run_enable_disable( + rt: &Runtime, + ctx: OutputCtx, + provider: &str, + port: Option, + enable: bool, +) -> anyhow::Result { + let provider = provider.to_ascii_lowercase(); + let (path, body) = match (provider.as_str(), enable) { + ("cloudflare", true) => ("/api/tunnel/enable", json!({"port": port})), + ("cloudflare", false) => ("/api/tunnel/disable", Value::Null), + ("tailscale", true) => ("/api/tunnel/tailscale-enable", json!({"port": port})), + ("tailscale", false) => ("/api/tunnel/tailscale-disable", Value::Null), + _ => { + return Ok(emit_error( + ctx, + "usage", + "provider must be `cloudflare` or `tailscale`", + )?); + } + }; + let result = if body.is_null() { + rt.post_empty(path).await + } else { + rt.post_json(path, &body).await + }; + match result { + Ok(payload) => { + let schema = if enable { + "openproxy.v1.tunnel.enable" + } else { + "openproxy.v1.tunnel.disable" + }; + if ctx.is_robot() { + emit_robot(schema, payload)?; + } else { + 
humanln( + ctx, + format!( + "Tunnel {} {} on `{provider}`.", + if enable { "enabled" } else { "disabled" }, + if enable { "started" } else { "stopped" }, + ), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn simple(rt: &Runtime, ctx: OutputCtx, path: &str, action: &str) -> anyhow::Result { + match rt.post_empty(path).await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot( + &format!("openproxy.v1.tunnel.tailscale.{}", action), + payload, + )?; + } else { + let msg = payload + .get("message") + .and_then(Value::as_str) + .unwrap_or("ok"); + humanln(ctx, msg); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} + +async fn run_check(rt: &Runtime, ctx: OutputCtx) -> anyhow::Result { + match rt.get_json("/api/tunnel/tailscale-check").await { + Ok(payload) => { + if ctx.is_robot() { + emit_robot("openproxy.v1.tunnel.tailscale.check", payload)?; + } else { + let installed = payload + .get("installed") + .and_then(Value::as_bool) + .unwrap_or(false); + let logged_in = payload + .get("loggedIn") + .and_then(Value::as_bool) + .unwrap_or(false); + let daemon = payload + .get("daemonRunning") + .and_then(Value::as_bool) + .unwrap_or(false); + humanln( + ctx, + format!( + "tailscale: installed={installed} daemon_running={daemon} logged_in={logged_in}" + ), + ); + } + Ok(0) + } + Err(e) => rt_error_to_exit(ctx, e), + } +} diff --git a/src/main.rs b/src/main.rs index b4d2572c..109d86ae 100644 --- a/src/main.rs +++ b/src/main.rs @@ -7,8 +7,9 @@ use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; use openproxy::cli::config::ResolvedConfig; use openproxy::cli::{ - chat as cli_chat, logs as cli_logs, provider_oauth, quota as cli_quota, usage as cli_usage, - AuthCmd, Cli, Command, ProviderCmd, SchemaCmd, ServerCmd, + chat as cli_chat, logs as cli_logs, media as cli_media, mitm as cli_mitm, provider_oauth, + quota as cli_quota, tool as cli_tool, translator as cli_translator, tunnel_rt as cli_tunnel_rt, + usage as 
cli_usage, AuthCmd, Cli, Command, ProviderCmd, SchemaCmd, ServerCmd, TunnelCmd, }; use openproxy::db::watcher::spawn_watcher; use openproxy::db::Db; @@ -65,12 +66,57 @@ async fn main() -> anyhow::Result<()> { openproxy::cli::models::run(cmd.clone(), &db, ctx).await?; return Ok(()); } - Command::Tunnel { cmd } => { - let db = Db::load().await?; - let db = Arc::new(db); - openproxy::cli::run_tunnel(cmd.clone(), db, ctx).await?; - return Ok(()); - } + Command::Tunnel { cmd } => match cmd { + TunnelCmd::Start { .. } | TunnelCmd::Stop | TunnelCmd::Status => { + let db = Db::load().await?; + let db = Arc::new(db); + openproxy::cli::run_tunnel(cmd.clone(), db, ctx).await?; + return Ok(()); + } + TunnelCmd::Enable { provider, port } => { + let exit = cli_tunnel_rt::run( + cli_tunnel_rt::TunnelRtCmd::Enable { + provider: provider.clone(), + port: *port, + }, + &resolved, + ctx, + ) + .await?; + if exit != 0 { + std::process::exit(exit); + } + return Ok(()); + } + TunnelCmd::Disable { provider } => { + let exit = cli_tunnel_rt::run( + cli_tunnel_rt::TunnelRtCmd::Disable { + provider: provider.clone(), + }, + &resolved, + ctx, + ) + .await?; + if exit != 0 { + std::process::exit(exit); + } + return Ok(()); + } + TunnelCmd::Tailscale { cmd: ts_cmd } => { + let exit = cli_tunnel_rt::run( + cli_tunnel_rt::TunnelRtCmd::Tailscale { + cmd: ts_cmd.clone(), + }, + &resolved, + ctx, + ) + .await?; + if exit != 0 { + std::process::exit(exit); + } + return Ok(()); + } + }, Command::Route { model, combo, @@ -232,6 +278,34 @@ async fn main() -> anyhow::Result<()> { } return Ok(()); } + Command::Mitm { cmd } => { + let exit = cli_mitm::run(cmd.clone(), &resolved, ctx).await?; + if exit != 0 { + std::process::exit(exit); + } + return Ok(()); + } + Command::Tool { cmd } => { + let exit = cli_tool::run(cmd.clone(), &resolved, ctx).await?; + if exit != 0 { + std::process::exit(exit); + } + return Ok(()); + } + Command::Translator { cmd } => { + let exit = cli_translator::run(cmd.clone(), 
&resolved, ctx).await?; + if exit != 0 { + std::process::exit(exit); + } + return Ok(()); + } + Command::Media { cmd } => { + let exit = cli_media::run(cmd.clone(), &resolved, ctx).await?; + if exit != 0 { + std::process::exit(exit); + } + return Ok(()); + } } } diff --git a/tests/cli_m5_robot_envelopes.rs b/tests/cli_m5_robot_envelopes.rs new file mode 100644 index 00000000..8f61afb6 --- /dev/null +++ b/tests/cli_m5_robot_envelopes.rs @@ -0,0 +1,470 @@ +//! M5 CLI integration tests — mitm / tunnel (runtime) / tool / translator / media. +//! +//! Exercises the `openproxy` binary against a wiremock server and asserts the +//! `--robot` JSON envelopes. We hit one happy-path per subcommand group; the +//! detailed handler tests live in unit tests inside each `cli/*.rs` module. + +#![cfg(test)] + +use assert_cmd::prelude::*; +use serde_json::{json, Value}; +use std::process::Command; +use wiremock::matchers::{method, path}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +const API_KEY: &str = "test-cli-key"; + +async fn boot_server() -> MockServer { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/api/health")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({"ok": true}))) + .mount(&server) + .await; + server +} + +fn op(server: &MockServer, args: &[&str]) -> std::process::Output { + Command::cargo_bin("openproxy") + .expect("locate openproxy binary") + .env("OPENPROXY_URL", server.uri()) + .env("OPENPROXY_API_KEY", API_KEY) + .env( + "DATA_DIR", + tempfile::tempdir() + .expect("tempdir") + .path() + .to_string_lossy() + .to_string(), + ) + .args(args) + .output() + .expect("run openproxy") +} + +fn op_stdin(server: &MockServer, args: &[&str], stdin: &str) -> std::process::Output { + use std::io::Write; + use std::process::Stdio; + + let mut child = Command::cargo_bin("openproxy") + .expect("locate openproxy binary") + .env("OPENPROXY_URL", server.uri()) + .env("OPENPROXY_API_KEY", API_KEY) + .env( + 
"DATA_DIR", + tempfile::tempdir() + .expect("tempdir") + .path() + .to_string_lossy() + .to_string(), + ) + .args(args) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .expect("spawn openproxy"); + child + .stdin + .as_mut() + .expect("stdin") + .write_all(stdin.as_bytes()) + .expect("write stdin"); + child.wait_with_output().expect("wait") +} + +fn parse_robot(stdout: &[u8]) -> Value { + let s = std::str::from_utf8(stdout).expect("utf8 stdout"); + serde_json::from_str(s.trim()).unwrap_or_else(|e| { + panic!("invalid robot envelope: {e}\nraw: {s}"); + }) +} + +// ─── mitm ─────────────────────────────────────────────────────────────────── + +#[tokio::test(flavor = "multi_thread")] +async fn mitm_status_emits_envelope() { + let server = boot_server().await; + Mock::given(method("GET")) + .and(path("/api/mitm-config")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "enabled": true, + "routes": {"claude": {"upstreamUrl": "https://api.anthropic.com"}}, + "certStatus": {"fingerprint": "abc"}, + }))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "mitm", "status"]); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.mitm.status"); + assert_eq!(env["ok"], true); + assert_eq!(env["data"]["enabled"], true); + assert_eq!(env["data"]["routes"], 1); +} + +#[tokio::test(flavor = "multi_thread")] +async fn mitm_start_emits_envelope() { + let server = boot_server().await; + Mock::given(method("POST")) + .and(path("/api/mitm/start")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({"started": true}))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "mitm", "start"]); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], 
"openproxy.v1.mitm.start"); + assert_eq!(env["data"]["started"], true); +} + +#[tokio::test(flavor = "multi_thread")] +async fn mitm_cert_generate_emits_envelope() { + let server = boot_server().await; + Mock::given(method("POST")) + .and(path("/api/mitm/cert/generate")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({"fingerprint": "deadbeef"}))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "mitm", "cert", "generate"]); + assert!(out.status.success()); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.mitm.cert.generate"); + assert_eq!(env["data"]["fingerprint"], "deadbeef"); +} + +#[tokio::test(flavor = "multi_thread")] +async fn mitm_config_apply_reads_stdin() { + let server = boot_server().await; + Mock::given(method("PUT")) + .and(path("/api/mitm-config")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({"ok": true}))) + .mount(&server) + .await; + + let body = r#"{"routerBaseUrl":"http://router.example/"}"#; + let out = op_stdin( + &server, + &["--robot", "mitm", "config", "apply", "--from-file", "-"], + body, + ); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.mitm.config.apply"); +} + +// ─── tunnel (runtime) ─────────────────────────────────────────────────────── + +#[tokio::test(flavor = "multi_thread")] +async fn tunnel_enable_emits_envelope() { + let server = boot_server().await; + Mock::given(method("POST")) + .and(path("/api/tunnel/enable")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({"enabled": true}))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "tunnel", "enable", "cloudflare"]); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.tunnel.enable"); + 
assert_eq!(env["data"]["enabled"], true); +} + +#[tokio::test(flavor = "multi_thread")] +async fn tunnel_tailscale_check_emits_envelope() { + let server = boot_server().await; + Mock::given(method("GET")) + .and(path("/api/tunnel/tailscale-check")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "installed": true, + "loggedIn": false, + "daemonRunning": true, + }))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "tunnel", "tailscale", "check"]); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.tunnel.tailscale.check"); + assert_eq!(env["data"]["installed"], true); + assert_eq!(env["data"]["loggedIn"], false); +} + +// ─── tool ─────────────────────────────────────────────────────────────────── + +#[tokio::test(flavor = "multi_thread")] +async fn tool_list_emits_envelope() { + let server = boot_server().await; + Mock::given(method("GET")) + .and(path("/api/cli-tools")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "tools": [ + {"name": "provider-list", "description": "List providers", "category": "provider"}, + {"name": "key-list", "description": "List keys", "category": "key"}, + ], + }))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "tool", "list"]); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.tool.list"); + assert_eq!(env["data"]["tools"].as_array().map(Vec::len), Some(2)); +} + +#[tokio::test(flavor = "multi_thread")] +async fn tool_apply_dry_run_does_not_call_server() { + // No mock for POST /api/cli-tools/claude-settings — if the binary + // tries to hit it, wiremock will return 404 and we'll see a failure. 
+ let server = boot_server().await; + let out = op( + &server, + &[ + "--robot", + "tool", + "apply", + "claude", + "--model", + "claude-sonnet-4", + "--api-key", + "op_test", + "--endpoint", + "http://router.example", + "--dry-run", + ], + ); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.tool.apply.dry_run"); + assert_eq!(env["data"]["path"], "/api/cli-tools/claude-settings"); + assert_eq!( + env["data"]["body"]["env"]["ANTHROPIC_BASE_URL"], + "http://router.example" + ); + assert_eq!( + env["data"]["body"]["env"]["ANTHROPIC_AUTH_TOKEN"], + "op_test" + ); +} + +#[tokio::test(flavor = "multi_thread")] +async fn tool_revert_calls_delete() { + let server = boot_server().await; + Mock::given(method("DELETE")) + .and(path("/api/cli-tools/codex-settings")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({"reverted": true}))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "tool", "revert", "codex"]); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.tool.revert"); +} + +// ─── translator ───────────────────────────────────────────────────────────── + +#[tokio::test(flavor = "multi_thread")] +async fn translator_formats_emits_envelope() { + let server = boot_server().await; + Mock::given(method("GET")) + .and(path("/api/translator/formats")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + {"id": "openai", "name": "OpenAI", "description": "Chat Completions"}, + {"id": "claude", "name": "Claude", "description": "Messages"}, + ]))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "translator", "formats"]); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + 
assert_eq!(env["schema"], "openproxy.v1.translator.formats"); + assert_eq!(env["data"].as_array().map(Vec::len), Some(2)); +} + +#[tokio::test(flavor = "multi_thread")] +async fn translator_preset_save_posts_to_translator_save() { + let server = boot_server().await; + Mock::given(method("POST")) + .and(path("/api/translator/save")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({"success": true}))) + .mount(&server) + .await; + + let out = op_stdin( + &server, + &["--robot", "translator", "preset", "save", "my-preset"], + r#"{"foo": "bar"}"#, + ); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.translator.preset.save"); +} + +// ─── media ────────────────────────────────────────────────────────────────── + +#[tokio::test(flavor = "multi_thread")] +async fn media_providers_list_emits_envelope() { + let server = boot_server().await; + Mock::given(method("GET")) + .and(path("/api/media-providers")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "tts": [], + "stt": [], + "embedding": [], + "image": [], + "search": [], + }))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "media", "providers", "list"]); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.media.providers.list"); +} + +#[tokio::test(flavor = "multi_thread")] +async fn media_tts_voices_emits_envelope() { + let server = boot_server().await; + Mock::given(method("GET")) + .and(path("/api/media-providers/tts/voices")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "voices": [{"id": "alloy", "name": "Alloy"}], + }))) + .mount(&server) + .await; + + let out = op(&server, &["--robot", "media", "tts", "voices"]); + assert!( + out.status.success(), + "stderr: {}", + 
String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.media.tts.voices"); +} + +#[tokio::test(flavor = "multi_thread")] +async fn media_tts_speak_writes_bytes_to_stdout() { + let server = boot_server().await; + let audio_bytes = b"FAKE_MP3_BYTES_PAYLOAD"; + Mock::given(method("POST")) + .and(path("/v1/audio/speech")) + .respond_with( + ResponseTemplate::new(200) + .set_body_bytes(audio_bytes.to_vec()) + .insert_header("content-type", "audio/mpeg"), + ) + .mount(&server) + .await; + + let out = op_stdin( + &server, + &[ + "media", + "tts", + "speak", + "--provider", + "elevenlabs", + "--model", + "eleven_turbo_v2", + "--voice", + "alice", + ], + "Hello world", + ); + assert!( + out.status.success(), + "stderr: {}", + String::from_utf8_lossy(&out.stderr) + ); + assert_eq!(&out.stdout[..], &audio_bytes[..]); +} + +#[tokio::test(flavor = "multi_thread")] +async fn media_web_fetch_emits_envelope() { + let server = boot_server().await; + Mock::given(method("POST")) + .and(path("/v1/web/fetch")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "content": "# Page title\n", + "format": "markdown", + }))) + .mount(&server) + .await; + + let out = op( + &server, + &[ + "--robot", + "media", + "web", + "fetch", + "https://example.com", + "--provider", + "firecrawl", + ], + ); + assert!( + out.status.success(), + "status: {:?}\nstdout: {}\nstderr: {}", + out.status.code(), + String::from_utf8_lossy(&out.stdout), + String::from_utf8_lossy(&out.stderr) + ); + let env = parse_robot(&out.stdout); + assert_eq!(env["schema"], "openproxy.v1.media.web.fetch"); + assert_eq!(env["data"]["content"], "# Page title\n"); +}