Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
//! CLI argument parsing for sensing-server
//!
//! Extracted from main.rs as part of ADR-051 Phase 1

use clap::Parser;
use std::path::PathBuf;

/// WiFi-DensePose sensing server
///
/// Command-line arguments for the sensing server binary. Defaults are
/// expressed with `default_value_t` so numeric defaults are type-checked
/// at compile time instead of parsed from string literals at startup.
#[derive(Parser, Debug, Clone)]
#[command(name = "sensing-server", about = "WiFi-DensePose sensing server")]
pub struct Args {
    /// HTTP port for UI and REST API
    #[arg(long, default_value_t = 8080)]
    pub http_port: u16,

    /// WebSocket port for sensing stream
    #[arg(long, default_value_t = 8765)]
    pub ws_port: u16,

    /// UDP port for ESP32 CSI frames
    #[arg(long, default_value_t = 5005)]
    pub udp_port: u16,

    /// Path to UI static files
    #[arg(long, default_value = "../../ui")]
    pub ui_path: PathBuf,

    /// Tick interval in milliseconds (default 100 ms = 10 fps for smooth pose animation)
    #[arg(long, default_value_t = 100)]
    pub tick_ms: u64,

    /// Bind address (default 127.0.0.1; set to 0.0.0.0 for network access)
    #[arg(long, default_value = "127.0.0.1", env = "SENSING_BIND_ADDR")]
    pub bind_addr: String,

    /// Data source: auto, wifi, esp32, simulate
    #[arg(long, default_value = "auto")]
    pub source: String,

    /// Run vital sign detection benchmark (1000 frames) and exit
    #[arg(long)]
    pub benchmark: bool,

    /// Load model config from an RVF container at startup
    #[arg(long, value_name = "PATH")]
    pub load_rvf: Option<PathBuf>,

    /// Save current model state as an RVF container on shutdown
    #[arg(long, value_name = "PATH")]
    pub save_rvf: Option<PathBuf>,

    /// Load a trained .rvf model for inference
    #[arg(long, value_name = "PATH")]
    pub model: Option<PathBuf>,

    /// Training data directory
    #[arg(long, value_name = "DIR")]
    pub train_data: Option<PathBuf>,

    /// Validate model on test set and exit
    #[arg(long)]
    pub validate: bool,

    /// Logging verbosity (v=info, vv=debug, vvv=trace)
    #[arg(short, long, action = clap::ArgAction::Count)]
    pub verbose: u8,

    /// Disable vital sign detection
    #[arg(long)]
    pub no_vitals: bool,

    /// Enable trainer API endpoints
    #[arg(long)]
    pub enable_trainer: bool,

    /// Embedding model for semantic search
    #[arg(long, value_name = "PATH")]
    pub embedding_model: Option<PathBuf>,

    /// Number of recent CSI frames to keep for breathing detection
    #[arg(long, default_value_t = 300)]
    pub vitals_history: usize,

    /// CSI frame rate for breathing detection (Hz)
    #[arg(long, default_value_t = 100.0)]
    pub vitals_fps: f64,
}

impl Args {
/// Parse CLI arguments
pub fn parse_args() -> Self {
Self::parse()
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
//! Application state for sensing-server
//!
//! Extracted from main.rs as part of ADR-051 Phase 1
//!
//! NOTE: This is a transitional module. The full AppStateInner (37 fields)
//! will be decomposed further in subsequent phases:
//! - Phase 2: Extract vitals state
//! - Phase 3: Extract recording state
//! - Phase 4: Extract training state
//! - Phase 5: Extract model management

use std::collections::VecDeque;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::{broadcast, RwLock};

use crate::adaptive_classifier::AdaptiveModel;
use crate::rvf_container::{RvfContainerInfo, ProgressiveLoader};
use crate::vital_signs::{VitalSignDetector, VitalSigns};

// Re-export types that will be moved in later phases
pub use super::{ClassificationInfo, FeatureInfo, SensingUpdate, Esp32VitalsPacket, WasmOutputPacket};

/// Number of frames retained in `frame_history` for temporal analysis.
///
/// At 500 ms ticks this covers ~50 seconds; at 100 ms ticks ~10 seconds.
/// Note: the vital-sign frame history is sized separately (via the
/// `--vitals-history` CLI flag), not by this constant.
pub const FRAME_HISTORY_CAPACITY: usize = 100;

/// Application state (transitional - will be decomposed further)
///
/// See ADR-051 for the full decomposition plan. Field groups below are
/// annotated with the phase in which they are scheduled to be extracted.
pub struct AppStateInner {
// Core sensing state
/// Most recent sensing update; `None` until the first update is produced.
pub latest_update: Option<SensingUpdate>,
/// Rolling RSSI samples (pre-allocated for 1000 entries in `new`).
pub rssi_history: VecDeque<f64>,
/// Recent CSI feature frames for temporal analysis; see [`FRAME_HISTORY_CAPACITY`].
pub frame_history: VecDeque<Vec<f64>>,
/// Tick counter; starts at 0 (presumably advanced by the main tick loop — not shown here).
pub tick: u64,
/// Configured data source name (matches the CLI `--source` value).
pub source: String,
/// Broadcast channel used to fan serialized updates out to subscribers.
pub tx: broadcast::Sender<String>,
/// Running count of detections since startup.
pub total_detections: u64,
/// Server start time, for uptime reporting.
pub start_time: Instant,

// Vital signs state (Phase 2: extract to VitalsState)
pub vital_detector: VitalSignDetector,
/// Latest raw vital-sign estimate from the detector.
pub latest_vitals: VitalSigns,
// Smoothed heart/breathing rates and confidences (smoothing scheme lives
// outside this module — not shown here).
pub smoothed_hr: f64,
pub smoothed_br: f64,
pub smoothed_hr_conf: f64,
pub smoothed_br_conf: f64,
/// Recent heart-rate samples (capacity 15 — see `new`).
pub hr_buffer: VecDeque<f64>,
/// Recent breathing-rate samples (capacity 15 — see `new`).
pub br_buffer: VecDeque<f64>,
/// Last vitals packet received from an ESP32 edge device, if any.
pub edge_vitals: Option<Esp32VitalsPacket>,
/// Last event packet produced by the WASM pipeline, if any.
pub latest_wasm_events: Option<WasmOutputPacket>,

// Model state (Phase 5: extract to ModelState)
/// Metadata of the RVF container loaded at startup, if one was given.
pub rvf_info: Option<RvfContainerInfo>,
/// Where to save model state on shutdown (CLI `--save-rvf`).
pub save_rvf_path: Option<PathBuf>,
pub progressive_loader: Option<ProgressiveLoader>,
pub active_sona_profile: Option<String>,
/// True once a model has been successfully loaded.
pub model_loaded: bool,
/// Models discovered on disk, stored as raw JSON descriptors.
pub discovered_models: Vec<serde_json::Value>,
/// Identifier of the currently active model, if one is selected.
pub active_model_id: Option<String>,
pub adaptive_model: Option<AdaptiveModel>,

// Motion classification state
// Smoothed person-presence and motion scores (smoothing performed elsewhere).
pub smoothed_person_score: f64,
pub smoothed_motion: f64,
/// Current motion-level label; starts as "unknown".
pub current_motion_level: String,
// Debounce machinery for motion-level transitions: a candidate label must
// presumably persist for several ticks before being adopted — confirm in tick loop.
pub debounce_counter: u32,
pub debounce_candidate: String,
// Baseline motion calibration accumulated over `baseline_frames` frames.
pub baseline_motion: f64,
pub baseline_frames: u64,

// Recording state (Phase 3: extract to RecordingState)
/// Known recordings, stored as raw JSON descriptors.
pub recordings: Vec<serde_json::Value>,
/// True while a recording is in progress.
pub recording_active: bool,
/// Start time of the in-progress recording, if any.
pub recording_start_time: Option<Instant>,
/// Identifier of the in-progress recording, if any.
pub recording_current_id: Option<String>,
/// Watch channel used to signal the recording task to stop.
pub recording_stop_tx: Option<tokio::sync::watch::Sender<bool>>,

// Training state (Phase 4: extract to TrainingState)
/// Training status label; starts as "idle".
pub training_status: String,
/// Raw JSON training configuration, if one has been supplied.
pub training_config: Option<serde_json::Value>,
}

/// Shared, cloneable handle to [`AppStateInner`]: an `Arc` for shared
/// ownership across tasks, with a tokio `RwLock` for async
/// many-readers / one-writer access.
pub type SharedState = Arc<RwLock<AppStateInner>>;

impl AppStateInner {
    /// Create a new state instance with default values.
    ///
    /// * `tx` — broadcast channel used to fan serialized updates out to
    ///   subscribers (e.g. WebSocket clients).
    /// * `source` — configured data source name (matches CLI `--source`:
    ///   auto, wifi, esp32, simulate).
    ///
    /// All counters start at zero, buffers are pre-allocated to their
    /// working capacities, and label fields start as "unknown"/"idle".
    pub fn new(tx: broadcast::Sender<String>, source: String) -> Self {
        // Named capacities (kept alongside FRAME_HISTORY_CAPACITY's
        // convention) so the magic numbers are documented in one place.
        const RSSI_HISTORY_CAPACITY: usize = 1000;
        const VITALS_BUFFER_CAPACITY: usize = 15; // HR/BR smoothing windows

        Self {
            latest_update: None,
            rssi_history: VecDeque::with_capacity(RSSI_HISTORY_CAPACITY),
            frame_history: VecDeque::with_capacity(FRAME_HISTORY_CAPACITY),
            tick: 0,
            source,
            tx,
            total_detections: 0,
            start_time: Instant::now(),
            vital_detector: VitalSignDetector::default(),
            latest_vitals: VitalSigns::default(),
            smoothed_hr: 0.0,
            smoothed_br: 0.0,
            smoothed_hr_conf: 0.0,
            smoothed_br_conf: 0.0,
            hr_buffer: VecDeque::with_capacity(VITALS_BUFFER_CAPACITY),
            br_buffer: VecDeque::with_capacity(VITALS_BUFFER_CAPACITY),
            edge_vitals: None,
            latest_wasm_events: None,
            rvf_info: None,
            save_rvf_path: None,
            progressive_loader: None,
            active_sona_profile: None,
            model_loaded: false,
            discovered_models: Vec::new(),
            active_model_id: None,
            adaptive_model: None,
            smoothed_person_score: 0.0,
            smoothed_motion: 0.0,
            current_motion_level: "unknown".to_string(),
            debounce_counter: 0,
            debounce_candidate: "unknown".to_string(),
            baseline_motion: 0.0,
            baseline_frames: 0,
            recordings: Vec::new(),
            recording_active: false,
            recording_start_time: None,
            recording_current_id: None,
            recording_stop_tx: None,
            training_status: "idle".to_string(),
            training_config: None,
        }
    }
}