Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions src-tauri/src/agents/runner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1886,6 +1886,22 @@ fn openai_tool_definitions() -> Vec<Value> {
}
}
}),
json!({
"type": "function",
"function": {
"name": "patch_file",
"description": "Replace a unique string in a file with new content. Use this instead of write_file when you only need to change part of a file. old_string must match exactly one location in the file.",
"parameters": {
"type": "object",
"properties": {
"path": { "type": "string", "description": "File path to patch" },
"old_string": { "type": "string", "description": "Exact text to find (must be unique in file)" },
"new_string": { "type": "string", "description": "Replacement text" }
},
"required": ["path", "old_string", "new_string"]
}
}
}),
json!({
"type": "function",
"function": {
Expand Down Expand Up @@ -1968,6 +1984,7 @@ fn map_tool_name(name: &str) -> Option<ToolName> {
match name {
"read_file" => Some(ToolName::ReadFile),
"write_file" => Some(ToolName::WriteFile),
"patch_file" => Some(ToolName::PatchFile),
"list_dir" => Some(ToolName::ListDir),
"search_files" => Some(ToolName::SearchFiles),
"run_command" => Some(ToolName::RunCommand),
Expand Down
171 changes: 166 additions & 5 deletions src-tauri/src/tools/executor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ fn sensitive_globset() -> &'static GlobSet {
pub enum ToolName {
ReadFile,
WriteFile,
PatchFile,
ListDir,
SearchFiles,
RunCommand,
Expand All @@ -65,6 +66,25 @@ pub struct ToolResult {
pub is_error: bool,
}

/// Directory names that are always excluded from traversal — dependency
/// caches, build artifacts, and VCS metadata that only add noise.
const SKIP_DIRS: &[&str] = &[
    "node_modules",
    ".git",
    "target",
    "dist",
    "build",
    ".next",
    "__pycache__",
    ".venv",
    "venv",
    ".turbo",
    ".cache",
];

/// Returns `true` when `name` is one of the always-skipped directory names.
/// The comparison is exact (case-sensitive) against [`SKIP_DIRS`].
fn should_skip_dir(name: &str) -> bool {
    SKIP_DIRS.iter().any(|skip| *skip == name)
}

#[derive(Debug, Clone)]
pub struct ToolExecutor {
pub sandbox: PathBuf,
Expand Down Expand Up @@ -139,6 +159,7 @@ impl ToolExecutor {
let result = match tool {
ToolName::ReadFile => self.read_file(&call.input).await,
ToolName::WriteFile => self.write_file(&call.input).await,
ToolName::PatchFile => self.patch_file(&call.input).await,
ToolName::ListDir => self.list_dir(&call.input).await,
ToolName::SearchFiles => self.search_files(&call.input).await,
ToolName::RunCommand => self.run_command(&call.input).await,
Expand Down Expand Up @@ -190,6 +211,50 @@ impl ToolExecutor {
Ok(format!("Written {} bytes to {}", content.len(), path_str))
}

/// Replace exactly one occurrence of `old_string` in the file at `path`
/// with `new_string`.
///
/// Expects a JSON object with string fields `path`, `old_string`, and
/// `new_string`. Returns a short human-readable summary of the patch, or a
/// validation error when a field is missing, the target text is empty or
/// absent, or it is ambiguous (found more than once) — so the caller can
/// supply more surrounding context instead of patching the wrong location.
async fn patch_file(&self, input: &serde_json::Value) -> AppResult<String> {
    let path_str = input["path"]
        .as_str()
        .ok_or_else(|| AppError::Validation("Missing 'path' field".to_string()))?;
    let old_string = input["old_string"]
        .as_str()
        .ok_or_else(|| AppError::Validation("Missing 'old_string' field".to_string()))?;
    let new_string = input["new_string"]
        .as_str()
        .ok_or_else(|| AppError::Validation("Missing 'new_string' field".to_string()))?;

    // An empty needle matches at every char boundary: `matches("")` would
    // report a misleading "found N times" error on any non-empty file, and
    // would silently prepend to an empty one. Reject it up front.
    if old_string.is_empty() {
        return Err(AppError::Validation(
            "old_string must not be empty".to_string(),
        ));
    }

    // Sandbox check: resolve the path through the same validator the other
    // file tools use before touching the filesystem.
    let safe_path = self.validate_path(path_str)?;

    let content = tokio::fs::read_to_string(&safe_path)
        .await
        .map_err(AppError::from)?;

    // Require uniqueness so the caller cannot accidentally patch the wrong
    // one of several identical snippets.
    let occurrences = content.matches(old_string).count();
    if occurrences == 0 {
        return Err(AppError::Validation(format!(
            "old_string not found in {}",
            path_str
        )));
    }
    if occurrences > 1 {
        return Err(AppError::Validation(format!(
            "old_string found {} times in {} — must be unique. Add more context lines to disambiguate.",
            occurrences, path_str
        )));
    }

    let new_content = content.replacen(old_string, new_string, 1);
    tokio::fs::write(&safe_path, &new_content)
        .await
        .map_err(AppError::from)?;

    // Report line counts of the replaced/inserted text for the tool log.
    let old_lines = old_string.lines().count();
    let new_lines = new_string.lines().count();
    Ok(format!(
        "Patched {}: replaced {} lines with {} lines",
        path_str, old_lines, new_lines
    ))
}

async fn list_dir(&self, input: &serde_json::Value) -> AppResult<String> {
let Some(path_str) = input["path"].as_str() else {
return Err(AppError::Validation("Missing 'path' field".to_string()));
Expand All @@ -198,7 +263,19 @@ impl ToolExecutor {
let sandbox_canonical = self.sandbox.canonicalize().map_err(AppError::from)?;

let mut entries = Vec::new();
for entry_result in WalkDir::new(&safe_path).max_depth(3) {
let walker = WalkDir::new(&safe_path)
.max_depth(3)
.into_iter()
.filter_entry(|e| {
if e.file_type().is_dir() {
if let Some(name) = e.file_name().to_str() {
return !should_skip_dir(name);
}
}
true
});

for entry_result in walker {
let entry = match entry_result {
Ok(entry) => entry,
Err(_) => continue,
Expand Down Expand Up @@ -239,7 +316,18 @@ impl ToolExecutor {
.map_err(|error| AppError::Validation(format!("Invalid regex: {error}")))?;

let mut results = Vec::new();
for entry_result in WalkDir::new(&safe_path) {
let walker = WalkDir::new(&safe_path)
.into_iter()
.filter_entry(|e| {
if e.file_type().is_dir() {
if let Some(name) = e.file_name().to_str() {
return !should_skip_dir(name);
}
}
true
});

for entry_result in walker {
let entry = match entry_result {
Ok(entry) => entry,
Err(_) => continue,
Expand Down Expand Up @@ -309,11 +397,38 @@ impl ToolExecutor {

let child = command.spawn().map_err(AppError::from)?;

const MAX_OUTPUT_BYTES: usize = 256 * 1024; // 256KB limit

match tokio::time::timeout(self.command_timeout, child.wait_with_output()).await {
Ok(Ok(output)) => {
let stdout = String::from_utf8_lossy(&output.stdout);
let stderr = String::from_utf8_lossy(&output.stderr);
let stdout_raw = &output.stdout;
let stderr_raw = &output.stderr;
let exit_code = output.status.code().unwrap_or(-1);

let stdout = if stdout_raw.len() > MAX_OUTPUT_BYTES {
let truncated = String::from_utf8_lossy(&stdout_raw[..MAX_OUTPUT_BYTES]);
format!(
"{}\n\n... [truncated: {} total bytes, showing first {}]",
truncated,
stdout_raw.len(),
MAX_OUTPUT_BYTES
)
} else {
String::from_utf8_lossy(stdout_raw).to_string()
};

let stderr = if stderr_raw.len() > MAX_OUTPUT_BYTES {
let truncated = String::from_utf8_lossy(&stderr_raw[..MAX_OUTPUT_BYTES]);
format!(
"{}\n\n... [truncated: {} total bytes, showing first {}]",
truncated,
stderr_raw.len(),
MAX_OUTPUT_BYTES
)
} else {
String::from_utf8_lossy(stderr_raw).to_string()
};

Ok(format!(
"exit_code: {}\nstdout:\n{}\nstderr:\n{}",
exit_code, stdout, stderr
Expand Down Expand Up @@ -344,7 +459,53 @@ impl ToolExecutor {
.await
.map_err(AppError::from)?;

response.text().await.map_err(AppError::from)
let body = response.text().await.map_err(AppError::from)?;

// Parse DuckDuckGo response into readable format
let parsed: Value = serde_json::from_str(&body).unwrap_or_default();
let mut results = Vec::new();

// Abstract/instant answer
if let Some(abstract_text) = parsed["AbstractText"].as_str() {
if !abstract_text.is_empty() {
results.push(format!("## Summary\n{}", abstract_text));
if let Some(url) = parsed["AbstractURL"].as_str() {
results.push(format!("Source: {}", url));
}
}
}

// Answer (direct)
if let Some(answer) = parsed["Answer"].as_str() {
if !answer.is_empty() {
results.push(format!("## Answer\n{}", answer));
}
}

// Related topics
if let Some(topics) = parsed["RelatedTopics"].as_array() {
let topic_entries: Vec<String> = topics
.iter()
.filter_map(|t| {
let text = t["Text"].as_str()?;
let url = t["FirstURL"].as_str().unwrap_or("");
if text.is_empty() {
return None;
}
Some(format!("- {} ({})", text, url))
})
.take(8)
.collect();
if !topic_entries.is_empty() {
results.push(format!("## Related\n{}", topic_entries.join("\n")));
}
}

if results.is_empty() {
Ok(format!("No results found for: {}", query))
} else {
Ok(results.join("\n\n"))
}
}

pub fn requires_permission(&self, path: &str) -> bool {
Expand Down
Loading