Skip to content

Commit 97166ea

Browse files
committed
feat(http): add timeouts and identifying user-agent to all provider HTTP calls
Every outbound HTTP client (chat, models, titles, tools, agent runner) was being created with reqwest::Client::new() — no timeouts of any kind. A stalled provider, dead TCP connection, or silently rate-limited upstream would freeze the agent loop indefinitely with no error path. Adds a small services::http_client module with two shared builders: - streaming_client(): used for SSE chat completions - 10s connect timeout - 60s per-read timeout (catches dead streams between chunks) - 600s total ceiling (long completions still finish) - request_client(): used for non-streaming calls (model lists, titles) - 10s connect timeout - 120s total timeout Both clients carry an 'enowX-Coder/<version>' User-Agent so providers, proxies, and the user's own firewall can attribute traffic correctly (several providers reject empty UAs). Migrates all 10 call sites: - services/chat_service.rs (5 sites) - services/model_service.rs (2 sites) - agents/runner.rs (2 sites) - tools/executor.rs (1 site, web_search) No behavior change for healthy paths — only adds bounded failure on unhealthy ones.
1 parent aa982f3 commit 97166ea

6 files changed

Lines changed: 101 additions & 12 deletions

File tree

src-tauri/src/agents/runner.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1071,7 +1071,7 @@ impl AgentRunner {
10711071
"stream": true,
10721072
});
10731073

1074-
let client = reqwest::Client::new();
1074+
let client = crate::services::http_client::streaming_client()?;
10751075
let mut request = client
10761076
.post(endpoint)
10771077
.header(CONTENT_TYPE, "application/json")
@@ -1113,7 +1113,7 @@ impl AgentRunner {
11131113
payload["system"] = Value::String(system_prompt);
11141114
}
11151115

1116-
let client = reqwest::Client::new();
1116+
let client = crate::services::http_client::streaming_client()?;
11171117
let mut request = client
11181118
.post("https://api.anthropic.com/v1/messages")
11191119
.header(CONTENT_TYPE, "application/json")

src-tauri/src/services/chat_service.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ use crate::{
1212
models::Message,
1313
};
1414

15-
use super::{now_rfc3339, provider_service};
15+
use super::{http_client, now_rfc3339, provider_service};
1616

1717
pub async fn get_messages(db: &SqlitePool, session_id: &str) -> AppResult<Vec<Message>> {
1818
let messages = sqlx::query_as::<_, Message>(
@@ -163,7 +163,7 @@ async fn send_openai_compatible(
163163
on_token: &Channel<String>,
164164
cancel_token: &CancellationToken,
165165
) -> AppResult<String> {
166-
let client = reqwest::Client::new();
166+
let client = http_client::streaming_client()?;
167167
let endpoint = format!("{}/chat/completions", base_url.trim_end_matches('/'));
168168

169169
let messages: Vec<Value> = history
@@ -231,7 +231,7 @@ async fn send_anthropic(
231231
on_token: &Channel<String>,
232232
cancel_token: &CancellationToken,
233233
) -> AppResult<String> {
234-
let client = reqwest::Client::new();
234+
let client = http_client::streaming_client()?;
235235

236236
let (system_msgs, chat_msgs): (Vec<_>, Vec<_>) =
237237
history.iter().partition(|m| m.role == "system");
@@ -525,7 +525,7 @@ async fn generate_title_openai(
525525
model: &str,
526526
messages: &[Value],
527527
) -> AppResult<String> {
528-
let client = reqwest::Client::new();
528+
let client = http_client::request_client()?;
529529
let endpoint = format!(
530530
"{}/chat/completions",
531531
provider.base_url.trim_end_matches('/')
@@ -589,7 +589,7 @@ async fn generate_title_anthropic(
589589
"temperature": 0.3,
590590
});
591591

592-
let client = reqwest::Client::new();
592+
let client = http_client::request_client()?;
593593
let mut request = client
594594
.post("https://api.anthropic.com/v1/messages")
595595
.header(CONTENT_TYPE, "application/json")
@@ -697,7 +697,7 @@ pub async fn generate_excalidraw(
697697

698698
messages.push(serde_json::json!({"role": "user", "content": prompt}));
699699

700-
let client = reqwest::Client::new();
700+
let client = http_client::request_client()?;
701701
let endpoint = format!("{}/chat/completions", provider.base_url.trim_end_matches('/'));
702702

703703
let payload = serde_json::json!({
Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
//! Shared HTTP client builder for outbound LLM provider requests.
2+
//!
3+
//! All provider-facing HTTP calls (chat completions, model listings,
4+
//! title generation, agent tool runs) go through these constructors.
5+
//! This guarantees:
6+
//!
7+
//! - **Connect timeout** so a dead provider host fails fast (10s).
8+
//! - **Streaming-aware request timeouts** — streaming endpoints get a
9+
//! long ceiling (10 min) so SSE doesn't get cut, while non-streaming
10+
//! calls get a sane upper bound (2 min).
11+
//! - **Read timeout** to detect stalled streams between chunks (60s).
12+
//! - **Identifying User-Agent** so providers (and the user's own
13+
//! proxy/firewall) can attribute traffic to the app.
14+
//!
15+
//! Without this, `reqwest::Client::new()` produces a client with no
16+
//! timeouts at all — a network blip or a silently-rate-limited provider
17+
//! freezes the agent indefinitely.
18+
19+
use std::time::Duration;
20+
21+
use reqwest::Client;
22+
23+
use crate::error::{AppError, AppResult};
24+
25+
const USER_AGENT: &str = concat!("enowX-Coder/", env!("CARGO_PKG_VERSION"));
26+
27+
/// Connect timeout for all outbound HTTP — applies to TCP + TLS handshake.
28+
const CONNECT_TIMEOUT: Duration = Duration::from_secs(10);
29+
30+
/// Per-chunk read timeout for streaming responses. If we don't see a byte
31+
/// from the upstream provider in this window, the stream is considered dead.
32+
const STREAM_READ_TIMEOUT: Duration = Duration::from_secs(60);
33+
34+
/// Total request timeout for non-streaming calls (model listings,
35+
/// title generation, etc.). Generous, but bounded.
36+
const NON_STREAMING_TIMEOUT: Duration = Duration::from_secs(120);
37+
38+
/// Hard upper bound for streaming requests. SSE streams shouldn't outlast
39+
/// this — if they do, something is wrong upstream.
40+
const STREAMING_TIMEOUT: Duration = Duration::from_secs(600);
41+
42+
/// Build the shared `reqwest::Client` for streaming LLM responses.
43+
///
44+
/// Keeps a long total ceiling so multi-minute completions can finish,
45+
/// but still bounds connect + per-read so dead connections fail fast.
46+
pub fn streaming_client() -> AppResult<Client> {
47+
Client::builder()
48+
.user_agent(USER_AGENT)
49+
.connect_timeout(CONNECT_TIMEOUT)
50+
.read_timeout(STREAM_READ_TIMEOUT)
51+
.timeout(STREAMING_TIMEOUT)
52+
.build()
53+
.map_err(|e| AppError::Internal(format!("Failed to build streaming HTTP client: {e}")))
54+
}
55+
56+
/// Build the shared `reqwest::Client` for short, non-streaming requests
57+
/// (listing models, generating titles, single-shot completions).
58+
pub fn request_client() -> AppResult<Client> {
59+
Client::builder()
60+
.user_agent(USER_AGENT)
61+
.connect_timeout(CONNECT_TIMEOUT)
62+
.timeout(NON_STREAMING_TIMEOUT)
63+
.build()
64+
.map_err(|e| AppError::Internal(format!("Failed to build HTTP client: {e}")))
65+
}
66+
67+
#[cfg(test)]
mod tests {
    use super::*;

    // Client construction must never fail on a healthy system; a failure
    // here points at broken TLS/connector setup in the build environment.
    #[test]
    fn streaming_client_builds() {
        streaming_client().expect("streaming_client should build cleanly");
    }

    #[test]
    fn request_client_builds() {
        request_client().expect("request_client should build cleanly");
    }

    #[test]
    fn user_agent_includes_version() {
        // The UA must be the app identifier followed by a non-empty version.
        let version = USER_AGENT
            .strip_prefix("enowX-Coder/")
            .expect("user agent must start with the app identifier");
        assert!(!version.is_empty(), "user agent must carry a version");
    }
}

src-tauri/src/services/mod.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
pub mod agent_service;
22
pub mod chat_service;
33
pub mod drawing_service;
4+
pub mod http_client;
45
pub mod model_service;
56
pub mod project_service;
67
pub mod provider_model_service;

src-tauri/src/services/model_service.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
1-
use reqwest::Client;
21
use serde::Deserialize;
32

43
use crate::error::{AppError, AppResult};
4+
use crate::services::http_client;
55

66
#[derive(Debug, Deserialize)]
77
struct OpenAiModelList {
@@ -42,7 +42,7 @@ pub async fn list_models(
4242

4343
async fn fetch_openai_models(base_url: &str, api_key: Option<&str>) -> AppResult<Vec<String>> {
4444
let url = format!("{}/models", base_url.trim_end_matches('/'));
45-
let client = Client::new();
45+
let client = http_client::request_client()?;
4646
let mut req = client.get(&url);
4747

4848
if let Some(key) = api_key {
@@ -75,7 +75,7 @@ async fn fetch_openai_models(base_url: &str, api_key: Option<&str>) -> AppResult
7575
}
7676

7777
async fn fetch_anthropic_models(api_key: Option<&str>) -> AppResult<Vec<String>> {
78-
let client = Client::new();
78+
let client = http_client::request_client()?;
7979
let mut req = client
8080
.get("https://api.anthropic.com/v1/models")
8181
.header("anthropic-version", "2023-06-01");

src-tauri/src/tools/executor.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -331,7 +331,7 @@ impl ToolExecutor {
331331
let query = input["query"]
332332
.as_str()
333333
.ok_or_else(|| AppError::Validation("Missing 'query' field".to_string()))?;
334-
let client = reqwest::Client::new();
334+
let client = crate::services::http_client::request_client()?;
335335
let url = format!(
336336
"https://api.duckduckgo.com/?q={}&format=json&no_html=1&skip_disambig=1",
337337
urlencoding::encode(query)

0 commit comments

Comments
 (0)