From debd6454ad283ff0a0d5b4412139cbc4540414aa Mon Sep 17 00:00:00 2001 From: Hubert Shelley <46239302+hubertshelley@users.noreply.github.com> Date: Fri, 13 Feb 2026 20:43:52 +0800 Subject: [PATCH 1/2] feat(deps): add chat thinking support add ChatThinking request fields for chat completions and add reasoning_content fields in chat response/message delta. also make workspace rust-version explicit for path usage and fix clippy let_underscore_future warnings in config tests. --- async-openai-macros/Cargo.toml | 2 +- async-openai/Cargo.toml | 2 +- async-openai/src/config.rs | 12 ++++++++---- async-openai/src/types/chat/chat_.rs | 25 +++++++++++++++++++++++++ 4 files changed, 35 insertions(+), 6 deletions(-) diff --git a/async-openai-macros/Cargo.toml b/async-openai-macros/Cargo.toml index e842726d..826837d2 100644 --- a/async-openai-macros/Cargo.toml +++ b/async-openai-macros/Cargo.toml @@ -8,7 +8,7 @@ edition = "2021" license = "MIT" homepage = "https://github.com/64bit/async-openai" repository = "https://github.com/64bit/async-openai" -rust-version = { workspace = true } +rust-version = "1.75" readme = "README.md" [lib] diff --git a/async-openai/Cargo.toml b/async-openai/Cargo.toml index c7b4bee0..afa7f145 100644 --- a/async-openai/Cargo.toml +++ b/async-openai/Cargo.toml @@ -6,7 +6,7 @@ categories = ["api-bindings", "web-programming", "asynchronous"] keywords = ["openai", "async", "openapi", "ai"] description = "Rust library for OpenAI" edition = "2021" -rust-version = { workspace = true } +rust-version = "1.75" license = "MIT" readme = "README.md" homepage = "https://github.com/64bit/async-openai" diff --git a/async-openai/src/config.rs b/async-openai/src/config.rs index e654f02a..5dd5a982 100644 --- a/async-openai/src/config.rs +++ b/async-openai/src/config.rs @@ -335,7 +335,7 @@ mod test { } async fn dynamic_dispatch_compiles(client: &Client>) { - let _ = client.chat().create(CreateChatCompletionRequest { + 
std::mem::drop(client.chat().create(CreateChatCompletionRequest { model: "gpt-4o".to_string(), messages: vec![ChatCompletionRequestMessage::User( ChatCompletionRequestUserMessage { @@ -344,7 +344,7 @@ mod test { }, )], ..Default::default() - }); + })); } #[tokio::test] @@ -358,7 +358,11 @@ mod test { let _ = dynamic_dispatch_compiles(&azure_client).await; let _ = dynamic_dispatch_compiles(&oai_client).await; - let _ = tokio::spawn(async move { dynamic_dispatch_compiles(&azure_client).await }); - let _ = tokio::spawn(async move { dynamic_dispatch_compiles(&oai_client).await }); + std::mem::drop(tokio::spawn(async move { + dynamic_dispatch_compiles(&azure_client).await + })); + std::mem::drop(tokio::spawn(async move { + dynamic_dispatch_compiles(&oai_client).await + })); } } diff --git a/async-openai/src/types/chat/chat_.rs b/async-openai/src/types/chat/chat_.rs index 9da8368b..d2af6f47 100644 --- a/async-openai/src/types/chat/chat_.rs +++ b/async-openai/src/types/chat/chat_.rs @@ -440,6 +440,9 @@ pub struct ChatCompletionResponseMessage { /// The contents of the message. #[serde(skip_serializing_if = "Option::is_none")] pub content: Option, + /// Provider-specific reasoning content (for example BigModel `reasoning_content`). + #[serde(skip_serializing_if = "Option::is_none")] + pub reasoning_content: Option, /// The refusal message generated by the model. 
#[serde(skip_serializing_if = "Option::is_none")] pub refusal: Option, @@ -736,6 +739,21 @@ pub struct ChatCompletionAudio { pub format: ChatCompletionAudioFormat, } +#[derive(Clone, Serialize, Debug, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ChatThinkingType { + Enabled, + Disabled, +} + +#[derive(Clone, Serialize, Default, Debug, Deserialize, PartialEq)] +pub struct ChatThinking { + #[serde(rename = "type", skip_serializing_if = "Option::is_none")] + pub r#type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub clear_thinking: Option, +} + #[derive(Clone, Serialize, Default, Debug, Builder, Deserialize, PartialEq)] #[builder(name = "CreateChatCompletionRequestArgs")] #[builder(pattern = "mutable")] @@ -784,6 +802,10 @@ pub struct CreateChatCompletionRequest { #[serde(skip_serializing_if = "Option::is_none")] pub reasoning_effort: Option, + /// Provider-specific thinking controls (for example BigModel `thinking`). + #[serde(skip_serializing_if = "Option::is_none")] + pub thinking: Option, + /// An upper bound for the number of tokens that can be generated for a completion, including /// visible output tokens and [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). #[serde(skip_serializing_if = "Option::is_none")] @@ -1138,6 +1160,9 @@ pub enum FunctionType { pub struct ChatCompletionStreamResponseDelta { /// The contents of the chunk message. pub content: Option, + /// Provider-specific reasoning delta (for example BigModel `reasoning_content`). + #[serde(skip_serializing_if = "Option::is_none")] + pub reasoning_content: Option, /// Deprecated and replaced by `tool_calls`. The name and arguments of a function that should be called, as generated by the model. 
#[deprecated] pub function_call: Option, From b922b8899a0709b0d28f6d3ceae5ded46fbf13a5 Mon Sep 17 00:00:00 2001 From: Hubert Shelley <46239302+hubertshelley@users.noreply.github.com> Date: Tue, 24 Feb 2026 10:34:47 +0800 Subject: [PATCH 2/2] chore: Revert configuration: rust-version --- async-openai-macros/Cargo.toml | 2 +- async-openai/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/async-openai-macros/Cargo.toml b/async-openai-macros/Cargo.toml index 826837d2..e842726d 100644 --- a/async-openai-macros/Cargo.toml +++ b/async-openai-macros/Cargo.toml @@ -8,7 +8,7 @@ edition = "2021" license = "MIT" homepage = "https://github.com/64bit/async-openai" repository = "https://github.com/64bit/async-openai" -rust-version = "1.75" +rust-version = { workspace = true } readme = "README.md" [lib] diff --git a/async-openai/Cargo.toml b/async-openai/Cargo.toml index afa7f145..c7b4bee0 100644 --- a/async-openai/Cargo.toml +++ b/async-openai/Cargo.toml @@ -6,7 +6,7 @@ categories = ["api-bindings", "web-programming", "asynchronous"] keywords = ["openai", "async", "openapi", "ai"] description = "Rust library for OpenAI" edition = "2021" -rust-version = "1.75" +rust-version = { workspace = true } license = "MIT" readme = "README.md" homepage = "https://github.com/64bit/async-openai"