diff --git a/docs/pyagentspec/source/_components/all_components.json b/docs/pyagentspec/source/_components/all_components.json
index 44394b59..a0ce07bd 100644
--- a/docs/pyagentspec/source/_components/all_components.json
+++ b/docs/pyagentspec/source/_components/all_components.json
@@ -119,6 +119,22 @@
"path": "pyagentspec.llms.ocigenaiconfig.OciGenAiConfig",
"name": "pyagentspec.llms.OciGenAiConfig"
},
+ {
+ "path": "pyagentspec.llms.geminiconfig.GeminiConfig",
+ "name": "pyagentspec.llms.GeminiConfig"
+ },
+ {
+ "path": "pyagentspec.llms.geminiauthconfig.GeminiAuthConfig",
+ "name": "pyagentspec.llms.GeminiAuthConfig"
+ },
+ {
+ "path": "pyagentspec.llms.geminiauthconfig.GeminiAIStudioAuthConfig",
+ "name": "pyagentspec.llms.GeminiAIStudioAuthConfig"
+ },
+ {
+ "path": "pyagentspec.llms.geminiauthconfig.GeminiVertexAIAuthConfig",
+ "name": "pyagentspec.llms.GeminiVertexAIAuthConfig"
+ },
{
"path": "pyagentspec.llms.ociclientconfig.OciClientConfig",
"name": "pyagentspec.llms.OciClientConfig"
diff --git a/docs/pyagentspec/source/_components/llm_config_tabs.rst b/docs/pyagentspec/source/_components/llm_config_tabs.rst
index d26b9ce0..7053601c 100644
--- a/docs/pyagentspec/source/_components/llm_config_tabs.rst
+++ b/docs/pyagentspec/source/_components/llm_config_tabs.rst
@@ -37,6 +37,20 @@
client_config=client_config,
)
+ .. tab:: Gemini
+
+ .. code-block:: python
+
+ from pyagentspec.llms import GeminiConfig
+ from pyagentspec.llms.geminiauthconfig import GeminiAIStudioAuthConfig
+
+ llm_config = GeminiConfig(
+ name="Gemini Flash",
+ model_id="gemini-2.5-flash",
+ # The runtime can read GEMINI_API_KEY from the environment.
+ auth=GeminiAIStudioAuthConfig(name="gemini-aistudio-auth"),
+ )
+
.. tab:: vLLM
.. code-block:: python
diff --git a/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json b/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json
index 9df93bda..127bce73 100644
--- a/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json
+++ b/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json
@@ -243,6 +243,46 @@
}
]
},
+ "GeminiAIStudioAuthConfig": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/BaseGeminiAIStudioAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/ComponentReference"
+ }
+ ]
+ },
+ "GeminiAuthConfig": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/BaseGeminiAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/ComponentReference"
+ }
+ ]
+ },
+ "GeminiConfig": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/BaseGeminiConfig"
+ },
+ {
+ "$ref": "#/$defs/ComponentReference"
+ }
+ ]
+ },
+ "GeminiVertexAIAuthConfig": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/BaseGeminiVertexAIAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/ComponentReference"
+ }
+ ]
+ },
"HandoffMode": {
"description": "Controls how agents in a Swarm may delegate work to one another.\n\nThis setting determines whether an agent is equipped with:\n\n * *send_message* \u2014 a tool for asking another agent to perform a sub-task and reply back.\n\n * *handoff_conversation* \u2014 a tool for transferring the full user\u2013agent conversation to another agent.\n\nDepending on the selected mode, agents have different capabilities for delegation and collaboration.",
"enum": [
@@ -2856,6 +2896,243 @@
"type": "object",
"x-abstract-component": false
},
+ "BaseGeminiAIStudioAuthConfig": {
+ "additionalProperties": false,
+ "description": "Authentication settings for Gemini via Google AI Studio.",
+ "properties": {
+ "id": {
+ "title": "Id",
+ "type": "string"
+ },
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Description"
+ },
+ "metadata": {
+ "anyOf": [
+ {
+ "additionalProperties": true,
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Metadata"
+ },
+ "api_key": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Api Key"
+ },
+ "$referenced_components": {
+ "$ref": "#/$defs/ReferencedComponents"
+ },
+ "component_type": {
+ "const": "GeminiAIStudioAuthConfig"
+ }
+ },
+ "required": [
+ "name"
+ ],
+ "title": "GeminiAIStudioAuthConfig",
+ "type": "object",
+ "x-abstract-component": false
+ },
+ "BaseGeminiAuthConfig": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/GeminiAIStudioAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/GeminiVertexAIAuthConfig"
+ }
+ ],
+ "x-abstract-component": true
+ },
+ "BaseGeminiConfig": {
+ "additionalProperties": false,
+ "description": "Configure a connection to a Gemini LLM (AI Studio or Vertex AI).",
+ "properties": {
+ "id": {
+ "title": "Id",
+ "type": "string"
+ },
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Description"
+ },
+ "metadata": {
+ "anyOf": [
+ {
+ "additionalProperties": true,
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Metadata"
+ },
+ "default_generation_parameters": {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "$ref": "#/$defs/LlmGenerationConfig"
+ }
+ ],
+ "default": null
+ },
+ "retry_policy": {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "$ref": "#/$defs/RetryPolicy"
+ }
+ ],
+ "default": null
+ },
+ "model_id": {
+ "title": "Model Id",
+ "type": "string"
+ },
+ "auth": {
+ "$ref": "#/$defs/GeminiAuthConfig"
+ },
+ "$referenced_components": {
+ "$ref": "#/$defs/ReferencedComponents"
+ },
+ "component_type": {
+ "const": "GeminiConfig"
+ }
+ },
+ "required": [
+ "auth",
+ "model_id",
+ "name"
+ ],
+ "title": "GeminiConfig",
+ "type": "object",
+ "x-abstract-component": false
+ },
+ "BaseGeminiVertexAIAuthConfig": {
+ "additionalProperties": false,
+ "description": "Authentication settings for Gemini via Google Vertex AI.",
+ "properties": {
+ "id": {
+ "title": "Id",
+ "type": "string"
+ },
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Description"
+ },
+ "metadata": {
+ "anyOf": [
+ {
+ "additionalProperties": true,
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Metadata"
+ },
+ "project_id": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Project Id"
+ },
+ "location": {
+ "default": "global",
+ "title": "Location",
+ "type": "string"
+ },
+ "credentials": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "additionalProperties": true,
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Credentials"
+ },
+ "$referenced_components": {
+ "$ref": "#/$defs/ReferencedComponents"
+ },
+ "component_type": {
+ "const": "GeminiVertexAIAuthConfig"
+ }
+ },
+ "required": [
+ "name"
+ ],
+ "title": "GeminiVertexAIAuthConfig",
+ "type": "object",
+ "x-abstract-component": false
+ },
"BaseInMemoryCollectionDatastore": {
"additionalProperties": false,
"description": "In-memory datastore for testing and development purposes.",
@@ -3015,6 +3292,9 @@
},
"BaseLlmConfig": {
"anyOf": [
+ {
+ "$ref": "#/$defs/GeminiConfig"
+ },
{
"$ref": "#/$defs/OciGenAiConfig"
},
@@ -7080,6 +7360,18 @@
{
"$ref": "#/$defs/BaseFlowNode"
},
+ {
+ "$ref": "#/$defs/BaseGeminiAIStudioAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/BaseGeminiAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/BaseGeminiConfig"
+ },
+ {
+ "$ref": "#/$defs/BaseGeminiVertexAIAuthConfig"
+ },
{
"$ref": "#/$defs/BaseInMemoryCollectionDatastore"
},
@@ -7564,6 +7856,66 @@
}
}
},
+ "VersionedGeminiAIStudioAuthConfig": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/BaseGeminiAIStudioAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/ComponentReference"
+ }
+ ],
+ "properties": {
+ "agentspec_version": {
+ "$ref": "#/$defs/AgentSpecVersionEnum"
+ }
+ }
+ },
+ "VersionedGeminiAuthConfig": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/BaseGeminiAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/ComponentReference"
+ }
+ ],
+ "properties": {
+ "agentspec_version": {
+ "$ref": "#/$defs/AgentSpecVersionEnum"
+ }
+ }
+ },
+ "VersionedGeminiConfig": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/BaseGeminiConfig"
+ },
+ {
+ "$ref": "#/$defs/ComponentReference"
+ }
+ ],
+ "properties": {
+ "agentspec_version": {
+ "$ref": "#/$defs/AgentSpecVersionEnum"
+ }
+ }
+ },
+ "VersionedGeminiVertexAIAuthConfig": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/BaseGeminiVertexAIAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/ComponentReference"
+ }
+ ],
+ "properties": {
+ "agentspec_version": {
+ "$ref": "#/$defs/AgentSpecVersionEnum"
+ }
+ }
+ },
"VersionedInMemoryCollectionDatastore": {
"anyOf": [
{
@@ -8353,6 +8705,18 @@
{
"$ref": "#/$defs/VersionedFlowNode"
},
+ {
+ "$ref": "#/$defs/VersionedGeminiAIStudioAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/VersionedGeminiAuthConfig"
+ },
+ {
+ "$ref": "#/$defs/VersionedGeminiConfig"
+ },
+ {
+ "$ref": "#/$defs/VersionedGeminiVertexAIAuthConfig"
+ },
{
"$ref": "#/$defs/VersionedInMemoryCollectionDatastore"
},
diff --git a/docs/pyagentspec/source/agentspec/language_spec_nightly.rst b/docs/pyagentspec/source/agentspec/language_spec_nightly.rst
index 62ef7e28..fdbfa2b8 100644
--- a/docs/pyagentspec/source/agentspec/language_spec_nightly.rst
+++ b/docs/pyagentspec/source/agentspec/language_spec_nightly.rst
@@ -837,6 +837,58 @@ Client configuration that should be used if users want to use resource principal
class OciClientConfigWithResourcePrincipal(OciClientConfig):
auth_type: Literal["RESOURCE_PRINCIPAL"] = "RESOURCE_PRINCIPAL"
+Gemini
+^^^^^^
+
+This class of LLMs refers to the Gemini family of models offered by `Google <https://ai.google.dev/>`_.
+Google offers Gemini through two services: `AI Studio <https://aistudio.google.com/>`_ and `Vertex AI <https://cloud.google.com/vertex-ai>`_.
+
+.. code-block:: python
+
+ class GeminiConfig(LlmConfig):
+ model_id: str
+ auth: SerializeAsAny[GeminiAuthConfig]
+
+.. code-block:: python
+
+ class GeminiAuthConfig(Component, abstract=True):
+ pass
+
+AI Studio uses API key-based authentication:
+
+.. code-block:: python
+
+ class GeminiAIStudioAuthConfig(GeminiAuthConfig):
+ api_key: SensitiveField[Optional[str]] = None
+
+When ``api_key`` is not specified, runtimes may try to load it from the ``GEMINI_API_KEY``
+environment variable.
+
+Meanwhile, the Vertex AI service can be authenticated with Google Cloud credentials. These credentials can be provided with a `service account JSON key <https://cloud.google.com/iam/docs/keys-create-delete>`_
+either inline or through a local file path. When omitted, runtimes may rely on Google Application Default Credentials (ADC), such as
+the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable, credentials made available through the local Google Cloud environment,
+or an attached service account. Even with ADC, the ``project_id`` may still need to be provided explicitly when it cannot be
+resolved from the local Google Cloud configuration:
+
+.. code-block:: python
+
+ class GeminiVertexAIAuthConfig(GeminiAuthConfig):
+ project_id: Optional[str] = None
+ location: str = "global"
+ credentials: SensitiveField[Optional[Union[str, Dict[str, Any]]]] = None
+
+Here, ``credentials`` accepts either a local file path (``str``) to a Google Cloud JSON
+credential file, such as a service-account key file, or an inline ``dict`` containing the
+parsed JSON contents of that file.
+
+See `Using Gemini API keys <https://ai.google.dev/gemini-api/docs/api-key>`_,
+`Application Default Credentials <https://cloud.google.com/docs/authentication/application-default-credentials>`_,
+and `Create and delete service account keys <https://cloud.google.com/iam/docs/keys-create-delete>`_
+for more details.
+``GeminiConfig.auth`` stays inline when serialized. If ``api_key`` or ``credentials`` of the auth object is
+specified, only that sensitive field is externalized. Otherwise, if ``api_key`` or ``credentials`` is
+unset, that field serializes as ``null``.
+
Tools
~~~~~
@@ -2867,6 +2919,10 @@ See all the fields below that are considered sensitive fields:
+----------------------------------+--------------------+
| OpenAiConfig | api_key |
+----------------------------------+--------------------+
+| GeminiAIStudioAuthConfig | api_key |
++----------------------------------+--------------------+
+| GeminiVertexAIAuthConfig | credentials |
++----------------------------------+--------------------+
| OciClientConfigWithSecurityToken | auth_file_location |
+----------------------------------+--------------------+
| OciClientConfigWithApiKey | auth_file_location |
@@ -2890,6 +2946,10 @@ See all the fields below that are considered sensitive fields:
| StreamableHTTPmTLSTransport | ca_file |
+----------------------------------+--------------------+
+For example, ``GeminiAIStudioAuthConfig.api_key`` or
+``GeminiVertexAIAuthConfig.credentials`` may become references while the enclosing
+``auth`` component remains inline.
+
For example, the following component produced using the `pyagentspec` SDK:
diff --git a/docs/pyagentspec/source/api/llmmodels.rst b/docs/pyagentspec/source/api/llmmodels.rst
index b4eb69ec..c1db63fd 100644
--- a/docs/pyagentspec/source/api/llmmodels.rst
+++ b/docs/pyagentspec/source/api/llmmodels.rst
@@ -66,6 +66,25 @@ OpenAI Models
.. autoclass:: pyagentspec.llms.openaiconfig.OpenAiConfig
:exclude-members: model_post_init, model_config
+Gemini Models
+^^^^^^^^^^^^^
+
+.. _geminiauthconfig:
+.. autoclass:: pyagentspec.llms.geminiauthconfig.GeminiAuthConfig
+ :exclude-members: model_post_init, model_config
+
+.. _geminiaistudioauthconfig:
+.. autoclass:: pyagentspec.llms.geminiauthconfig.GeminiAIStudioAuthConfig
+ :exclude-members: model_post_init, model_config
+
+.. _geminivertexaiauthconfig:
+.. autoclass:: pyagentspec.llms.geminiauthconfig.GeminiVertexAIAuthConfig
+ :exclude-members: model_post_init, model_config
+
+.. _geminiconfig:
+.. autoclass:: pyagentspec.llms.geminiconfig.GeminiConfig
+ :exclude-members: model_post_init, model_config
+
OciGenAi Models
^^^^^^^^^^^^^^^
diff --git a/docs/pyagentspec/source/changelog.rst b/docs/pyagentspec/source/changelog.rst
index d879ad1d..55190ee0 100644
--- a/docs/pyagentspec/source/changelog.rst
+++ b/docs/pyagentspec/source/changelog.rst
@@ -105,6 +105,13 @@ New features
For more information read the :doc:`adapter page `.
+* **Gemini LLM configuration support**
+
+ Added ``GeminiConfig`` together with ``GeminiAIStudioAuthConfig`` and
+ ``GeminiVertexAIAuthConfig`` to represent Gemini models in Agent Spec.
+
+ For more information read the :doc:`API Reference `.
+
* **Added Microsoft Agent Framework adapter to pyagentspec:**
The Microsoft Agent Framework adapter is now available as part of ``pyagentspec``.
diff --git a/docs/pyagentspec/source/code_examples/howto_llm_from_different_providers.py b/docs/pyagentspec/source/code_examples/howto_llm_from_different_providers.py
index 5374d3d1..0909a509 100644
--- a/docs/pyagentspec/source/code_examples/howto_llm_from_different_providers.py
+++ b/docs/pyagentspec/source/code_examples/howto_llm_from_different_providers.py
@@ -85,6 +85,42 @@
)
# .. openai-end
+# .. gemini-aistudio-start
+from pyagentspec.llms import GeminiConfig
+from pyagentspec.llms.geminiauthconfig import GeminiAIStudioAuthConfig
+
+generation_config = LlmGenerationConfig(max_tokens=256, temperature=0.7, top_p=0.9)
+
+llm = GeminiConfig(
+ name="gemini-aistudio-flash",
+ model_id="gemini-2.5-flash",
+ auth=GeminiAIStudioAuthConfig(
+ name="gemini-aistudio-auth"
+ # Optional: if api_key is omitted, runtimes may load GEMINI_API_KEY from the environment.
+ ),
+ default_generation_parameters=generation_config,
+)
+# .. gemini-aistudio-end
+
+# .. gemini-vertex-start
+from pyagentspec.llms.geminiauthconfig import GeminiVertexAIAuthConfig
+
+generation_config = LlmGenerationConfig(max_tokens=256, temperature=0.4, top_p=0.95)
+
+llm = GeminiConfig(
+ name="gemini-vertex-flash",
+ model_id="gemini-2.0-flash-lite",
+ auth=GeminiVertexAIAuthConfig(
+ name="gemini-vertex-auth",
+ # Often still required even when ADC supplies the credentials.
+ project_id="my-gcp-project",
+ location="global",
+ # Optional: explicit credentials can be provided when ADC is not available.
+ ),
+ default_generation_parameters=generation_config,
+)
+# .. gemini-vertex-end
+
# .. ollama-start
from pyagentspec.llms import OllamaConfig
@@ -168,6 +204,34 @@
default_generation_parameters=generation_config,
)
+from pyagentspec.llms import GeminiConfig
+from pyagentspec.llms.geminiauthconfig import GeminiAIStudioAuthConfig, GeminiVertexAIAuthConfig
+
+generation_config = LlmGenerationConfig(max_tokens=256, temperature=0.7, top_p=0.9)
+
+llm = GeminiConfig(
+ name="gemini-aistudio-flash",
+ model_id="gemini-2.5-flash",
+ auth=GeminiAIStudioAuthConfig(
+ name="gemini-aistudio-auth"
+ # Optional: if api_key is omitted, runtimes may load GEMINI_API_KEY from the environment.
+ ),
+ default_generation_parameters=generation_config,
+)
+
+llm = GeminiConfig(
+ name="gemini-vertex-flash",
+ model_id="gemini-2.0-flash-lite",
+ auth=GeminiVertexAIAuthConfig(
+ name="gemini-vertex-auth",
+ # Often still required even when ADC supplies the credentials.
+ project_id="my-gcp-project",
+ location="global",
+ # Optional: explicit credentials can be provided when ADC is not available.
+ ),
+ default_generation_parameters=generation_config,
+)
+
from pyagentspec.llms import OllamaConfig
generation_config = LlmGenerationConfig(max_tokens=512, temperature=0.9, top_p=0.9)
diff --git a/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst b/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst
index 18be11e9..2252d3bb 100644
--- a/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst
+++ b/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst
@@ -6,6 +6,7 @@ Agent Spec supports several LLM providers, each one having its own LlmConfig com
The available LLMs are:
- :ref:`OpenAiConfig `
+- :ref:`GeminiConfig `
- :ref:`OciGenAiConfig `
- :ref:`OpenAiCompatibleConfig `
- :ref:`VllmConfig `
@@ -183,6 +184,97 @@ You can refer to one of those models by using the ``OpenAiConfig`` Component.
.. _howto-openaicompatibleconfig:
+GeminiConfig
+============
+
+`Gemini <https://ai.google.dev/>`_ models can be configured through ``GeminiConfig``.
+Agent Spec supports both Google AI Studio and Google Vertex AI authentication modes.
+
+Gemini authentication is modeled as a nested auth component, similar to OCI ``client_config``.
+The auth component itself remains inline during serialization. When ``api_key`` or
+``credentials`` is provided explicitly, only that sensitive field is externalized and must
+be supplied through ``components_registry`` when loading the configuration back.
+
+**Parameters**
+
+.. option:: model_id: str
+
+ Name of the model to use, for example ``gemini-2.5-flash`` or
+ ``gemini-2.0-flash-lite``.
+
+.. option:: auth: GeminiAuthConfig
+
+ Required authentication component for Gemini. As with other Agent Spec components,
+ auth configs need a ``name``. Use ``GeminiAIStudioAuthConfig(name="gemini-aistudio-auth")``
+ if you want runtimes to load ``GEMINI_API_KEY`` from the environment, or
+ ``GeminiVertexAIAuthConfig(name="gemini-vertex-auth", ...)`` for Vertex AI.
+ The auth component remains inline when serialized. If ``api_key`` or ``credentials``
+ is set explicitly, only that sensitive field is serialized as a reference.
+
+.. option:: default_generation_parameters: dict, null
+
+ Default parameters for text generation with this model.
+
+Google AI Studio authentication
+-------------------------------
+
+Use ``GeminiAIStudioAuthConfig`` when connecting through Google AI Studio.
+
+**Parameters**
+
+.. option:: api_key: str, null
+
+ Optional Gemini API key. If omitted, runtimes may load it from ``GEMINI_API_KEY``.
+ If provided explicitly, only the ``api_key`` field is externalized during
+ serialization and must be supplied separately when deserializing.
+
+**Example**
+
+.. literalinclude:: ../code_examples/howto_llm_from_different_providers.py
+ :language: python
+ :start-after: .. gemini-aistudio-start
+ :end-before: .. gemini-aistudio-end
+
+Vertex AI authentication
+------------------------
+
+Use ``GeminiVertexAIAuthConfig`` when connecting through Google Vertex AI.
+
+**Parameters**
+
+.. option:: project_id: str, null
+
+ Optional Google Cloud project identifier.
+ In practice, you may still need to set this explicitly when ADC provides
+ credentials but does not expose a default project.
+
+.. option:: location: str
+
+ Vertex AI location or region. Defaults to ``global``.
+
+.. option:: credentials: str | dict, null
+
+ Optional local file path (``str``) to a Google Cloud JSON credential file, such as a
+ service-account key file, or an inline ``dict`` containing the parsed JSON contents of
+ that file.
+ When omitted, runtimes may rely on Google Application Default Credentials (ADC), such as
+ ``GOOGLE_APPLICATION_CREDENTIALS``, credentials made available through the local
+ Google Cloud environment, or an attached service account.
+ See `Google Cloud authentication docs <https://cloud.google.com/docs/authentication>`_
+ for details.
+ This does not guarantee that ``project_id`` can also be inferred automatically.
+ If provided explicitly, only the ``credentials`` field is externalized during
+ serialization. Non-secret auth settings such as ``project_id`` and ``location``
+ remain inline in the main config.
+
+**Example**
+
+.. literalinclude:: ../code_examples/howto_llm_from_different_providers.py
+ :language: python
+ :start-after: .. gemini-vertex-start
+ :end-before: .. gemini-vertex-end
+
+
OpenAiCompatibleConfig
======================
diff --git a/pyagentspec/src/pyagentspec/_component_registry.py b/pyagentspec/src/pyagentspec/_component_registry.py
index 82edffcb..be871a30 100644
--- a/pyagentspec/src/pyagentspec/_component_registry.py
+++ b/pyagentspec/src/pyagentspec/_component_registry.py
@@ -44,12 +44,18 @@
ToolNode,
)
from pyagentspec.llms import (
+ GeminiConfig,
OciGenAiConfig,
OllamaConfig,
OpenAiCompatibleConfig,
OpenAiConfig,
VllmConfig,
)
+from pyagentspec.llms.geminiauthconfig import (
+ GeminiAIStudioAuthConfig,
+ GeminiAuthConfig,
+ GeminiVertexAIAuthConfig,
+)
from pyagentspec.llms.llmconfig import LlmConfig
from pyagentspec.llms.ociclientconfig import (
OciClientConfig,
@@ -117,6 +123,10 @@
"OciClientConfigWithInstancePrincipal": OciClientConfigWithInstancePrincipal,
"OciClientConfigWithResourcePrincipal": OciClientConfigWithResourcePrincipal,
"OciClientConfigWithSecurityToken": OciClientConfigWithSecurityToken,
+ "GeminiAuthConfig": GeminiAuthConfig,
+ "GeminiAIStudioAuthConfig": GeminiAIStudioAuthConfig,
+ "GeminiVertexAIAuthConfig": GeminiVertexAIAuthConfig,
+ "GeminiConfig": GeminiConfig,
"OciGenAiConfig": OciGenAiConfig,
"OllamaConfig": OllamaConfig,
"OpenAiCompatibleConfig": OpenAiCompatibleConfig,
diff --git a/pyagentspec/src/pyagentspec/llms/__init__.py b/pyagentspec/src/pyagentspec/llms/__init__.py
index 4f5469eb..c7170151 100644
--- a/pyagentspec/src/pyagentspec/llms/__init__.py
+++ b/pyagentspec/src/pyagentspec/llms/__init__.py
@@ -4,8 +4,9 @@
# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
-"""Define LLM configurations abstraction and concrete classes for connecting to vLLM or OCI."""
+"""Define LLM configuration abstractions and provider-specific implementations."""
+from .geminiconfig import GeminiConfig
from .llmconfig import LlmConfig
from .llmgenerationconfig import LlmGenerationConfig
from .ocigenaiconfig import OciGenAiConfig
@@ -17,6 +18,7 @@
__all__ = [
"LlmConfig",
"LlmGenerationConfig",
+ "GeminiConfig",
"VllmConfig",
"OciGenAiConfig",
"OllamaConfig",
diff --git a/pyagentspec/src/pyagentspec/llms/geminiauthconfig.py b/pyagentspec/src/pyagentspec/llms/geminiauthconfig.py
new file mode 100644
index 00000000..5290a1a5
--- /dev/null
+++ b/pyagentspec/src/pyagentspec/llms/geminiauthconfig.py
@@ -0,0 +1,58 @@
+# Copyright © 2026 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+"""Defines the classes for configuring Gemini authentication."""
+
+from typing import Any, Dict, Optional
+
+from pydantic import Field
+from pydantic.json_schema import SkipJsonSchema
+
+from pyagentspec.component import Component
+from pyagentspec.sensitive_field import SensitiveField
+from pyagentspec.versioning import AgentSpecVersionEnum
+
+
+class GeminiAuthConfig(Component, abstract=True):
+ """Base class for Gemini authentication configuration."""
+
+ min_agentspec_version: SkipJsonSchema[AgentSpecVersionEnum] = Field(
+ default=AgentSpecVersionEnum.v26_2_0,
+ init=False,
+ exclude=True,
+ )
+
+
+class GeminiAIStudioAuthConfig(GeminiAuthConfig):
+ """Authentication settings for Gemini via Google AI Studio."""
+
+ api_key: SensitiveField[Optional[str]] = None
+ """API key to use. If unset, runtimes may load it from ``GEMINI_API_KEY``."""
+
+
+class GeminiVertexAIAuthConfig(GeminiAuthConfig):
+ """Authentication settings for Gemini via Google Vertex AI."""
+
+ project_id: Optional[str] = None
+ """Optional Google Cloud project identifier.
+
+ This may still need to be set explicitly when the runtime cannot infer the
+ project from Application Default Credentials (ADC) or other local Google
+ Cloud configuration.
+ """
+ location: str = "global"
+ """Vertex AI location/region."""
+ credentials: SensitiveField[Optional[str | Dict[str, Any]]] = None
+ """Optional local file path to a Google Cloud JSON credential file, such as a
+ service-account key file, or an inline dictionary containing the parsed JSON
+ contents of that file.
+
+ When unset, runtimes may rely on Application Default Credentials (ADC), such as
+ ``GOOGLE_APPLICATION_CREDENTIALS``, credentials made available through the local
+ Google Cloud environment, or an attached service account.
+ Even then, ``project_id`` may still need to be provided separately if it
+ cannot be resolved from the environment.
+ """
diff --git a/pyagentspec/src/pyagentspec/llms/geminiconfig.py b/pyagentspec/src/pyagentspec/llms/geminiconfig.py
new file mode 100644
index 00000000..4c3dc36c
--- /dev/null
+++ b/pyagentspec/src/pyagentspec/llms/geminiconfig.py
@@ -0,0 +1,29 @@
+# Copyright © 2026 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+"""Defines the class for configuring how to connect to Gemini LLMs."""
+
+from pydantic import Field, SerializeAsAny
+from pydantic.json_schema import SkipJsonSchema
+
+from pyagentspec.llms.geminiauthconfig import GeminiAuthConfig
+from pyagentspec.llms.llmconfig import LlmConfig
+from pyagentspec.versioning import AgentSpecVersionEnum
+
+
+class GeminiConfig(LlmConfig):
+ """Configure a connection to a Gemini LLM (AI Studio or Vertex AI)."""
+
+ min_agentspec_version: SkipJsonSchema[AgentSpecVersionEnum] = Field(
+ default=AgentSpecVersionEnum.v26_2_0,
+ init=False,
+ exclude=True,
+ )
+
+ model_id: str
+ """Identifier of the Gemini model to use."""
+ auth: SerializeAsAny[GeminiAuthConfig]
+ """Authentication configuration used to connect to the Gemini service."""
diff --git a/pyagentspec/tests/adapters/langgraph/llms/test_ocigenai_conversion.py b/pyagentspec/tests/adapters/langgraph/llms/test_ocigenai_conversion.py
index 296af353..0bdf4f51 100644
--- a/pyagentspec/tests/adapters/langgraph/llms/test_ocigenai_conversion.py
+++ b/pyagentspec/tests/adapters/langgraph/llms/test_ocigenai_conversion.py
@@ -70,11 +70,12 @@ def auth_profile_contains_security_token(client_config):
# Evaluate once at import-time for skip decorator
+_SEC_TOKEN_PROFILE_NAME = OCI_AUTH_PROFILE_WITH_SECURITY_TOKEN or "DEFAULT"
_SEC_TOKEN_PRESENT = auth_profile_contains_security_token(
OciClientConfigWithSecurityToken(
name="with_security_token",
service_endpoint=OCI_SERVICE_ENDPOINT,
- auth_profile=OCI_AUTH_PROFILE_WITH_SECURITY_TOKEN or "DEFAULT",
+ auth_profile=_SEC_TOKEN_PROFILE_NAME,
auth_file_location=_oci_user_config_path(),
)
)
@@ -121,7 +122,7 @@ def test_ocigenai_llm_conversion_security_token(default_generation_parameters):
client_config = OciClientConfigWithSecurityToken(
name="with_security_token",
service_endpoint=OCI_SERVICE_ENDPOINT,
- auth_profile="WEBAUTH",
+ auth_profile=_SEC_TOKEN_PROFILE_NAME,
auth_file_location=_oci_user_config_path(),
)
llm_cfg = OciGenAiConfig(
diff --git a/pyagentspec/tests/serialization/test_gemini_config.py b/pyagentspec/tests/serialization/test_gemini_config.py
new file mode 100644
index 00000000..6f90926d
--- /dev/null
+++ b/pyagentspec/tests/serialization/test_gemini_config.py
@@ -0,0 +1,317 @@
+# Copyright © 2026 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+import json
+from typing import Any
+
+import pytest
+from pydantic import ValidationError
+
+from pyagentspec.llms import GeminiConfig
+from pyagentspec.llms.geminiauthconfig import GeminiAIStudioAuthConfig, GeminiVertexAIAuthConfig
+from pyagentspec.serialization import AgentSpecDeserializer, AgentSpecSerializer
+from pyagentspec.versioning import AgentSpecVersionEnum
+
# Fixture identifiers shared by all tests in this module so that serialized
# payloads and component-registry keys can be asserted exactly.
GEMINI_CONFIG_ID = "gemini-config-id"
GEMINI_CONFIG_NAME = "gemini"
GEMINI_AISTUDIO_AUTH_ID = "gemini-aistudio-auth-id"
GEMINI_AISTUDIO_AUTH_NAME = "gemini-aistudio-auth"
GEMINI_VERTEX_AUTH_ID = "gemini-vertex-auth-id"
GEMINI_VERTEX_AUTH_NAME = "gemini-vertex-auth"
+
+
def _assert_serialized_geminiconfig_fields(
    serialized_llm_as_dict: dict[str, Any], *, model_id: str
) -> None:
    """Check the invariant top-level fields of a serialized ``GeminiConfig``."""
    expected_fields = {
        "component_type": "GeminiConfig",
        "id": GEMINI_CONFIG_ID,
        "name": GEMINI_CONFIG_NAME,
        "description": None,
        "metadata": {},
        "default_generation_parameters": None,
        "model_id": model_id,
        "agentspec_version": AgentSpecVersionEnum.v26_2_0.value,
    }
    # Dicts preserve insertion order, so failures surface in the same order
    # as the original one-assert-per-field version.
    for field_name, expected_value in expected_fields.items():
        assert serialized_llm_as_dict[field_name] == expected_value
+
+
@pytest.mark.parametrize(
    ("auth", "model_id", "expected_auth", "expected_auth_type"),
    [
        (
            GeminiAIStudioAuthConfig(
                id=GEMINI_AISTUDIO_AUTH_ID,
                name=GEMINI_AISTUDIO_AUTH_NAME,
            ),
            "gemini-2.5-flash",
            {
                "component_type": "GeminiAIStudioAuthConfig",
                "id": GEMINI_AISTUDIO_AUTH_ID,
                "name": GEMINI_AISTUDIO_AUTH_NAME,
                "description": None,
                "metadata": {},
                "api_key": None,
            },
            GeminiAIStudioAuthConfig,
        ),
        (
            GeminiVertexAIAuthConfig(
                id=GEMINI_VERTEX_AUTH_ID,
                name=GEMINI_VERTEX_AUTH_NAME,
            ),
            "gemini-2.0-flash-lite",
            {
                "component_type": "GeminiVertexAIAuthConfig",
                "id": GEMINI_VERTEX_AUTH_ID,
                "name": GEMINI_VERTEX_AUTH_NAME,
                "description": None,
                "metadata": {},
                "project_id": None,
                "location": "global",
                "credentials": None,
            },
            GeminiVertexAIAuthConfig,
        ),
        (
            GeminiVertexAIAuthConfig(
                id=GEMINI_VERTEX_AUTH_ID,
                name=GEMINI_VERTEX_AUTH_NAME,
                project_id="project-id",
                location="us-central1",
            ),
            "gemini-2.0-flash-lite",
            {
                "component_type": "GeminiVertexAIAuthConfig",
                "id": GEMINI_VERTEX_AUTH_ID,
                "name": GEMINI_VERTEX_AUTH_NAME,
                "description": None,
                "metadata": {},
                "project_id": "project-id",
                "location": "us-central1",
                "credentials": None,
            },
            GeminiVertexAIAuthConfig,
        ),
    ],
    ids=["aistudio-empty", "vertex-empty", "vertex-without-credentials"],
)
def test_can_serialize_and_deserialize_gemini_config_with_inline_auth(
    auth: GeminiAIStudioAuthConfig | GeminiVertexAIAuthConfig,
    model_id: str,
    expected_auth: dict[str, Any],
    expected_auth_type: type[object],
) -> None:
    """Round-trip a ``GeminiConfig`` whose auth carries no sensitive values.

    With no secrets set, the auth component is serialized fully inline — no
    ``$component_ref`` indirection — and deserialization must reconstruct an
    equal config with the concrete auth subclass intact.
    """
    llm_config = GeminiConfig(
        id=GEMINI_CONFIG_ID,
        name=GEMINI_CONFIG_NAME,
        model_id=model_id,
        auth=auth,
    )

    serialized_llm = AgentSpecSerializer().to_json(llm_config)
    serialized_llm_as_dict = json.loads(serialized_llm)

    # No sensitive fields were provided, so nothing should have been
    # externalized into a component-registry reference.
    assert '"$component_ref"' not in serialized_llm
    _assert_serialized_geminiconfig_fields(serialized_llm_as_dict, model_id=model_id)
    assert serialized_llm_as_dict["auth"] == expected_auth

    deserialized_llm = AgentSpecDeserializer().from_json(serialized_llm)

    assert deserialized_llm == llm_config
    assert deserialized_llm.min_agentspec_version == AgentSpecVersionEnum.v26_2_0
    # The concrete subclass (not just the GeminiAuthConfig base) must survive.
    assert isinstance(deserialized_llm.auth, expected_auth_type)
+
+
@pytest.mark.parametrize(
    (
        "auth",
        "model_id",
        "expected_auth",
        "components_registry",
        "missing_component_ref",
        "hidden_values",
        "expected_auth_type",
    ),
    [
        (
            GeminiAIStudioAuthConfig(
                id=GEMINI_AISTUDIO_AUTH_ID,
                name=GEMINI_AISTUDIO_AUTH_NAME,
                api_key="THIS_IS_SECRET",
            ),
            "gemini-2.5-flash",
            {
                "component_type": "GeminiAIStudioAuthConfig",
                "id": GEMINI_AISTUDIO_AUTH_ID,
                "name": GEMINI_AISTUDIO_AUTH_NAME,
                "description": None,
                "metadata": {},
                "api_key": {"$component_ref": f"{GEMINI_AISTUDIO_AUTH_ID}.api_key"},
            },
            {f"{GEMINI_AISTUDIO_AUTH_ID}.api_key": "THIS_IS_SECRET"},
            rf"{GEMINI_AISTUDIO_AUTH_ID}\.api_key",
            ("THIS_IS_SECRET",),
            GeminiAIStudioAuthConfig,
        ),
        (
            GeminiVertexAIAuthConfig(
                id=GEMINI_VERTEX_AUTH_ID,
                name=GEMINI_VERTEX_AUTH_NAME,
                project_id="project-id",
                location="global",
                credentials={
                    "type": "service_account",
                    "client_email": "agent@example.com",
                    "private_key": "line1\\nline2",
                },
            ),
            "gemini-2.0-flash-lite",
            {
                "component_type": "GeminiVertexAIAuthConfig",
                "id": GEMINI_VERTEX_AUTH_ID,
                "name": GEMINI_VERTEX_AUTH_NAME,
                "description": None,
                "metadata": {},
                "project_id": "project-id",
                "location": "global",
                "credentials": {"$component_ref": f"{GEMINI_VERTEX_AUTH_ID}.credentials"},
            },
            {
                f"{GEMINI_VERTEX_AUTH_ID}.credentials": {
                    "type": "service_account",
                    "client_email": "agent@example.com",
                    "private_key": "line1\\nline2",
                },
            },
            rf"{GEMINI_VERTEX_AUTH_ID}\.credentials",
            ("service_account", "agent@example.com"),
            GeminiVertexAIAuthConfig,
        ),
    ],
    ids=["aistudio-api-key", "vertex-credentials"],
)
def test_can_serialize_and_deserialize_gemini_config_with_sensitive_auth_fields(
    auth: GeminiAIStudioAuthConfig | GeminiVertexAIAuthConfig,
    model_id: str,
    expected_auth: dict[str, Any],
    components_registry: dict[str, Any],
    missing_component_ref: str,
    hidden_values: tuple[str, ...],
    expected_auth_type: type[object],
) -> None:
    """Secrets in the auth config are externalized as ``$component_ref``.

    Sensitive fields (AI Studio ``api_key``, Vertex AI ``credentials``) must
    never appear in the serialized JSON; they are replaced by registry
    references. Deserialization fails without a registry that resolves those
    references and succeeds — reconstructing the original config — with one.
    """
    llm_config = GeminiConfig(
        id=GEMINI_CONFIG_ID,
        name=GEMINI_CONFIG_NAME,
        model_id=model_id,
        auth=auth,
    )

    serialized_llm = AgentSpecSerializer().to_json(llm_config)
    serialized_llm_as_dict = json.loads(serialized_llm)

    # The secret material must not leak anywhere into the serialized text.
    for hidden_value in hidden_values:
        assert hidden_value not in serialized_llm

    _assert_serialized_geminiconfig_fields(serialized_llm_as_dict, model_id=model_id)
    assert serialized_llm_as_dict["auth"] == expected_auth

    # Without a registry the dangling $component_ref must be reported.
    with pytest.raises(ValueError, match=missing_component_ref):
        AgentSpecDeserializer().from_json(serialized_llm)

    deserialized_llm = AgentSpecDeserializer().from_json(
        serialized_llm,
        components_registry=components_registry,
    )

    assert deserialized_llm == llm_config
    assert isinstance(deserialized_llm.auth, expected_auth_type)
+
+
def test_geminiconfig_requires_auth() -> None:
    """Omitting the required ``auth`` field must raise a validation error."""
    config_kwargs = {"name": GEMINI_CONFIG_NAME, "model_id": "gemini-2.5-flash"}
    with pytest.raises(ValidationError, match="auth"):
        GeminiConfig(**config_kwargs)
+
+
def test_can_deserialize_gemini_config_with_inline_vertex_auth_component() -> None:
    """An inline Vertex AI auth component in the payload is deserialized."""
    inline_auth_payload = {
        "component_type": "GeminiVertexAIAuthConfig",
        "id": GEMINI_VERTEX_AUTH_ID,
        "name": GEMINI_VERTEX_AUTH_NAME,
    }
    payload = {
        "component_type": "GeminiConfig",
        "id": GEMINI_CONFIG_ID,
        "name": GEMINI_CONFIG_NAME,
        "model_id": "gemini-2.0-flash-lite",
        "auth": inline_auth_payload,
        "agentspec_version": AgentSpecVersionEnum.v26_2_0.value,
    }

    deserialized_llm = AgentSpecDeserializer().from_json(json.dumps(payload))

    assert isinstance(deserialized_llm, GeminiConfig)
    assert isinstance(deserialized_llm.auth, GeminiVertexAIAuthConfig)
    expected_auth = GeminiVertexAIAuthConfig(
        id=GEMINI_VERTEX_AUTH_ID,
        name=GEMINI_VERTEX_AUTH_NAME,
    )
    assert deserialized_llm.auth == expected_auth
+
+
def test_deserializing_gemini_config_without_auth_raises_error() -> None:
    """A payload missing the required ``auth`` field must fail validation."""
    payload_without_auth = {
        "component_type": "GeminiConfig",
        "id": GEMINI_CONFIG_ID,
        "name": GEMINI_CONFIG_NAME,
        "model_id": "gemini-2.0-flash-lite",
        "agentspec_version": AgentSpecVersionEnum.v26_2_0.value,
    }

    with pytest.raises(ValidationError, match="auth"):
        AgentSpecDeserializer().from_json(json.dumps(payload_without_auth))
+
+
@pytest.mark.parametrize(
    "model_id",
    [
        "gemini/gemini-2.5-flash",
        "vertex_ai/gemini-2.0-flash-lite",
    ],
)
def test_gemini_config_preserves_prefixed_model_id(model_id: str) -> None:
    """Provider-prefixed model ids survive a serialize/deserialize round trip."""
    aistudio_auth = GeminiAIStudioAuthConfig(
        id=GEMINI_AISTUDIO_AUTH_ID,
        name=GEMINI_AISTUDIO_AUTH_NAME,
    )
    llm_config = GeminiConfig(
        id=GEMINI_CONFIG_ID,
        name=GEMINI_CONFIG_NAME,
        model_id=model_id,
        auth=aistudio_auth,
    )

    serialized_llm = AgentSpecSerializer().to_json(llm_config)
    serialized_llm_as_dict = json.loads(serialized_llm)
    _assert_serialized_geminiconfig_fields(serialized_llm_as_dict, model_id=model_id)
    serialized_auth_type = serialized_llm_as_dict["auth"]["component_type"]
    assert serialized_auth_type == "GeminiAIStudioAuthConfig"

    deserialized_llm = AgentSpecDeserializer().from_json(serialized_llm)

    # The prefix (e.g. "gemini/" or "vertex_ai/") must not be stripped or
    # rewritten on either side of the round trip.
    assert llm_config.model_id == model_id
    assert deserialized_llm.model_id == model_id
+
+
def test_serializing_gemini_config_with_unsupported_version_raises_error() -> None:
    """Targeting a spec version older than 26.2.0 is rejected at serialization."""
    auth = GeminiAIStudioAuthConfig(name=GEMINI_AISTUDIO_AUTH_NAME)
    llm_config = GeminiConfig(
        name=GEMINI_CONFIG_NAME,
        model_id="gemini-2.5-flash",
        auth=auth,
    )

    serializer = AgentSpecSerializer()
    with pytest.raises(ValueError, match="Invalid agentspec_version"):
        serializer.to_dict(llm_config, agentspec_version=AgentSpecVersionEnum.v26_1_0)