diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index bc13c5c..2f39c00 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -15,7 +15,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.9", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] steps: - name: Check out code diff --git a/README.md b/README.md index 6fd3be6..2dfa787 100644 --- a/README.md +++ b/README.md @@ -163,14 +163,25 @@ The `commitai` command (which is an alias for `commitai generate`) accepts the f * Example: `commitai -c "Fix typo in documentation"` (for minor changes) * Can be combined with `-a`: `commitai -a -c "Quick fix and commit all"` +* `-p `, `--provider `: + * Specifies which LLM provider to use. + * One can use the environment variable `LLM_PROVIDER` instead. + * Defaults to `google`. + * Available options are: + * `openai` + * `anthropic` + * `google` + * `ollama` + * `-m `, `--model `: * Specifies which AI model to use. + * One can use the environment variable `LLM_MODEL` instead. * Defaults to `gemini-2.5-pro-preview-03-25`. * Ensure the corresponding API key environment variable is set. 
* Examples: - * `commitai -m gpt-4 "Use OpenAI's GPT-4"` - * `commitai -m claude-3-opus-20240229 "Use Anthropic's Claude 3 Opus"` - * `commitai -m gemini-2.5-flash-preview-04-17 "Use Google's Gemini 1.5 Flash"` + * `commitai -p openai -m gpt-4 "Use OpenAI's GPT-4"` + * `commitai -p anthropic -m claude-3-opus-20240229 "Use Anthropic's Claude 3 Opus"` + * `commitai -p google -m gemini-2.5-flash-preview-04-17 "Use Google's Gemini 2.5 Flash"` ### Creating Repository Templates @@ -210,7 +221,7 @@ commitai "Implement password reset functionality using email tokens" ```bash # Stage all changes and commit immediately using GPT-4 -commitai -a -c -m gpt-4 "Minor refactoring and cleanup" +commitai -a -c -p openai -m gpt-4 "Minor refactoring and cleanup" ``` *(Commit is created directly)* diff --git a/commitai/cli.py b/commitai/cli.py index 4618704..df79002 100644 --- a/commitai/cli.py +++ b/commitai/cli.py @@ -44,51 +44,48 @@ def _get_google_api_key() -> Optional[str]: ) -def _initialize_llm(model: str) -> BaseChatModel: +def _initialize_llm(provider: str, model: str) -> BaseChatModel: """Initializes and returns the LangChain chat model based on the model name.""" google_api_key_str = _get_google_api_key() try: - if model.startswith("gpt-"): - api_key = os.getenv("OPENAI_API_KEY") - if not api_key: - raise click.ClickException( - "Error: OPENAI_API_KEY environment variable not set." + match provider: + case "openai": + api_key = os.getenv("OPENAI_API_KEY") + if not api_key: + raise click.ClickException( + "Error: OPENAI_API_KEY environment variable not set." + ) + return ChatOpenAI(model=model, api_key=api_key, temperature=0.7) + case "anthropic": + api_key = os.getenv("ANTHROPIC_API_KEY") + if not api_key: + raise click.ClickException( + "Error: ANTHROPIC_API_KEY environment variable not set." 
+ ) + return ChatAnthropic(model_name=model, api_key=api_key, temperature=0.7) + case "google": + if ChatGoogleGenerativeAI is None: + raise click.ClickException( + "Error: 'langchain-google-genai' is not installed. " + "Run 'pip install commitai[test]' or " + "'pip install langchain-google-genai'" + ) + if not google_api_key_str: + raise click.ClickException( + "Error: Google API Key not found. Set GOOGLE_API_KEY, " + "GEMINI_API_KEY, or GOOGLE_GENERATIVE_AI_API_KEY." + ) + return ChatGoogleGenerativeAI( + model=model, + google_api_key=google_api_key_str, + temperature=0.7, + convert_system_message_to_human=True, ) - return ChatOpenAI(model=model, api_key=api_key, temperature=0.7) - - elif model.startswith("claude-"): - api_key = os.getenv("ANTHROPIC_API_KEY") - if not api_key: - raise click.ClickException( - "Error: ANTHROPIC_API_KEY environment variable not set." - ) - return ChatAnthropic(model_name=model, api_key=api_key, temperature=0.7) - - elif model.startswith("gemini-"): - if ChatGoogleGenerativeAI is None: - raise click.ClickException( - "Error: 'langchain-google-genai' is not installed. " - "Run 'pip install commitai[test]' or " - "'pip install langchain-google-genai'" - ) - if not google_api_key_str: - raise click.ClickException( - "Error: Google API Key not found. Set GOOGLE_API_KEY, " - "GEMINI_API_KEY, or GOOGLE_GENERATIVE_AI_API_KEY." 
- ) - return ChatGoogleGenerativeAI( - model=model, - google_api_key=google_api_key_str, - temperature=0.7, - convert_system_message_to_human=True, - ) - elif model.startswith("llama"): - # Ollama models (e.g., llama2, llama3) - return cast(BaseChatModel, ChatOllama(model=model, temperature=0.7)) - else: - raise click.ClickException(f"🚫 Unsupported model: {model}") - + case "ollama": + return cast(BaseChatModel, ChatOllama(model=model, temperature=0.7)) + case _: + raise click.ClickException(f"🚫 Unsupported provider: {provider}.") except Exception as e: raise click.ClickException(f"Error initializing AI model: {e}") from e @@ -187,9 +184,21 @@ def cli() -> None: is_flag=True, help="Stage all changes before generating the commit message", ) +@click.option( + "--provider", + "-p", + type=click.Choice(["openai", "anthropic", "google", "ollama"]), + envvar="LLM_PROVIDER", + default="google", + help=( + "Set the engine model provider" + " (e.g., 'openai', 'anthropic', 'google', 'ollama')." + ), +) @click.option( "--model", "-m", + envvar="LLM_MODEL", default="gemini-2.5-pro-preview-03-25", help=( "Set the engine model (e.g., 'gpt-4', 'claude-3-opus-20240229', " @@ -203,11 +212,12 @@ def generate_message( commit: bool, template: Optional[str], add: bool, + provider: str, model: str, ) -> None: explanation = " ".join(description) - llm = _initialize_llm(model) + llm = _initialize_llm(provider, model) if add: stage_all_changes() @@ -283,9 +293,21 @@ def create_template_command(template_content: Tuple[str, ...]) -> None: is_flag=True, help="Commit the changes with the generated message", ) +@click.option( + "--provider", + "-p", + type=click.Choice(["openai", "anthropic", "google", "ollama"]), + envvar="LLM_PROVIDER", + default="google", + help=( + "Set the engine model provider" + " (e.g., 'openai', 'anthropic', 'google', 'ollama')." 
+ ), +) @click.option( "--model", "-m", + envvar="LLM_MODEL", default="gemini-2.5-pro-preview-03-25", help="Set the engine model to be used.", ) diff --git a/pyproject.toml b/pyproject.toml index cf78fb9..41cb6da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ version = "1.0.5" description = "Commitai helps you generate git commit messages using AI" readme = "README.md" -requires-python = ">=3.9" +requires-python = ">=3.10" license = { file = "LICENSE" } authors = [ { name = "Luis Guilherme", email = "lgpelin92@gmail.com" }, @@ -22,7 +22,6 @@ classifiers = [ "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -93,7 +92,7 @@ skip-magic-trailing-comma = false line-ending = "auto" [tool.mypy] -python_version = "3.9" # Consistent with requires-python +python_version = "3.10" # Consistent with requires-python warn_return_any = true warn_unused_configs = true ignore_missing_imports = true diff --git a/tests/test_cli.py b/tests/test_cli.py index 8d15e51..59ce61b 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -86,6 +86,10 @@ def getenv_side_effect(key, default=None): return "fake_anthropic_key" if key == "TEMPLATE_COMMIT": return None + if key == "LLM_PROVIDER": + return "fake_llm_provider" + if key == "LLM_MODEL": + return "fake_llm_model" if key == "OLLAMA_HOST": return "fake_ollama_host" return os.environ.get(key, default) @@ -165,7 +169,9 @@ def test_generate_select_gpt4(mock_generate_deps): mock_generate_deps[ "file_open" ].return_value.read.return_value = "Generated commit message" - result = runner.invoke(cli, ["generate", "-m", "gpt-4", "Test explanation"]) + result = runner.invoke( + cli, ["generate", "-p", "openai", "-m", "gpt-4", "Test explanation"] + ) assert result.exit_code == 0, result.output 
mock_generate_deps["openai_class"].assert_called_once_with( @@ -182,7 +188,15 @@ def test_generate_select_claude(mock_generate_deps): "file_open" ].return_value.read.return_value = "Generated commit message" result = runner.invoke( - cli, ["generate", "-m", "claude-3-opus-20240229", "Test explanation"] + cli, + [ + "generate", + "-p", + "anthropic", + "-m", + "claude-3-opus-20240229", + "Test explanation", + ], ) assert result.exit_code == 0, result.output @@ -201,7 +215,9 @@ def test_generate_select_ollama(mock_generate_deps): mock_generate_deps[ "file_open" ].return_value.read.return_value = "Generated commit message" - result = runner.invoke(cli, ["generate", "-m", "llama3", "Test explanation"]) + result = runner.invoke( + cli, ["generate", "-p", "ollama", "-m", "llama3", "Test explanation"] + ) assert result.exit_code == 0, result.output mock_generate_deps["ollama_class"].assert_called_once_with( @@ -268,7 +284,9 @@ def test_generate_missing_openai_key(mock_generate_deps): """Test generate command with missing OpenAI API key.""" mock_generate_deps["getenv"].side_effect = lambda key, default=None: None runner = CliRunner() - result = runner.invoke(cli, ["generate", "-m", "gpt-4", "Test explanation"]) + result = runner.invoke( + cli, ["generate", "-p", "openai", "-m", "gpt-4", "Test explanation"] + ) assert result.exit_code == 1, result.output assert "OPENAI_API_KEY environment variable not set" in result.output @@ -279,7 +297,9 @@ def test_generate_missing_anthropic_key(mock_generate_deps): """Test generate command with missing Anthropic API key.""" mock_generate_deps["getenv"].side_effect = lambda key, default=None: None runner = CliRunner() - result = runner.invoke(cli, ["generate", "-m", "claude-3", "Test explanation"]) + result = runner.invoke( + cli, ["generate", "-p", "anthropic", "-m", "claude-3", "Test explanation"] + ) assert result.exit_code == 1, result.output assert "ANTHROPIC_API_KEY environment variable not set" in result.output