diff --git a/README.md b/README.md index 68f69cf..94d8caf 100644 --- a/README.md +++ b/README.md @@ -91,7 +91,7 @@ use { ```lua require('comment-translate').setup({ target_language = 'ja', -- Target language (default: auto-detected from system locale, fallback 'en') - translate_service = 'google', -- Currently only 'google' is supported + translate_service = 'google', -- 'google' or 'llm' hover = { enabled = true, -- Enable hover translation delay = 500, -- Additional delay (ms) after CursorHold before showing hover @@ -109,6 +109,14 @@ require('comment-translate').setup({ comment = true, -- Include comments as translation targets string = true, -- Include strings as translation targets }, + llm = { + provider = 'openai', -- 'openai' | 'anthropic' | 'gemini' | 'ollama' + api_key = nil, -- Required except provider='ollama' (can also use provider-specific env vars) + model = 'gpt-5.2', + endpoint = nil, -- Optional custom endpoint (default depends on provider) + system_prompt = nil, -- Optional custom system prompt + timeout = 20, -- curl max-time in seconds + }, keymaps = { hover = 'th', -- Hover translation hover_manual = 'tc', -- Manual hover trigger (when auto is disabled) @@ -122,6 +130,52 @@ require('comment-translate').setup({ * **target_language**: Automatically detected from system locale (`LANG`, `LANGUAGE`, `LC_ALL`) or Vim language settings. Falls back to `'en'` if detection fails. * **hover.delay**: Applied after the `CursorHold` event. Total delay is `updatetime` (Neovim option) plus `hover.delay`. +* **translate_service = 'llm'**: Supports `openai`, `anthropic`, `gemini`, and `ollama`. 
+* **API key env vars**: + * `openai`: `OPENAI_API_KEY` + * `anthropic`: `ANTHROPIC_API_KEY` + * `gemini`: `GEMINI_API_KEY` + * `ollama`: API key not required + +### LLM Translation Example + +```lua +require('comment-translate').setup({ + translate_service = 'llm', + target_language = 'ja', + llm = { + provider = 'openai', + api_key = vim.env.OPENAI_API_KEY, -- or a literal key string + model = 'gpt-5.2', + }, +}) +``` + +### Provider Examples + +```lua +-- Anthropic +llm = { + provider = 'anthropic', + model = 'claude-sonnet-4-0', + api_key = vim.env.ANTHROPIC_API_KEY, +} + +-- Gemini +llm = { + provider = 'gemini', + model = 'gemini-2.5-flash', + api_key = vim.env.GEMINI_API_KEY, +} + +-- Ollama (local) +llm = { + provider = 'ollama', + model = 'translategemma:4b', + endpoint = 'http://localhost:11434/api/chat', -- optional (default shown) +} + +``` ## Commands @@ -137,7 +191,7 @@ require('comment-translate').setup({ This plugin prioritizes transparency. To provide real-time translation, text is sent to an external translation service. -* **External transmission**: Translation uses the unofficial Google Translate HTTP endpoint (`translate.googleapis.com`) via `curl`. +* **External transmission**: Translation sends text to the configured external service (`google` or `llm`) via `curl`. * **What is sent**: The selected text or detected comment/string content. If it contains **personal data**, **credentials**, **internal code**, or other sensitive information, it may be transmitted outside your environment. * **Cache behavior**: The built-in cache is **in-memory only**. No files are written by the plugin, and the cache is cleared when Neovim exits. * **Control**: Automatic hover translation can be disabled, and all translation features are user-controlled. 
@@ -177,6 +231,12 @@ make fmt make fmt-check ``` +* Run tests: + +```sh +make test +``` + ## License MIT diff --git a/doc/comment-translate.txt b/doc/comment-translate.txt index 81e3c92..a9c56bf 100644 --- a/doc/comment-translate.txt +++ b/doc/comment-translate.txt @@ -14,17 +14,17 @@ REQUIREMENTS *comment-translate-requirements* - nvim-treesitter (recommended) - curl command (for translation API) - Internet connection -- Translation uses the unofficial Google Translate HTTP endpoint via curl. - Network policy changes (proxy/firewall) or upstream API changes may break - translation; inform users in restricted environments. +- Translation uses the configured external service (`google` or `llm`) via + curl. Network policy changes (proxy/firewall) or upstream API changes may + break translation; inform users in restricted environments. PRIVACY / DATA HANDLING *comment-translate-privacy* {comment-translate.nvim} sends the text you translate (comments, strings, or visual selections) to an external translation service over the network. -- External transmission: Translation uses the unofficial Google Translate HTTP - endpoint (translate.googleapis.com) via curl. +- External transmission: Translation sends text to the configured external + service (`google` or `llm`) via curl. - What is sent: The target text content itself. If it contains personal data, credentials, internal code, or other sensitive information, it may be sent outside your environment. @@ -106,9 +106,39 @@ Options: Default: System locale or 'en' *comment-translate.translate_service* - Translation service to use: currently only 'google' is available. + Translation service to use: 'google' or 'llm'. Default: 'google' +*comment-translate.llm.provider* + LLM provider when translate_service = 'llm'. + Supported: 'openai', 'anthropic', 'gemini', 'ollama' + Default: 'openai' + +*comment-translate.llm.api_key* + API key for LLM translation service. + Required for all providers except 'ollama'. 
+ Environment variable fallback: + - openai: OPENAI_API_KEY + - anthropic: ANTHROPIC_API_KEY + - gemini: GEMINI_API_KEY + Default: nil + +*comment-translate.llm.model* + Model name for LLM translation. + Default: 'gpt-5.2' + +*comment-translate.llm.endpoint* + Optional endpoint override. If nil, provider-specific defaults are used. + Default: nil + +*comment-translate.llm.system_prompt* + Optional system prompt for translation behavior. + Default: nil (plugin uses internal prompt) + +*comment-translate.llm.timeout* + HTTP timeout (seconds) used by curl for LLM translation requests. + Default: 20 + *comment-translate.hover.enabled* Enable automatic hover translation. Default: true diff --git a/lua/comment-translate/config.lua b/lua/comment-translate/config.lua index 0ec9a2c..5557740 100644 --- a/lua/comment-translate/config.lua +++ b/lua/comment-translate/config.lua @@ -6,6 +6,7 @@ ---@field cache CommentTranslateCacheConfig ---@field max_length number ---@field targets CommentTranslateTargetsConfig +---@field llm CommentTranslateLLMConfig ---@class CommentTranslateHoverConfig ---@field enabled boolean @@ -23,6 +24,14 @@ ---@field comment boolean ---@field string boolean +---@class CommentTranslateLLMConfig +---@field provider string +---@field api_key string? +---@field model string +---@field endpoint string? +---@field system_prompt string? +---@field timeout number + ---@class CommentTranslateKeymapsConfig ---@field hover? string|false ---@field hover_manual? string|false Keymap for manual hover when auto-hover is disabled @@ -30,6 +39,16 @@ ---@field toggle? 
string|false local M = {} +local SUPPORTED_SERVICES = { + google = true, + llm = true, +} +local SUPPORTED_LLM_PROVIDERS = { + openai = true, + anthropic = true, + gemini = true, + ollama = true, +} ---@return string local function get_default_language() @@ -67,6 +86,14 @@ local default_config = { comment = true, string = true, }, + llm = { + provider = 'openai', + api_key = nil, + model = 'gpt-5.2', + endpoint = nil, + system_prompt = nil, + timeout = 20, + }, keymaps = { hover = 'th', hover_manual = 'tc', @@ -102,9 +129,21 @@ local function validate(user_config) cache = { user_config.cache, 'table', true }, max_length = { user_config.max_length, 'number', true }, targets = { user_config.targets, 'table', true }, + llm = { user_config.llm, 'table', true }, keymaps = { user_config.keymaps, 'table', true }, }) + if user_config.translate_service and not SUPPORTED_SERVICES[user_config.translate_service] then + vim.notify( + string.format( + "comment-translate: unsupported translate_service '%s', defaulting to 'google'", + tostring(user_config.translate_service) + ), + vim.log.levels.WARN + ) + user_config.translate_service = 'google' + end + if user_config.hover then warn_unknown('hover', user_config.hover, { enabled = true, delay = true, auto = true }) vim.validate({ @@ -145,6 +184,44 @@ local function validate(user_config) }) end + if user_config.llm then + warn_unknown('llm', user_config.llm, { + provider = true, + api_key = true, + model = true, + endpoint = true, + system_prompt = true, + timeout = true, + }) + vim.validate({ + ['llm.provider'] = { user_config.llm.provider, 'string', true }, + ['llm.api_key'] = { user_config.llm.api_key, 'string', true }, + ['llm.model'] = { user_config.llm.model, 'string', true }, + ['llm.endpoint'] = { user_config.llm.endpoint, 'string', true }, + ['llm.system_prompt'] = { user_config.llm.system_prompt, 'string', true }, + ['llm.timeout'] = { user_config.llm.timeout, 'number', true }, + }) + + if user_config.llm.provider and 
not SUPPORTED_LLM_PROVIDERS[user_config.llm.provider] then + vim.notify( + string.format( + "comment-translate: unsupported llm.provider '%s', defaulting to 'openai'", + tostring(user_config.llm.provider) + ), + vim.log.levels.WARN + ) + user_config.llm.provider = 'openai' + end + + if user_config.llm.timeout and user_config.llm.timeout <= 0 then + vim.notify( + 'comment-translate: llm.timeout must be > 0, defaulting to 20', + vim.log.levels.WARN + ) + user_config.llm.timeout = 20 + end + end + if user_config.keymaps then warn_unknown( 'keymaps', diff --git a/lua/comment-translate/health.lua b/lua/comment-translate/health.lua index 4fbcff0..70bf719 100644 --- a/lua/comment-translate/health.lua +++ b/lua/comment-translate/health.lua @@ -2,6 +2,7 @@ ---Run with :checkhealth comment-translate local M = {} +local utils = require('comment-translate.utils') ---@param module_name string ---@return boolean @@ -64,6 +65,40 @@ function M.check() vim.health.ok('Plugin is configured') vim.health.info('Target language: ' .. (config.config.target_language or 'not set')) vim.health.info('Translate service: ' .. (config.config.translate_service or 'not set')) + + if config.config.translate_service == 'llm' then + local provider = (config.config.llm and config.config.llm.provider) or 'openai' + vim.health.info('LLM provider: ' .. 
provider) + + if provider == 'ollama' then + vim.health.ok('LLM API key is not required for ollama provider') + else + local env_keys = { + openai = { 'OPENAI_API_KEY' }, + anthropic = { 'ANTHROPIC_API_KEY' }, + gemini = { 'GEMINI_API_KEY' }, + } + local configured = config.config.llm and config.config.llm.api_key + local has_key = configured and utils.trim(configured) ~= '' + if not has_key then + for _, env_name in ipairs(env_keys[provider] or {}) do + local value = vim.env[env_name] + if value and utils.trim(value) ~= '' then + has_key = true + break + end + end + end + + if has_key then + vim.health.ok('LLM API key is configured') + else + vim.health.error('LLM API key is missing', { + 'Set `llm.api_key` in setup() or required env var for provider', + }) + end + end + end else vim.health.warn('Plugin setup() has not been called', { "Call require('comment-translate').setup({}) in your config", diff --git a/lua/comment-translate/translate/init.lua b/lua/comment-translate/translate/init.lua index 2fb27d3..04589ab 100644 --- a/lua/comment-translate/translate/init.lua +++ b/lua/comment-translate/translate/init.lua @@ -2,6 +2,7 @@ local M = {} M.SERVICES = { google = 'google', + llm = 'llm', } ---@param service_name? string @@ -13,6 +14,8 @@ local function get_service(service_name) if service_name == M.SERVICES.google then return require('comment-translate.translate.google'), nil + elseif service_name == M.SERVICES.llm then + return require('comment-translate.translate.llm'), nil else return nil, 'Unknown translate service: ' .. 
tostring(service_name) end @@ -47,7 +50,7 @@ end ---@return string[] function M.get_available_services() - return { M.SERVICES.google } + return { M.SERVICES.google, M.SERVICES.llm } end return M diff --git a/lua/comment-translate/translate/llm.lua b/lua/comment-translate/translate/llm.lua new file mode 100644 index 0000000..ebf3817 --- /dev/null +++ b/lua/comment-translate/translate/llm.lua @@ -0,0 +1,411 @@ +local M = {} +local cache = require('comment-translate.translate.cache') +local utils = require('comment-translate.utils') + +local SUPPORTED_PROVIDERS = { + openai = true, + anthropic = true, + gemini = true, + ollama = true, +} + +---@return boolean, table? +local function get_plenary_job() + local ok, Job = pcall(require, 'plenary.job') + if not ok then + return false, nil + end + return true, Job +end + +---@return boolean +local curl_available = nil +local function check_curl() + if curl_available == nil then + curl_available = vim.fn.executable('curl') == 1 + end + return curl_available +end + +---@param provider string +---@param llm_config table +---@return string? 
+local function resolve_api_key(provider, llm_config) + local api_key = llm_config.api_key + + if api_key and utils.trim(api_key) ~= '' then + return api_key + end + + local env_candidates = { + openai = { 'OPENAI_API_KEY' }, + anthropic = { 'ANTHROPIC_API_KEY' }, + gemini = { 'GEMINI_API_KEY' }, + ollama = {}, + } + + for _, env_name in ipairs(env_candidates[provider] or {}) do + local value = vim.env[env_name] + if value and utils.trim(value) ~= '' then + return value + end + end + + return nil +end + +---@param provider string +---@param llm_config table +---@return string +local function resolve_endpoint(provider, llm_config) + if llm_config.endpoint and utils.trim(llm_config.endpoint) ~= '' then + return llm_config.endpoint + end + + if provider == 'openai' then + return 'https://api.openai.com/v1/chat/completions' + end + if provider == 'anthropic' then + return 'https://api.anthropic.com/v1/messages' + end + if provider == 'gemini' then + return string.format( + 'https://generativelanguage.googleapis.com/v1beta/models/%s:generateContent', + llm_config.model + ) + end + return 'http://localhost:11434/api/chat' +end + +---@param provider string +---@param response table +---@return string? 
+local function extract_translated_text(provider, response) + if type(response) ~= 'table' then + return nil + end + + if provider == 'anthropic' then + local content = response.content + if type(content) ~= 'table' then + return nil + end + local chunks = {} + for _, item in ipairs(content) do + if type(item) == 'table' and type(item.text) == 'string' and item.text ~= '' then + table.insert(chunks, item.text) + end + end + if #chunks > 0 then + return utils.trim(table.concat(chunks, '')) + end + return nil + end + + if provider == 'gemini' then + local candidates = response.candidates + if type(candidates) ~= 'table' or type(candidates[1]) ~= 'table' then + return nil + end + local content = candidates[1].content + if type(content) ~= 'table' or type(content.parts) ~= 'table' then + return nil + end + local chunks = {} + for _, part in ipairs(content.parts) do + if type(part) == 'table' and type(part.text) == 'string' and part.text ~= '' then + table.insert(chunks, part.text) + end + end + if #chunks > 0 then + return utils.trim(table.concat(chunks, '')) + end + return nil + end + + if provider == 'ollama' then + local message = response.message + if type(message) ~= 'table' or type(message.content) ~= 'string' then + return nil + end + return utils.trim(message.content) + end + + -- OpenAI response format + local choices = response.choices + if type(choices) ~= 'table' or type(choices[1]) ~= 'table' then + return nil + end + + local message = choices[1].message + if type(message) ~= 'table' then + return nil + end + + local content = message.content + if type(content) == 'string' then + return utils.trim(content) + end + + if type(content) == 'table' then + local chunks = {} + for _, item in ipairs(content) do + if type(item) == 'table' and type(item.text) == 'string' and item.text ~= '' then + table.insert(chunks, item.text) + end + end + if #chunks > 0 then + return utils.trim(table.concat(chunks, '')) + end + end + + return nil +end + +---@param provider 
string +---@param model string +---@param system_prompt string +---@param user_prompt string +---@return table +local function build_payload(provider, model, system_prompt, user_prompt) + if provider == 'anthropic' then + return { + model = model, + max_tokens = 1024, + temperature = 0, + system = system_prompt, + messages = { + { role = 'user', content = user_prompt }, + }, + } + end + + if provider == 'gemini' then + return { + systemInstruction = { + parts = { + { text = system_prompt }, + }, + }, + contents = { + { + parts = { + { text = user_prompt }, + }, + }, + }, + generationConfig = { + temperature = 0, + }, + } + end + + if provider == 'ollama' then + return { + model = model, + stream = false, + messages = { + { role = 'system', content = system_prompt }, + { role = 'user', content = user_prompt }, + }, + options = { + temperature = 0, + }, + } + end + + return { + model = model, + temperature = 0, + messages = { + { role = 'system', content = system_prompt }, + { role = 'user', content = user_prompt }, + }, + } +end + +---@param provider string +---@param api_key string? +---@return string[] +local function build_headers(provider, api_key) + local headers = { + 'Content-Type: application/json', + } + + if provider == 'openai' then + if api_key then + table.insert(headers, 'Authorization: Bearer ' .. api_key) + end + elseif provider == 'anthropic' then + if api_key then + table.insert(headers, 'x-api-key: ' .. api_key) + end + table.insert(headers, 'anthropic-version: 2023-06-01') + elseif provider == 'gemini' then + if api_key then + table.insert(headers, 'x-goog-api-key: ' .. api_key) + end + end + + return headers +end + +---@param text string +---@param target_lang string +---@param source_lang? string +---@param callback fun(result: string?) 
+function M.translate(text, target_lang, source_lang, callback) + if not callback then + error('callback is required') + end + + local cached = cache.get(text, target_lang, source_lang) + if cached then + vim.schedule(function() + callback(cached) + end) + return + end + + if utils.is_empty(text) then + vim.schedule(function() + callback('') + end) + return + end + + local config = require('comment-translate.config') + if #text > config.config.max_length then + vim.schedule(function() + callback(nil) + end) + return + end + + local provider = config.config.llm.provider or 'openai' + if not SUPPORTED_PROVIDERS[provider] then + vim.schedule(function() + vim.notify( + 'comment-translate: Unsupported LLM provider: ' .. tostring(provider), + vim.log.levels.ERROR + ) + callback(nil) + end) + return + end + + local api_key = resolve_api_key(provider, config.config.llm) + if provider ~= 'ollama' and not api_key then + vim.schedule(function() + vim.notify( + 'comment-translate: LLM API key is missing for provider ' .. provider, + vim.log.levels.ERROR + ) + callback(nil) + end) + return + end + + if not check_curl() then + vim.schedule(function() + vim.notify('comment-translate: curl is required for translation', vim.log.levels.ERROR) + callback(nil) + end) + return + end + + local ok, Job = get_plenary_job() + if not ok then + vim.schedule(function() + vim.notify( + 'comment-translate: plenary.nvim is required for translation', + vim.log.levels.ERROR + ) + callback(nil) + end) + return + end + + local normalized_target_lang = utils.normalize_lang_code(target_lang) + local normalized_source_lang = source_lang and utils.normalize_lang_code(source_lang) or 'auto' + local system_prompt = config.config.llm.system_prompt + or 'You are a translation engine. Return only the translated text. Do not add explanations.' + local user_prompt = string.format( + 'Translate the following text from %s to %s. 
Return only translated text.\n\n%s', + normalized_source_lang, + normalized_target_lang, + text + ) + local endpoint = resolve_endpoint(provider, config.config.llm) + local payload = build_payload(provider, config.config.llm.model, system_prompt, user_prompt) + local headers = build_headers(provider, api_key) + + local request_body = vim.fn.json_encode(payload) + + local stderr_output = {} + local curl_args = { + '--silent', + '--show-error', + '--fail', + '--max-time', + tostring(config.config.llm.timeout), + '-X', + 'POST', + } + + for _, header in ipairs(headers) do + table.insert(curl_args, '-H') + table.insert(curl_args, header) + end + + table.insert(curl_args, '-d') + table.insert(curl_args, request_body) + table.insert(curl_args, endpoint) + + Job:new({ + command = 'curl', + args = curl_args, + on_stderr = function(_, data) + if data and data ~= '' then + table.insert(stderr_output, data) + end + end, + on_exit = function(j, exit_code) + vim.schedule(function() + if exit_code ~= 0 then + local err_msg = 'comment-translate: LLM translation failed (curl error)' + if #stderr_output > 0 then + err_msg = err_msg .. ': ' .. 
table.concat(stderr_output, ' ') + end + vim.notify(err_msg, vim.log.levels.WARN) + callback(nil) + return + end + + local result = table.concat(j:result(), '') + if not result or result == '' then + callback(nil) + return + end + + local parse_ok, json = pcall(vim.fn.json_decode, result) + if not parse_ok or not json then + vim.notify('comment-translate: Failed to parse LLM response', vim.log.levels.WARN) + callback(nil) + return + end + + local translated_text = extract_translated_text(provider, json) + if not translated_text or translated_text == '' then + callback(nil) + return + end + + cache.set(text, translated_text, normalized_target_lang, normalized_source_lang) + callback(translated_text) + end) + end, + }):start() +end + +return M diff --git a/tests/commands_spec.lua b/tests/commands_spec.lua index b6fd3ec..28f17ec 100644 --- a/tests/commands_spec.lua +++ b/tests/commands_spec.lua @@ -16,6 +16,7 @@ describe('commands', function() package.loaded['comment-translate.translate'] = nil package.loaded['comment-translate.translate.cache'] = nil package.loaded['comment-translate.translate.google'] = nil + package.loaded['comment-translate.translate.llm'] = nil package.loaded['comment-translate.ui'] = nil package.loaded['comment-translate.ui.hover'] = nil package.loaded['comment-translate.ui.virtual_text'] = nil @@ -288,6 +289,7 @@ describe('immersive multi-buffer behavior', function() package.loaded['comment-translate.translate'] = nil package.loaded['comment-translate.translate.cache'] = nil package.loaded['comment-translate.translate.google'] = nil + package.loaded['comment-translate.translate.llm'] = nil package.loaded['comment-translate.ui'] = nil package.loaded['comment-translate.ui.hover'] = nil package.loaded['comment-translate.ui.virtual_text'] = nil diff --git a/tests/config_spec.lua b/tests/config_spec.lua index 744c8f5..bf78385 100644 --- a/tests/config_spec.lua +++ b/tests/config_spec.lua @@ -20,6 +20,11 @@ describe('config', function() 
assert.is_false(config.config.immersive.enabled) assert.is_true(config.config.cache.enabled) assert.equals(1000, config.config.cache.max_entries) + assert.equals('openai', config.config.llm.provider) + assert.is_nil(config.config.llm.api_key) + assert.equals('gpt-5.2', config.config.llm.model) + assert.is_nil(config.config.llm.endpoint) + assert.equals(20, config.config.llm.timeout) end) it('should merge user config with defaults', function() @@ -29,13 +34,20 @@ describe('config', function() hover = { delay = 1000, }, + llm = { + provider = 'anthropic', + model = 'claude-sonnet-4-0', + }, }) assert.equals('ja', config.config.target_language) assert.equals(1000, config.config.hover.delay) + assert.equals('anthropic', config.config.llm.provider) + assert.equals('claude-sonnet-4-0', config.config.llm.model) -- Default values should be preserved assert.is_true(config.config.hover.enabled) assert.is_true(config.config.hover.auto) + assert.equals(20, config.config.llm.timeout) end) it('should handle nested config correctly', function() @@ -55,6 +67,24 @@ describe('config', function() assert.is_not_nil(config.config.hover) assert.is_not_nil(config.config.immersive) end) + + it('should fallback unsupported translate_service to google', function() + config.setup({ + translate_service = 'invalid-service', + }) + + assert.equals('google', config.config.translate_service) + end) + + it('should fallback unsupported llm.provider to openai', function() + config.setup({ + llm = { + provider = 'invalid-provider', + }, + }) + + assert.equals('openai', config.config.llm.provider) + end) end) describe('get', function() diff --git a/tests/health_spec.lua b/tests/health_spec.lua new file mode 100644 index 0000000..9a0c007 --- /dev/null +++ b/tests/health_spec.lua @@ -0,0 +1,147 @@ +---@diagnostic disable: undefined-global +describe('health', function() + local health + local config + local captured + local original_health + local original_env = {} + + local function restore_env() + for 
key, value in pairs(original_env) do + vim.env[key] = value + end + original_env = {} + end + + local function set_env(key, value) + if original_env[key] == nil then + original_env[key] = vim.env[key] + end + vim.env[key] = value + end + + before_each(function() + package.loaded['comment-translate.health'] = nil + package.loaded['comment-translate.config'] = nil + package.loaded['comment-translate'] = nil + package.loaded['plenary'] = true + package.loaded['nvim-treesitter'] = true + + config = require('comment-translate.config') + config.reset() + health = require('comment-translate.health') + + captured = { + ok = {}, + error = {}, + warn = {}, + info = {}, + } + original_health = vim.health + vim.health = { + start = function() end, + ok = function(msg) + table.insert(captured.ok, msg) + end, + error = function(msg) + table.insert(captured.error, msg) + end, + warn = function(msg) + table.insert(captured.warn, msg) + end, + info = function(msg) + table.insert(captured.info, msg) + end, + } + end) + + after_each(function() + vim.health = original_health + restore_env() + end) + + it('should report missing api key for llm openai provider', function() + config.setup({ + translate_service = 'llm', + llm = { + provider = 'openai', + api_key = nil, + }, + }) + set_env('OPENAI_API_KEY', nil) + + health.check() + + local found = false + for _, msg in ipairs(captured.error) do + if msg:match('LLM API key is missing') then + found = true + break + end + end + assert.is_true(found) + end) + + it('should report ok when ollama provider is used without api key', function() + config.setup({ + translate_service = 'llm', + llm = { + provider = 'ollama', + }, + }) + + health.check() + + local found = false + for _, msg in ipairs(captured.ok) do + if msg:match('not required for ollama') then + found = true + break + end + end + assert.is_true(found) + end) + + it('should report ok when provider key exists in env', function() + config.setup({ + translate_service = 'llm', + llm = { 
+ provider = 'anthropic', + }, + }) + set_env('ANTHROPIC_API_KEY', 'anthropic-env-key') + + health.check() + + local found = false + for _, msg in ipairs(captured.ok) do + if msg == 'LLM API key is configured' then + found = true + break + end + end + assert.is_true(found) + end) + + it('should treat whitespace api key as missing', function() + config.setup({ + translate_service = 'llm', + llm = { + provider = 'openai', + api_key = ' ', + }, + }) + set_env('OPENAI_API_KEY', nil) + + health.check() + + local found = false + for _, msg in ipairs(captured.error) do + if msg:match('LLM API key is missing') then + found = true + break + end + end + assert.is_true(found) + end) +end) diff --git a/tests/llm_spec.lua b/tests/llm_spec.lua new file mode 100644 index 0000000..8d7f898 --- /dev/null +++ b/tests/llm_spec.lua @@ -0,0 +1,420 @@ +---@diagnostic disable: undefined-global +describe('translate.llm', function() + local config + local llm + local cache + local original_notify + local original_executable + local original_env = {} + local notify_messages + local job_state + + local function restore_env() + for key, value in pairs(original_env) do + vim.env[key] = value + end + original_env = {} + end + + local function set_env(key, value) + if original_env[key] == nil then + original_env[key] = vim.env[key] + end + vim.env[key] = value + end + + local function setup_fake_job() + job_state = { + new_calls = 0, + exit_code = 0, + stdout = '', + stderr_lines = {}, + last_opts = nil, + } + + local FakeJob = {} + function FakeJob:new(opts) + job_state.new_calls = job_state.new_calls + 1 + job_state.last_opts = opts + return setmetatable({ + _opts = opts, + }, { + __index = { + result = function() + return { job_state.stdout } + end, + start = function(self) + for _, line in ipairs(job_state.stderr_lines or {}) do + self._opts.on_stderr(nil, line) + end + self._opts.on_exit(self, job_state.exit_code) + end, + }, + }) + end + + package.loaded['plenary.job'] = FakeJob + end + + 
-- Synchronously waits (up to 1s) for an async callback-style API.
-- `fn` receives a callback; the callback's single argument is returned.
-- Fails the test if the callback is never invoked within the timeout.
local function await_callback(fn)
  local done = false
  local value = nil
  fn(function(result)
    value = result
    done = true
  end)
  assert.is_true(vim.wait(1000, function()
    return done
  end))
  return value
end

-- Finds the curl `-d` flag in `args` and JSON-decodes the payload that
-- follows it. Returns nil when no request body was sent.
local function extract_request_body(args)
  for i = 1, #args do
    if args[i] == '-d' then
      return vim.fn.json_decode(args[i + 1])
    end
  end
  return nil
end

-- True when `args` contains an element exactly equal to `expected`.
local function has_arg(args, expected)
  for _, arg in ipairs(args) do
    if arg == expected then
      return true
    end
  end
  return false
end

-- True when any element of `args` starts with `prefix`. Used to assert the
-- *absence* of a header family (e.g. any "Authorization: Bearer ..."),
-- which exact-match has_arg() cannot express.
local function has_arg_with_prefix(args, prefix)
  for _, arg in ipairs(args) do
    if vim.startswith(arg, prefix) then
      return true
    end
  end
  return false
end

-- Returns the final curl argument, which is always the request URL.
local function last_arg(args)
  return args[#args]
end

before_each(function()
  -- Force fresh module state so each test sees a clean config/cache and the
  -- fake plenary.job installed below.
  package.loaded['comment-translate.config'] = nil
  package.loaded['comment-translate.translate.cache'] = nil
  package.loaded['comment-translate.translate.llm'] = nil
  package.loaded['plenary.job'] = nil

  setup_fake_job()

  config = require('comment-translate.config')
  config.reset()
  cache = require('comment-translate.translate.cache')
  cache.clear()
  llm = require('comment-translate.translate.llm')

  -- Capture vim.notify calls for assertion; restored in after_each.
  notify_messages = {}
  original_notify = vim.notify
  original_executable = vim.fn.executable
  vim.notify = function(msg, level)
    table.insert(notify_messages, { msg = msg, level = level })
  end
end)

after_each(function()
  vim.notify = original_notify
  vim.fn.executable = original_executable
  restore_env()
end)

it('should build openai request and parse response', function()
  config.setup({
    llm = {
      provider = 'openai',
      api_key = 'openai-key',
      model = 'gpt-5.2',
      timeout = 12,
    },
  })
  job_state.stdout = vim.fn.json_encode({
    choices = {
      { message = { content = 'こんにちは' } },
    },
  })

  local result = await_callback(function(cb)
    llm.translate('hello', 'ja', 'en', cb)
  end)

  assert.equals('こんにちは', result)
  assert.equals(1, job_state.new_calls)
  assert.equals('curl', job_state.last_opts.command)
  local args = job_state.last_opts.args
  -- Default OpenAI endpoint, bearer auth, and system+user message layout.
  assert.equals('https://api.openai.com/v1/chat/completions', last_arg(args))
  assert.is_true(has_arg(args, 'Authorization: Bearer openai-key'))
  local body = extract_request_body(args)
  assert.equals('gpt-5.2', body.model)
  assert.equals('system', body.messages[1].role)
  assert.equals('user', body.messages[2].role)
end)

it('should use OPENAI_API_KEY for openai provider', function()
  set_env('OPENAI_API_KEY', 'env-openai-key')
  config.setup({
    llm = {
      provider = 'openai',
      model = 'my-model',
      endpoint = 'https://example.com/v1/chat/completions',
    },
  })
  job_state.stdout = vim.fn.json_encode({
    choices = {
      { message = { content = '訳文' } },
    },
  })

  local result = await_callback(function(cb)
    llm.translate('text', 'ja', nil, cb)
  end)

  assert.equals('訳文', result)
  local args = job_state.last_opts.args
  -- Custom endpoint overrides the default; key comes from the env var.
  assert.equals('https://example.com/v1/chat/completions', last_arg(args))
  assert.is_true(has_arg(args, 'Authorization: Bearer env-openai-key'))
end)

it('should build anthropic request and parse response', function()
  config.setup({
    llm = {
      provider = 'anthropic',
      api_key = 'anthropic-key',
      model = 'claude-sonnet-4-0',
    },
  })
  job_state.stdout = vim.fn.json_encode({
    content = {
      { type = 'text', text = '翻訳結果' },
    },
  })

  local result = await_callback(function(cb)
    llm.translate('source', 'ja', 'en', cb)
  end)

  assert.equals('翻訳結果', result)
  local args = job_state.last_opts.args
  -- Anthropic uses x-api-key + anthropic-version headers, not bearer auth.
  assert.equals('https://api.anthropic.com/v1/messages', last_arg(args))
  assert.is_true(has_arg(args, 'x-api-key: anthropic-key'))
  assert.is_true(has_arg(args, 'anthropic-version: 2023-06-01'))
  local body = extract_request_body(args)
  assert.equals('claude-sonnet-4-0', body.model)
  assert.equals('user', body.messages[1].role)
end)

it('should build gemini request and parse response', function()
  config.setup({
    llm = {
      provider = 'gemini',
      api_key = 'gemini-key',
      model = 'gemini-2.5-flash',
    },
  })
  job_state.stdout = vim.fn.json_encode({
    candidates = {
      {
        content = {
          parts = {
            { text = 'Gemini訳' },
          },
        },
      },
    },
  })

  local result = await_callback(function(cb)
    llm.translate('source', 'ja', nil, cb)
  end)

  assert.equals('Gemini訳', result)
  local args = job_state.last_opts.args
  assert.matches(
    '^https://generativelanguage%.googleapis%.com/.+:generateContent$',
    last_arg(args)
  )
  -- Gemini auth travels in x-goog-api-key only; the key must not leak into
  -- other providers' header styles.
  assert.is_true(has_arg(args, 'x-goog-api-key: gemini-key'))
  assert.is_false(has_arg(args, 'Authorization: Bearer gemini-key'))
  assert.is_false(has_arg(args, 'x-api-key: gemini-key'))
  local body = extract_request_body(args)
  assert.equals(
    'Translate the following text from auto to ja. Return only translated text.\n\nsource',
    body.contents[1].parts[1].text
  )
end)

it('should use GEMINI_API_KEY for gemini provider', function()
  set_env('GEMINI_API_KEY', 'env-gemini-key')
  config.setup({
    llm = {
      provider = 'gemini',
      model = 'gemini-2.5-flash',
    },
  })
  job_state.stdout = vim.fn.json_encode({
    candidates = {
      {
        content = {
          parts = {
            { text = 'Gemini訳' },
          },
        },
      },
    },
  })

  local result = await_callback(function(cb)
    llm.translate('source', 'ja', nil, cb)
  end)

  assert.equals('Gemini訳', result)
  local args = job_state.last_opts.args
  assert.matches(
    '^https://generativelanguage%.googleapis%.com/.+:generateContent$',
    last_arg(args)
  )
  assert.is_true(has_arg(args, 'x-goog-api-key: env-gemini-key'))
end)

it('should allow ollama without api key', function()
  config.setup({
    llm = {
      provider = 'ollama',
      model = 'translategemma:4b',
    },
  })
  job_state.stdout = vim.fn.json_encode({
    message = {
      content = 'ローカル翻訳',
    },
  })

  local result = await_callback(function(cb)
    llm.translate('source', 'ja', nil, cb)
  end)

  assert.equals('ローカル翻訳', result)
  local args = job_state.last_opts.args
  assert.equals('http://localhost:11434/api/chat', last_arg(args))
  -- Prefix match: assert that NO bearer Authorization header of any kind
  -- was sent (exact-match has_arg would only catch an empty key).
  assert.is_false(has_arg_with_prefix(args, 'Authorization: Bearer'))
end)

it('should fallback unsupported provider to openai and fail without api key', function()
  set_env('OPENAI_API_KEY', nil)
  config.setup({
    llm = {
      provider = 'invalid-provider',
    },
  })

  local result = await_callback(function(cb)
    llm.translate('source', 'ja', nil, cb)
  end)

  assert.is_nil(result)
  -- No HTTP call is attempted: a warning about the bad provider is emitted
  -- first, then the openai fallback fails on the missing key.
  assert.equals(0, job_state.new_calls)
  assert.matches("unsupported llm.provider 'invalid%-provider'", notify_messages[1].msg)
  assert.matches('API key is missing for provider openai', notify_messages[2].msg)
end)

it('should fail when api key is missing for non-ollama provider', function()
  config.setup({
    llm = {
      provider = 'anthropic',
    },
  })

  local result = await_callback(function(cb)
    llm.translate('source', 'ja', nil, cb)
  end)

  assert.is_nil(result)
  assert.equals(0, job_state.new_calls)
  assert.matches('API key is missing', notify_messages[1].msg)
end)

it('should fail when curl is not installed', function()
  config.setup({
    llm = {
      provider = 'openai',
      api_key = 'openai-key',
    },
  })
  -- Pretend curl is absent; everything else resolves normally.
  vim.fn.executable = function(bin)
    if bin == 'curl' then
      return 0
    end
    return original_executable(bin)
  end

  local result = await_callback(function(cb)
    llm.translate('source', 'ja', nil, cb)
  end)

  assert.is_nil(result)
  assert.equals(0, job_state.new_calls)
  assert.matches('curl is required for translation', notify_messages[1].msg)
end)

it('should return nil when curl exits with error', function()
  config.setup({
    llm = {
      provider = 'openai',
      api_key = 'openai-key',
    },
  })
  -- Simulate a curl HTTP failure (exit code 22 = HTTP error with -f).
  job_state.exit_code = 22
  job_state.stderr_lines = { 'curl failed' }
  job_state.stdout = ''

  local result = await_callback(function(cb)
    llm.translate('source', 'ja', nil, cb)
  end)

  assert.is_nil(result)
  -- The notification must carry both a generic prefix and the stderr detail.
  assert.matches('LLM translation failed', notify_messages[1].msg)
  assert.matches('curl failed', notify_messages[1].msg)
end)

it('should return nil for invalid json response', function()
  config.setup({
    llm = {
      provider = 'openai',
      api_key = 'openai-key',
    },
  })
  job_state.stdout = 'not-json'

  local result = await_callback(function(cb)
    llm.translate('source', 'ja', nil, cb)
  end)

  assert.is_nil(result)
  assert.matches('Failed to parse LLM response', notify_messages[1].msg)
end)

it('should reuse cache and avoid duplicate requests', function()
  config.setup({
    llm = {
      provider = 'openai',
      api_key = 'openai-key',
    },
  })
  job_state.stdout = vim.fn.json_encode({
    choices = {
      { message = { content = 'cached-translation' } },
    },
  })

  local first = await_callback(function(cb)
    llm.translate('cache-me', 'ja', nil, cb)
  end)
  local second = await_callback(function(cb)
    llm.translate('cache-me', 'ja', nil, cb)
  end)

  -- Identical text/target pairs must hit the in-memory cache: one HTTP
  -- round-trip total, same translation both times.
  assert.equals('cached-translation', first)
  assert.equals('cached-translation', second)
  assert.equals(1, job_state.new_calls)
end)
end)

-- ===========================================================================
-- tests/translate_spec.lua (new file in this change set)
-- ===========================================================================
---@diagnostic disable: undefined-global
describe('translate', function()
  local translate

  before_each(function()
    -- Reload the translate facade and its backends from scratch.
    package.loaded['comment-translate.config'] = nil
    package.loaded['comment-translate.translate'] = nil
    package.loaded['comment-translate.translate.google'] = nil
    package.loaded['comment-translate.translate.llm'] = nil
    package.loaded['comment-translate.translate.cache'] = nil

    local config = require('comment-translate.config')
    config.reset()
    translate = require('comment-translate.translate')
  end)

  it('should include google and llm in available services', function()
    local services = translate.get_available_services()
    local found_google = false
    local found_llm = false

    -- Membership check only: the service list order is not part of the API.
    for _, service in ipairs(services) do
      if service == 'google' then
        found_google = true
      end
      if service == 'llm' then
        found_llm = true
      end
    end

    assert.is_true(found_google)
    assert.is_true(found_llm)
  end)
end)