From d64773572cf2f9ef43984b79d3cf6e96ed02f181 Mon Sep 17 00:00:00 2001
From: liyi
Date: Fri, 27 Mar 2026 15:49:00 +0800
Subject: [PATCH] feat: add ModelScope as a new LLM provider

Add ModelScope (modelscope.cn) as a provider for accessing open-source
models including Qwen/Qwen3-14B, Qwen/Qwen3-32B, and Qwen/Qwen3-235B-A22B
via an OpenAI-compatible API at api-inference.modelscope.cn/v1. Supports
both a static model list and dynamic model discovery via the /v1/models
endpoint.
---
 .env.example                                |   4 +
 app/lib/modules/llm/providers/modelscope.ts | 122 ++++++++++++++++++++
 app/lib/modules/llm/registry.ts             |   2 +
 worker-configuration.d.ts                   |   1 +
 4 files changed, 129 insertions(+)
 create mode 100644 app/lib/modules/llm/providers/modelscope.ts

diff --git a/.env.example b/.env.example
index b724838845..312de91b9a 100644
--- a/.env.example
+++ b/.env.example
@@ -83,6 +83,10 @@ HYPERBOLIC_API_KEY=your_hyperbolic_api_key_here
 # Get your API key from: https://openrouter.ai/keys
 OPEN_ROUTER_API_KEY=your_openrouter_api_key_here
 
+# ModelScope (Open-source model platform by Alibaba)
+# Get your API key from: https://modelscope.cn/my/myaccesstoken
+MODELSCOPE_API_KEY=your_modelscope_api_key_here
+
 # ======================================
 # CUSTOM PROVIDER BASE URLS (Optional)
 # ======================================
diff --git a/app/lib/modules/llm/providers/modelscope.ts b/app/lib/modules/llm/providers/modelscope.ts
new file mode 100644
index 0000000000..9d3b2353ee
--- /dev/null
+++ b/app/lib/modules/llm/providers/modelscope.ts
@@ -0,0 +1,122 @@
+import { BaseProvider } from '~/lib/modules/llm/base-provider';
+import type { ModelInfo } from '~/lib/modules/llm/types';
+import type { IProviderSetting } from '~/types/model';
+import type { LanguageModelV1 } from 'ai';
+import { createOpenAI } from '@ai-sdk/openai';
+
+export default class ModelScopeProvider extends BaseProvider {
+  name = 'ModelScope';
+  getApiKeyLink = 'https://modelscope.cn/my/myaccesstoken';
+
+  config = {
+    apiTokenKey: 'MODELSCOPE_API_KEY',
+  };
+
+  staticModels: ModelInfo[] = [
+    // Qwen3-14B via ModelScope: 128k context
+    {
+      name: 'Qwen/Qwen3-14B',
+      label: 'Qwen/Qwen3-14B',
+      provider: 'ModelScope',
+      maxTokenAllowed: 128000,
+    },
+
+    // Qwen3-32B via ModelScope: 128k context
+    {
+      name: 'Qwen/Qwen3-32B',
+      label: 'Qwen/Qwen3-32B',
+      provider: 'ModelScope',
+      maxTokenAllowed: 128000,
+    },
+
+    // Qwen/Qwen3-235B-A22B via ModelScope: 128k context
+    {
+      name: 'Qwen/Qwen3-235B-A22B',
+      label: 'Qwen/Qwen3-235B-A22B',
+      provider: 'ModelScope',
+      maxTokenAllowed: 128000,
+    },
+  ];
+
+  // Fetches the live model list from ModelScope's OpenAI-compatible
+  // /v1/models endpoint, excluding models already declared in staticModels.
+  async getDynamicModels(
+    apiKeys?: Record<string, string>,
+    settings?: IProviderSetting,
+    serverEnv?: Record<string, string>,
+  ): Promise<ModelInfo[]> {
+    const { apiKey } = this.getProviderBaseUrlAndKey({
+      apiKeys,
+      providerSettings: settings,
+      serverEnv: serverEnv as any,
+      defaultBaseUrlKey: '',
+      defaultApiTokenKey: 'MODELSCOPE_API_KEY',
+    });
+
+    if (!apiKey) {
+      return [];
+    }
+
+    try {
+      const response = await fetch('https://api-inference.modelscope.cn/v1/models', {
+        headers: {
+          Authorization: `Bearer ${apiKey}`,
+        },
+        signal: this.createTimeoutSignal(5000),
+      });
+
+      if (!response.ok) {
+        console.error(`ModelScope API error: ${response.statusText}`);
+        return [];
+      }
+
+      const data = (await response.json()) as any;
+      const staticModelIds = this.staticModels.map((m) => m.name);
+
+      // Filter out models we already have in staticModels
+      const dynamicModels =
+        data.data
+          ?.filter((model: any) => !staticModelIds.includes(model.id))
+          .map((m: any) => ({
+            name: m.id,
+            label: `${m.id} (Dynamic)`,
+            provider: this.name,
+            maxTokenAllowed: 64000, // Default, adjust per model if available
+            maxCompletionTokens: 8192,
+          })) || [];
+
+      return dynamicModels;
+    } catch (error) {
+      console.error(`Failed to fetch ModelScope models:`, error);
+      return [];
+    }
+  }
+
+  getModelInstance(options: {
+    model: string;
+    serverEnv: Env;
+    apiKeys?: Record<string, string>;
+    providerSettings?: Record<string, IProviderSetting>;
+  }): LanguageModelV1 {
+    const { model, serverEnv, apiKeys, providerSettings } = options;
+
+    const { apiKey } = this.getProviderBaseUrlAndKey({
+      apiKeys,
+      providerSettings: providerSettings?.[this.name],
+      serverEnv: serverEnv as any,
+      defaultBaseUrlKey: '',
+      defaultApiTokenKey: 'MODELSCOPE_API_KEY',
+    });
+
+    if (!apiKey) {
+      throw new Error(`Missing API key for ${this.name} provider`);
+    }
+
+    const openai = createOpenAI({
+      apiKey,
+      baseURL: 'https://api-inference.modelscope.cn/v1/',
+    });
+
+    return openai(model);
+  }
+}
diff --git a/app/lib/modules/llm/registry.ts b/app/lib/modules/llm/registry.ts
index 01bbe81140..f9dd361803 100644
--- a/app/lib/modules/llm/registry.ts
+++ b/app/lib/modules/llm/registry.ts
@@ -20,6 +20,7 @@ import AmazonBedrockProvider from './providers/amazon-bedrock';
 import GithubProvider from './providers/github';
 import MoonshotProvider from './providers/moonshot';
 import ZaiProvider from './providers/z-ai';
+import ModelScopeProvider from './providers/modelscope';
 
 export {
   AnthropicProvider,
@@ -44,4 +45,5 @@
   AmazonBedrockProvider,
   GithubProvider,
   ZaiProvider,
+  ModelScopeProvider,
 };
diff --git a/worker-configuration.d.ts b/worker-configuration.d.ts
index 6dd21f9487..2a5b2e0ce7 100644
--- a/worker-configuration.d.ts
+++ b/worker-configuration.d.ts
@@ -19,4 +19,5 @@ interface Env {
   XAI_API_KEY: string;
   PERPLEXITY_API_KEY: string;
   AWS_BEDROCK_CONFIG: string;
+  MODELSCOPE_API_KEY: string;
 }