Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions packages/app/server/src/env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ export const env = createEnv({
GROQ_API_KEY: z.string().optional(),
XAI_API_KEY: z.string().optional(),
OPENROUTER_API_KEY: z.string().optional(),
AI_GATEWAY_API_KEY: z.string().optional(),
VERCEL_OIDC_TOKEN: z.string().optional(),
TAVILY_API_KEY: z.string().optional(),
E2B_API_KEY: z.string().optional(),
GOOGLE_SERVICE_ACCOUNT_KEY_ENCODED: z.string().optional(),
Expand Down
6 changes: 6 additions & 0 deletions packages/app/server/src/providers/ProviderFactory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ import { GPTProvider } from './GPTProvider';
import { OpenAIImageProvider } from './OpenAIImageProvider';
import { OpenAIResponsesProvider } from './OpenAIResponsesProvider';
import { OpenRouterProvider } from './OpenRouterProvider';
import { VercelAIGatewayProvider } from './VercelAIGatewayProvider';
import { ProviderType } from './ProviderType';
import { XAIProvider } from './XAIProvider';
import {
Expand Down Expand Up @@ -50,6 +51,9 @@ const createChatModelToProviderMapping = (): Record<string, ProviderType> => {
case 'OpenRouter':
mapping[modelConfig.model_id] = ProviderType.OPENROUTER;
break;
case 'VercelAIGateway':
mapping[modelConfig.model_id] = ProviderType.VERCEL_AI_GATEWAY;
break;
case 'Groq':
mapping[modelConfig.model_id] = ProviderType.GROQ;
break;
Expand Down Expand Up @@ -192,6 +196,8 @@ export const getProvider = (
return new GroqProvider(stream, model);
case ProviderType.XAI:
return new XAIProvider(stream, model);
case ProviderType.VERCEL_AI_GATEWAY:
return new VercelAIGatewayProvider(stream, model);
default:
throw new Error(`Unknown provider type: ${type}`);
}
Expand Down
1 change: 1 addition & 0 deletions packages/app/server/src/providers/ProviderType.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,5 @@ export enum ProviderType {
OPENAI_VIDEOS = 'OPENAI_VIDEOS',
GROQ = 'GROQ',
XAI = 'XAI',
VERCEL_AI_GATEWAY = 'VERCEL_AI_GATEWAY',
}
136 changes: 136 additions & 0 deletions packages/app/server/src/providers/VercelAIGatewayProvider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
import { LlmTransactionMetadata, Transaction } from '../types';
import { BaseProvider } from './BaseProvider';
import { ProviderType } from './ProviderType';
import { getCostPerToken } from '../services/AccountingService';
import logger from '../logger';
import { env } from '../env';

/**
 * Normalized token counts extracted from a Vercel AI Gateway response body.
 * Populated by readUsage / parseStreamUsage below.
 */
interface TokenUsage {
  // Tokens consumed by the prompt / input.
  inputTokens: number;
  // Tokens produced in the completion / output.
  outputTokens: number;
  // Reported total; computed as input + output when the payload omits it.
  totalTokens: number;
}

// Upstream endpoint this provider proxies requests to.
const VERCEL_AI_GATEWAY_BASE_URL = 'https://ai-gateway.vercel.sh/v1/ai';

/**
 * Coerce an unknown JSON value to a finite number.
 * Anything that is not a finite numeric primitive (strings, null, NaN,
 * Infinity, objects) coerces to 0.
 */
const toNumber = (value: unknown): number => {
  if (typeof value !== 'number') {
    return 0;
  }
  return Number.isFinite(value) ? value : 0;
};

/**
 * Extract token usage from a parsed gateway payload.
 *
 * Accepts either a usage object directly or a payload carrying a nested
 * `usage` object, and tolerates the key spellings emitted by different
 * providers (camelCase AI SDK keys and snake_case OpenAI-style keys).
 * Non-object input yields all-zero usage.
 */
const readUsage = (value: unknown): TokenUsage => {
  const zero: TokenUsage = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
  if (!value || typeof value !== 'object') {
    return zero;
  }

  // Inlined numeric coercion: finite numbers pass through, all else is 0.
  const num = (v: unknown): number =>
    typeof v === 'number' && Number.isFinite(v) ? v : 0;

  const record = value as Record<string, unknown>;
  const source =
    record.usage && typeof record.usage === 'object'
      ? (record.usage as Record<string, unknown>)
      : record;

  // First non-zero spelling wins for each field.
  const inputTokens =
    num(source.inputTokens) ||
    num(source.promptTokens) ||
    num(source.prompt_tokens) ||
    num(source.input_tokens);
  const outputTokens =
    num(source.outputTokens) ||
    num(source.completionTokens) ||
    num(source.completion_tokens) ||
    num(source.output_tokens);
  const totalTokens =
    num(source.totalTokens) ||
    num(source.total_tokens) ||
    inputTokens + outputTokens;

  return { inputTokens, outputTokens, totalTokens };
};

/**
 * Accumulate token usage across every JSON frame of a streamed response body.
 *
 * Frames prefixed with `data: ` are unwrapped; blank lines, the `[DONE]`
 * sentinel, and non-JSON protocol frames are skipped. Usage from each
 * parsable frame is summed, with totalTokens falling back to
 * input + output when no frame reported a total.
 *
 * NOTE(review): if the gateway ever reports *cumulative* usage on more than
 * one frame, this summation would over-count — confirm usage appears on a
 * single frame per stream.
 */
const parseStreamUsage = (data: string): TokenUsage => {
  const totals: TokenUsage = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };

  for (const rawLine of data.split('\n')) {
    const line = rawLine.trim();
    if (line === '' || line === 'data: [DONE]') {
      continue;
    }

    const payload = line.startsWith('data: ')
      ? line.slice('data: '.length)
      : line;

    let parsed: unknown;
    try {
      parsed = JSON.parse(payload);
    } catch {
      // The AI SDK stream may include non-JSON protocol frames; ignore those.
      continue;
    }

    const frame = readUsage(parsed);
    totals.inputTokens += frame.inputTokens;
    totals.outputTokens += frame.outputTokens;
    totals.totalTokens += frame.totalTokens;
  }

  if (totals.totalTokens === 0) {
    totals.totalTokens = totals.inputTokens + totals.outputTokens;
  }

  return totals;
};

/**
 * Provider implementation for requests proxied through the Vercel AI Gateway.
 *
 * Authenticates with an explicit gateway API key when configured, otherwise
 * with a Vercel OIDC token, and derives billing metadata from the gateway's
 * token-usage reporting for both streaming and non-streaming responses.
 */
export class VercelAIGatewayProvider extends BaseProvider {
  getType(): ProviderType {
    return ProviderType.VERCEL_AI_GATEWAY;
  }

  getBaseUrl(): string {
    return VERCEL_AI_GATEWAY_BASE_URL;
  }

  /** The API key takes precedence over the OIDC token when both are set. */
  getApiKey(): string | undefined {
    return env.AI_GATEWAY_API_KEY || env.VERCEL_OIDC_TOKEN;
  }

  override async formatAuthHeaders(
    headers: Record<string, string>
  ): Promise<Record<string, string>> {
    const base = await super.formatAuthHeaders(headers);

    // Preserve a caller-supplied protocol version (defaulting to 0.0.1) and
    // advertise which credential type getApiKey() resolved to.
    const protocolVersion = headers['ai-gateway-protocol-version'] || '0.0.1';
    const authMethod = env.AI_GATEWAY_API_KEY ? 'api-key' : 'oidc';

    return {
      ...base,
      'ai-gateway-protocol-version': protocolVersion,
      'ai-gateway-auth-method': authMethod,
    };
  }

  /**
   * Turn a raw response body into a billing transaction. Streaming bodies
   * are parsed frame-by-frame; non-streaming bodies are one JSON document.
   * Rethrows after logging if the body cannot be processed.
   */
  async handleBody(data: string): Promise<Transaction> {
    try {
      const usage = this.getIsStream()
        ? parseStreamUsage(data)
        : readUsage(JSON.parse(data));

      const rawTransactionCost = getCostPerToken(
        this.getModel(),
        usage.inputTokens,
        usage.outputTokens
      );

      // NOTE(review): providerId is the literal string 'null' — confirm this
      // placeholder value is intentional.
      const metadata: LlmTransactionMetadata = {
        providerId: 'null',
        provider: this.getType(),
        model: this.getModel(),
        inputTokens: usage.inputTokens,
        outputTokens: usage.outputTokens,
        totalTokens: usage.totalTokens,
      };

      return {
        metadata,
        rawTransactionCost,
        status: 'success',
      };
    } catch (error) {
      logger.error(`Error processing Vercel AI Gateway data: ${error}`);
      throw error;
    }
  }
}
2 changes: 2 additions & 0 deletions packages/app/server/src/services/AccountingService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import {
SupportedImageModel,
SupportedVideoModel,
XAIModels,
VercelAIGatewayModels,
} from '@merit-systems/echo-typescript-sdk';

import { Decimal } from '@prisma/client/runtime/library';
Expand All @@ -30,6 +31,7 @@ export const ALL_SUPPORTED_MODELS: SupportedModel[] = [
...OpenRouterModels,
...GroqModels,
...XAIModels,
...VercelAIGatewayModels,
];

// Handle image models separately since they have different pricing structure
Expand Down
10 changes: 10 additions & 0 deletions packages/app/server/src/services/RequestDataService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,11 @@ export function extractModelName(req: Request): string | undefined {
return model;
}

const gatewayModel = req.headers['ai-language-model-id'];
if (typeof gatewayModel === 'string' && gatewayModel.length > 0) {
return gatewayModel;
}

const modelFromPath = extractGeminiModelName(req);

if (modelFromPath && modelFromPath !== undefined) {
Expand Down Expand Up @@ -55,6 +60,11 @@ export function extractIsStream(req: Request): boolean {
return stream;
}

const gatewayStream = req.headers['ai-language-model-streaming'];
if (gatewayStream === 'true') {
return true;
}

if (isGeminiStreamingPath(req.path)) {
return true;
}
Expand Down
4 changes: 2 additions & 2 deletions packages/sdk/ts/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@
"author": "Merit Systems",
"license": "MIT",
"devDependencies": {
"@ai-sdk/gateway": "^1.0.12",
"@types/node": "^24.3.1",
"@typescript-eslint/eslint-plugin": "^8.34.1",
"@typescript-eslint/parser": "^8.34.1",
Expand All @@ -63,6 +62,7 @@
"@ai-sdk/openai": "2.0.32",
"@ai-sdk/xai": "2.0.16",
"@openrouter/ai-sdk-provider": "1.2.0",
"ai": "5.0.47"
"ai": "5.0.47",
"@ai-sdk/gateway": "^1.0.12"
}
}
2 changes: 2 additions & 0 deletions packages/sdk/ts/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ export { GeminiModels } from './supported-models/chat/gemini';
export type { GeminiModel } from './supported-models/chat/gemini';
export { OpenRouterModels } from './supported-models/chat/openrouter';
export type { OpenRouterModel } from './supported-models/chat/openrouter';
export { VercelAIGatewayModels } from './supported-models/chat/vercel-ai-gateway';
export type { VercelAIGatewayModel } from './supported-models/chat/vercel-ai-gateway';
export { GroqModels } from './supported-models/chat/groq';
export type { GroqModel } from './supported-models/chat/groq';
export { XAIModels } from './supported-models/chat/xai';
Expand Down
2 changes: 2 additions & 0 deletions packages/sdk/ts/src/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ export * from './groq';
export * from './xai';
export * from './openai';
export * from './openrouter';
export * from './vercel-ai-gateway';

export function echoFetch(
originalFetch: typeof fetch,
Expand Down Expand Up @@ -62,4 +63,5 @@ export { type GoogleGenerativeAIProvider } from '@ai-sdk/google';
export { type GroqProvider } from '@ai-sdk/groq';
export { type OpenAIProvider } from '@ai-sdk/openai';
export { type OpenRouterProvider } from '@openrouter/ai-sdk-provider';
export { type GatewayProvider } from '@ai-sdk/gateway';
export { type XaiProvider } from '@ai-sdk/xai';
23 changes: 23 additions & 0 deletions packages/sdk/ts/src/providers/vercel-ai-gateway.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import { createGatewayProvider, GatewayProvider } from '@ai-sdk/gateway';
import { ROUTER_BASE_URL } from 'config';
import { EchoConfig } from '../types';
import { validateAppId } from '../utils/validation';
import { echoFetch } from './index';

/**
 * Build a Vercel AI Gateway provider whose traffic is routed through the
 * Echo router and authenticated with Echo tokens.
 *
 * The static apiKey is a placeholder: echoFetch substitutes a freshly
 * resolved token (via getTokenFn) on every request, invoking
 * onInsufficientFunds when the account cannot cover the call.
 */
export function createEchoVercelAIGateway(
  { appId, baseRouterUrl = ROUTER_BASE_URL }: EchoConfig,
  getTokenFn: (appId: string) => Promise<string | null>,
  onInsufficientFunds?: () => void
): GatewayProvider {
  validateAppId(appId, 'createEchoVercelAIGateway');

  const resolveToken = () => getTokenFn(appId);

  return createGatewayProvider({
    baseURL: baseRouterUrl,
    apiKey: 'placeholder_replaced_by_echoFetch',
    fetch: echoFetch(fetch, resolveToken, onInsufficientFunds),
  });
}
2 changes: 2 additions & 0 deletions packages/sdk/ts/src/resources/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import {
AnthropicModels,
GeminiModels,
OpenRouterModels,
VercelAIGatewayModels,
OpenAIImageModels,
SupportedModel,
SupportedImageModel,
Expand All @@ -26,6 +27,7 @@ export class ModelsResource extends BaseResource {
...AnthropicModels,
...GeminiModels,
...OpenRouterModels,
...VercelAIGatewayModels,
];

return allModels;
Expand Down
Loading