Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions JS/edgechains/arakoodev/src/ai/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,18 @@
// Chat / completion providers.
export { OpenAI } from "./lib/openai/openai.js";
export { GeminiAI } from "./lib/gemini/gemini.js";
export { Palm2AI } from "./lib/palm2/palm2.js";
// Public Palm2 client types, kept strictly alphabetized for easy scanning.
export type {
    Palm2Candidate,
    Palm2ChatOptions,
    Palm2ConstructionOptions,
    Palm2Content,
    Palm2ContentPart,
    Palm2EmbeddingOptions,
    Palm2EmbeddingResponse,
    Palm2GenerateResponse,
    Palm2TextOptions,
    ResponseMimeType,
} from "./lib/palm2/palm2.js";
export { LlamaAI } from "./lib/llama/llama.js";
// Voice-agent providers.
export { RetellAI } from "./lib/retell-ai/retell.js";
export { RetellWebClient } from "./lib/retell-ai/retellWebClient.js";
108 changes: 32 additions & 76 deletions JS/edgechains/arakoodev/src/ai/src/lib/gemini/gemini.ts
Original file line number Diff line number Diff line change
@@ -1,97 +1,53 @@
import axios from "axios";
import { retry } from "@lifeomic/attempt";
const url = "https://generativelanguage.googleapis.com/v1/models/gemini-pro:generateContent";
import { Palm2AI, type Palm2GenerateResponse, type ResponseMimeType } from "../palm2/palm2.js";

interface GeminiAIConstructionOptions {
apiKey?: string;
baseUrl?: string;
}

type SafetyRating = {
category:
| "HARM_CATEGORY_SEXUALLY_EXPLICIT"
| "HARM_CATEGORY_HATE_SPEECH"
| "HARM_CATEGORY_HARASSMENT"
| "HARM_CATEGORY_DANGEROUS_CONTENT";
probability: "NEGLIGIBLE" | "LOW" | "MEDIUM" | "HIGH";
};

type ContentPart = {
text: string;
};

type Content = {
parts: ContentPart[];
role: string;
};

type Candidate = {
content: Content;
finishReason: string;
index: number;
safetyRatings: SafetyRating[];
};

type UsageMetadata = {
promptTokenCount: number;
candidatesTokenCount: number;
totalTokenCount: number;
};

type Response = {
candidates: Candidate[];
usageMetadata: UsageMetadata;
};

type responseMimeType = "text/plain" | "application/json";

interface GeminiAIChatOptions {
model?: string;
max_output_tokens?: number;
temperature?: number;
topP?: number;
topK?: number;
candidateCount?: number;
stopSequences?: string[];
prompt: string;
max_retry?: number;
responseType?: responseMimeType;
responseType?: ResponseMimeType;
delay?: number;
}

export class GeminiAI {
apiKey: string;
constructor(options: GeminiAIConstructionOptions) {
this.apiKey = options.apiKey || process.env.GEMINI_API_KEY || "";
baseUrl?: string;

constructor(options: GeminiAIConstructionOptions = {}) {
this.apiKey =
options.apiKey ??
process.env.GEMINI_API_KEY ??
process.env.GOOGLE_API_KEY ??
process.env.PALM2_API_KEY ??
"";
this.baseUrl = options.baseUrl;
}

async chat(chatOptions: GeminiAIChatOptions): Promise<Response> {
let data = JSON.stringify({
contents: [
{
role: "user",
parts: [
{
text: chatOptions.prompt,
},
],
},
],
async chat(chatOptions: GeminiAIChatOptions): Promise<Palm2GenerateResponse> {
const client = new Palm2AI({ apiKey: this.apiKey, baseUrl: this.baseUrl });

return client.chat({
model: chatOptions.model ?? "gemini-pro",
prompt: chatOptions.prompt,
temperature: chatOptions.temperature,
topP: chatOptions.topP,
topK: chatOptions.topK,
candidateCount: chatOptions.candidateCount,
stopSequences: chatOptions.stopSequences,
responseMimeType: chatOptions.responseType ?? "text/plain",
maxOutputTokens: chatOptions.max_output_tokens ?? 1024,
maxRetries: chatOptions.max_retry ?? 3,
delay: chatOptions.delay ?? 200,
});

let config = {
method: "post",
maxBodyLength: Infinity,
url,
headers: {
"Content-Type": "application/json",
"x-goog-api-key": this.apiKey,
},
temperature: chatOptions.temperature || "0.7",
responseMimeType: chatOptions.responseType || "text/plain",
max_output_tokens: chatOptions.max_output_tokens || 1024,
data: data,
};
return await retry(
async () => {
return (await axios.request(config)).data;
},
{ maxAttempts: chatOptions.max_retry || 3, delay: chatOptions.delay || 200 }
);
}
}
190 changes: 190 additions & 0 deletions JS/edgechains/arakoodev/src/ai/src/lib/palm2/palm2.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,190 @@
import axios from "axios";
import { retry } from "@lifeomic/attempt";

/** Default root of the Google Generative Language REST API. */
const defaultBaseUrl = "https://generativelanguage.googleapis.com/v1beta";

/** MIME types the API accepts for `generationConfig.responseMimeType`. */
export type ResponseMimeType = "text/plain" | "application/json";

/** Options accepted by the {@link Palm2AI} constructor. */
export interface Palm2ConstructionOptions {
    /** Explicit API key; when omitted the constructor reads env vars. */
    apiKey?: string;
    /** Alternative API root; defaults to the public v1beta endpoint. */
    baseUrl?: string;
}

/** A single text fragment inside a {@link Palm2Content} turn. */
export interface Palm2ContentPart {
    text: string;
}

/** One conversation turn sent to / returned by `generateContent`. */
export interface Palm2Content {
    /** Author of the turn — "user" or "model". */
    role?: "user" | "model";
    parts: Palm2ContentPart[];
}

/** Tuning knobs forwarded to the API as `generationConfig`. */
interface Palm2GenerationConfig {
    temperature?: number;
    topP?: number;
    topK?: number;
    maxOutputTokens?: number;
    candidateCount?: number;
    stopSequences?: string[];
    responseMimeType?: ResponseMimeType;
}

/** Options for {@link Palm2AI.chat} (`models/{model}:generateContent`). */
export interface Palm2ChatOptions extends Palm2GenerationConfig {
    /** Model id; `chat` defaults this to "gemini-pro" when omitted. */
    model?: string;
    /** Convenience single-turn prompt; ignored when `contents` is supplied. */
    prompt?: string;
    /** Full multi-turn payload; takes precedence over `prompt`. */
    contents?: Palm2Content[];
    /** Retry attempts for the HTTP call (default 3). */
    maxRetries?: number;
    /** Delay in ms between retry attempts (default 200). */
    delay?: number;
}

/** Options for {@link Palm2AI.generateText} (`models/{model}:generateText`). */
export interface Palm2TextOptions {
    /** Model id; defaults to "text-bison-001" when omitted. */
    model?: string;
    prompt: string;
    temperature?: number;
    topP?: number;
    topK?: number;
    maxOutputTokens?: number;
    candidateCount?: number;
    stopSequences?: string[];
    /** Retry attempts for the HTTP call (default 3). */
    maxRetries?: number;
    /** Delay in ms between retry attempts (default 200). */
    delay?: number;
}

/** Options for {@link Palm2AI.generateEmbedding} (`models/{model}:embedText`). */
export interface Palm2EmbeddingOptions {
    /** Model id; defaults to "embedding-gecko-001" when omitted. */
    model?: string;
    text: string;
    /** Retry attempts for the HTTP call (default 3). */
    maxRetries?: number;
    /** Delay in ms between retry attempts (default 200). */
    delay?: number;
}

/**
 * One generated candidate. `content` is populated by `generateContent`
 * responses; `output` presumably by legacy `generateText` responses —
 * both are optional to cover either shape (TODO confirm against the API).
 */
export interface Palm2Candidate {
    content?: Palm2Content;
    output?: string;
    finishReason?: string;
    index?: number;
}

/** Response body shared by the generate endpoints. */
export interface Palm2GenerateResponse {
    candidates: Palm2Candidate[];
    /** Token accounting; only present on some API versions. */
    usageMetadata?: {
        promptTokenCount?: number;
        candidatesTokenCount?: number;
        totalTokenCount?: number;
    };
}

/**
 * Response body of `embedText`. Both `value` and `values` are declared
 * because the key name has varied across API versions — TODO confirm
 * which one the targeted endpoint actually returns.
 */
export interface Palm2EmbeddingResponse {
    embedding: {
        value?: number[];
        values?: number[];
    };
}

/** Return a shallow copy of `value` with all `undefined` entries dropped. */
const stripUndefined = <T extends Record<string, unknown>>(value: T): Partial<T> => {
    const cleaned: Record<string, unknown> = {};
    for (const [key, entry] of Object.entries(value)) {
        if (entry !== undefined) {
            cleaned[key] = entry;
        }
    }
    return cleaned as Partial<T>;
};

/**
 * Thin client for the Google Generative Language REST API (Gemini and
 * PaLM 2 model families), authenticating with an API key sent in the
 * `x-goog-api-key` header. All calls are retried with a fixed delay.
 */
export class Palm2AI {
    private readonly apiKey: string;
    private readonly baseUrl: string;

    constructor(options: Palm2ConstructionOptions = {}) {
        // Accept any of the commonly-used env var names for the same key.
        this.apiKey =
            options.apiKey ??
            process.env.GOOGLE_API_KEY ??
            process.env.GEMINI_API_KEY ??
            process.env.PALM2_API_KEY ??
            "";
        // Strip one trailing slash so endpoint concatenation never yields "//".
        this.baseUrl = (options.baseUrl ?? defaultBaseUrl).replace(/\/$/, "");
    }

    /**
     * Call `models/{model}:generateContent`.
     *
     * @param options - either a single-turn `prompt` or a full `contents`
     *   array, plus optional generation and retry settings.
     * @returns the parsed response body.
     */
    async chat(options: Palm2ChatOptions): Promise<Palm2GenerateResponse> {
        const model = options.model ?? "gemini-pro";
        // `contents` wins over the single-turn `prompt` convenience field.
        const contents = options.contents ?? [
            {
                role: "user" as const,
                parts: [{ text: options.prompt ?? "" }],
            },
        ];

        return this.request<Palm2GenerateResponse>({
            endpoint: `models/${model}:generateContent`,
            data: {
                contents,
                // Omit unset knobs entirely so the API applies its defaults.
                generationConfig: stripUndefined({
                    temperature: options.temperature,
                    topP: options.topP,
                    topK: options.topK,
                    maxOutputTokens: options.maxOutputTokens,
                    candidateCount: options.candidateCount,
                    stopSequences: options.stopSequences,
                    responseMimeType: options.responseMimeType,
                }),
            },
            maxRetries: options.maxRetries,
            delay: options.delay,
        });
    }

    /**
     * Call the legacy `models/{model}:generateText` endpoint.
     *
     * @returns the parsed response body.
     */
    async generateText(options: Palm2TextOptions): Promise<Palm2GenerateResponse> {
        const model = options.model ?? "text-bison-001";

        return this.request<Palm2GenerateResponse>({
            endpoint: `models/${model}:generateText`,
            data: stripUndefined({
                prompt: {
                    text: options.prompt,
                },
                temperature: options.temperature,
                topP: options.topP,
                topK: options.topK,
                maxOutputTokens: options.maxOutputTokens,
                candidateCount: options.candidateCount,
                stopSequences: options.stopSequences,
            }),
            maxRetries: options.maxRetries,
            delay: options.delay,
        });
    }

    /**
     * Call `models/{model}:embedText` for a single text string.
     *
     * @returns the parsed embedding response.
     */
    async generateEmbedding(options: Palm2EmbeddingOptions): Promise<Palm2EmbeddingResponse> {
        const model = options.model ?? "embedding-gecko-001";

        return this.request<Palm2EmbeddingResponse>({
            endpoint: `models/${model}:embedText`,
            data: {
                text: options.text,
            },
            maxRetries: options.maxRetries,
            delay: options.delay,
        });
    }

    /**
     * POST `data` to `{baseUrl}/{endpoint}` with retries.
     *
     * @throws Error when no API key was configured, or rethrows the last
     *   axios error once retries are exhausted / aborted.
     */
    private async request<T>({
        endpoint,
        data,
        maxRetries,
        delay,
    }: {
        endpoint: string;
        data: object;
        maxRetries?: number;
        delay?: number;
    }): Promise<T> {
        if (!this.apiKey) {
            throw new Error("Google Generative Language API key is required");
        }

        return retry(
            async () => {
                const response = await axios.post<T>(`${this.baseUrl}/${endpoint}`, data, {
                    headers: {
                        "Content-Type": "application/json",
                        "x-goog-api-key": this.apiKey,
                    },
                });
                return response.data;
            },
            {
                maxAttempts: maxRetries ?? 3,
                delay: delay ?? 200,
                handleError(error: unknown, context) {
                    // 4xx responses (bad request, invalid key, …) are not
                    // transient, so surface them immediately instead of
                    // burning retry attempts. 429 (rate limit) IS retryable.
                    const status = (error as { response?: { status?: number } })
                        ?.response?.status;
                    if (status !== undefined && status >= 400 && status < 500 && status !== 429) {
                        context.abort();
                    }
                },
            }
        );
    }
}
Loading
Loading