[OPIK-712]: fe gemini;
aadereiko committed Feb 28, 2025
1 parent 31e6170 commit a38e1ca
Showing 6 changed files with 90 additions and 1 deletion.
6 changes: 6 additions & 0 deletions apps/opik-frontend/src/constants/llm.ts
@@ -27,6 +27,12 @@ export const DEFAULT_ANTHROPIC_CONFIGS = {
TOP_P: 1,
};

export const DEFAULT_GEMINI_CONFIGS = {
TEMPERATURE: 0,
MAX_COMPLETION_TOKENS: 1024,
TOP_P: 1,
};

export const DEFAULT_OPEN_ROUTER_CONFIGS = {
MAX_TOKENS: 0,
TEMPERATURE: 1,
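For reference, a minimal sketch (not part of this commit) of how these frontend defaults could be forwarded as Gemini generation settings. The buildGenerationConfig helper and the mapping of MAX_COMPLETION_TOKENS to Gemini's maxOutputTokens field are assumptions for illustration; the import path relies on the repo's @/ alias.

```ts
import { DEFAULT_GEMINI_CONFIGS } from "@/constants/llm";

// Shape of the generation settings Gemini accepts (assumed here for illustration).
interface GeminiGenerationConfig {
  temperature: number;
  topP: number;
  maxOutputTokens: number;
}

// Hypothetical helper: seed a request with the defaults defined above.
const buildGenerationConfig = (): GeminiGenerationConfig => ({
  temperature: DEFAULT_GEMINI_CONFIGS.TEMPERATURE, // 0
  topP: DEFAULT_GEMINI_CONFIGS.TOP_P, // 1
  // The UI constant is named MAX_COMPLETION_TOKENS; Gemini's API calls the limit maxOutputTokens.
  maxOutputTokens: DEFAULT_GEMINI_CONFIGS.MAX_COMPLETION_TOKENS, // 1024
});
```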
10 changes: 10 additions & 0 deletions apps/opik-frontend/src/constants/providers.ts
@@ -2,6 +2,7 @@ import OpenAIIcon from "@/icons/integrations/openai.svg?react";
import AnthropicIcon from "@/icons/integrations/anthropic.svg?react";
import OpenRouterIcon from "@/icons/integrations/open_router.svg?react";
import OllamaIcon from "@/icons/integrations/ollama.svg?react";
import GeminiIcon from "@/icons/integrations/gemini.svg?react";

import {
PROVIDER_LOCATION_TYPE,
@@ -77,6 +78,15 @@ export const PROVIDERS: PROVIDERS_TYPE = {
lsKey: OLLAMA_LS_KEY,
defaultModel: "",
},
[PROVIDER_TYPE.GEMINI]: {
label: "Gemini",
value: PROVIDER_TYPE.GEMINI,
icon: GeminiIcon,
apiKeyName: "GEMINI_API_KEY",
apiKeyURL: "https://aistudio.google.com/apikey",
defaultModel: PROVIDER_MODEL_TYPE.GEMINI_1_5_FLASH,
locationType: PROVIDER_LOCATION_TYPE.cloud,
},
};

export const PROVIDERS_OPTIONS = Object.values(PROVIDERS);
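For illustration only, a sketch of how UI code could read the registry entry added above, e.g. to render the provider picker or link to the key-management page. PROVIDERS, PROVIDERS_OPTIONS, PROVIDER_TYPE and PROVIDER_LOCATION_TYPE are the identifiers shown in this diff; the assumption that every entry carries a locationType field follows from the imports but is not verified here.

```ts
import { PROVIDERS, PROVIDERS_OPTIONS } from "@/constants/providers";
import { PROVIDER_LOCATION_TYPE, PROVIDER_TYPE } from "@/types/providers";

// Look up the Gemini entry registered above.
const gemini = PROVIDERS[PROVIDER_TYPE.GEMINI];
console.log(gemini?.label);      // "Gemini"
console.log(gemini?.apiKeyName); // "GEMINI_API_KEY"
console.log(gemini?.apiKeyURL);  // "https://aistudio.google.com/apikey"

// Cloud-hosted providers (now including Gemini) can be listed separately from local ones.
const cloudProviders = PROVIDERS_OPTIONS.filter(
  (provider) => provider.locationType === PROVIDER_LOCATION_TYPE.cloud,
);
console.log(cloudProviders.map((provider) => provider.label));
```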
32 changes: 32 additions & 0 deletions apps/opik-frontend/src/hooks/useLLMProviderModelsData.ts
@@ -1462,6 +1462,38 @@ export const PROVIDER_MODELS: PROVIDER_MODELS_TYPE = {
label: "01-ai/yi-vision",
},
],

[PROVIDER_TYPE.GEMINI]: [
{
value: PROVIDER_MODEL_TYPE.GEMINI_2_0_FLASH,
label: "Gemini 2.0 Flash",
},
{
value: PROVIDER_MODEL_TYPE.GEMINI_1_5_FLASH,
label: "Gemini 1.5 Flash",
},
{
value: PROVIDER_MODEL_TYPE.GEMINI_1_5_FLASH_8B,
label: "Gemini 1.5 Flash-8B",
},
{
value: PROVIDER_MODEL_TYPE.GEMINI_1_5_PRO,
label: "Gemini 1.5 Pro",
},
{
value: PROVIDER_MODEL_TYPE.GEMINI_1_0_PRO,
label: "Gemini 1.0 Pro",
},
{
value: PROVIDER_MODEL_TYPE.TEXT_EMBEDDING,
label: "Text Embedding",
},
{
value: PROVIDER_MODEL_TYPE.AQA,
label: "AQA",
},
],

[PROVIDER_TYPE.OLLAMA]: [
// the list will be filled based on data in localStorage
],
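A small usage sketch (not part of the commit) showing how the Gemini model list could feed a model-selection dropdown. The toSelectOptions helper is hypothetical; PROVIDER_MODELS is assumed to be importable from this hook module, as the hunk header suggests.

```ts
import { PROVIDER_MODELS } from "@/hooks/useLLMProviderModelsData";
import { PROVIDER_TYPE } from "@/types/providers";

// Hypothetical helper: turn a provider's model list into dropdown options.
const toSelectOptions = (provider: PROVIDER_TYPE) =>
  (PROVIDER_MODELS[provider] ?? []).map(({ value, label }) => ({ value, label }));

const geminiOptions = toSelectOptions(PROVIDER_TYPE.GEMINI);
// => [{ value: "gemini-2.0-flash-exp", label: "Gemini 2.0 Flash" }, ...]
```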
14 changes: 14 additions & 0 deletions apps/opik-frontend/src/icons/integrations/gemini.svg
(New SVG icon asset; the image contents are not rendered in the diff view.)
10 changes: 10 additions & 0 deletions apps/opik-frontend/src/lib/playground.ts
@@ -2,11 +2,13 @@ import { PlaygroundPromptType } from "@/types/playground";
import { generateRandomString } from "@/lib/utils";
import {
DEFAULT_ANTHROPIC_CONFIGS,
DEFAULT_GEMINI_CONFIGS,
DEFAULT_OPEN_AI_CONFIGS,
DEFAULT_OPEN_ROUTER_CONFIGS,
} from "@/constants/llm";
import {
LLMAnthropicConfigsType,
LLMGeminiConfigsType,
LLMOpenAIConfigsType,
LLMOpenRouterConfigsType,
LLMPromptConfigsType,
@@ -54,6 +56,14 @@ export const getDefaultConfigByProvider = (
} as LLMOpenRouterConfigsType;
}

if (provider === PROVIDER_TYPE.GEMINI) {
return {
temperature: DEFAULT_GEMINI_CONFIGS.TEMPERATURE,
maxCompletionTokens: DEFAULT_GEMINI_CONFIGS.MAX_COMPLETION_TOKENS,
topP: DEFAULT_GEMINI_CONFIGS.TOP_P,
} as LLMGeminiConfigsType;
}

return {};
};

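For reference, a sketch of what the new branch returns when the playground asks for Gemini defaults; the values mirror DEFAULT_GEMINI_CONFIGS above, and the cast simply mirrors the as LLMGeminiConfigsType cast inside the function.

```ts
import { getDefaultConfigByProvider } from "@/lib/playground";
import { LLMGeminiConfigsType, PROVIDER_TYPE } from "@/types/providers";

const geminiDefaults = getDefaultConfigByProvider(
  PROVIDER_TYPE.GEMINI,
) as LLMGeminiConfigsType;

// With the defaults added in this commit:
// geminiDefaults.temperature === 0
// geminiDefaults.maxCompletionTokens === 1024
// geminiDefaults.topP === 1
```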
19 changes: 18 additions & 1 deletion apps/opik-frontend/src/types/providers.ts
@@ -3,6 +3,7 @@ export enum PROVIDER_TYPE {
ANTHROPIC = "anthropic",
OPEN_ROUTER = "openrouter",
OLLAMA = "ollama",
GEMINI = "gemini",
}

export enum PROVIDER_MODEL_TYPE {
@@ -351,6 +352,15 @@ export enum PROVIDER_MODEL_TYPE {
ZERO_ONE_AI_YI_LARGE_FC = "01-ai/yi-large-fc",
ZERO_ONE_AI_YI_LARGE_TURBO = "01-ai/yi-large-turbo",
ZERO_ONE_AI_YI_VISION = "01-ai/yi-vision",

// <----- gemini
GEMINI_2_0_FLASH = "gemini-2.0-flash-exp",
GEMINI_1_5_FLASH = "gemini-1.5-flash",
GEMINI_1_5_FLASH_8B = "gemini-1.5-flash-8b",
GEMINI_1_5_PRO = "gemini-1.5-pro",
GEMINI_1_0_PRO = "gemini-1.0-pro",
TEXT_EMBEDDING = "text-embedding-004",
AQA = "aqa",
}

export type PROVIDER_MODELS_TYPE = {
@@ -409,8 +419,15 @@ export interface LLMOpenRouterConfigsType {
topA: number;
}

export interface LLMGeminiConfigsType {
temperature: number;
maxCompletionTokens: number;
topP: number;
}

export type LLMPromptConfigsType =
| Record<string, never>
| LLMOpenAIConfigsType
| LLMAnthropicConfigsType
| LLMOpenRouterConfigsType;
| LLMOpenRouterConfigsType
| LLMGeminiConfigsType;
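
Since LLMPromptConfigsType is now a wider union, downstream code that needs the Gemini-specific fields has to narrow it. The asGeminiConfig helper below is a hypothetical sketch of one way to do that when the provider is already known; it is not part of this commit.

```ts
import {
  LLMGeminiConfigsType,
  LLMPromptConfigsType,
  PROVIDER_TYPE,
} from "@/types/providers";

// Hypothetical narrowing helper: when the caller knows the provider,
// the config union can be narrowed to the Gemini variant.
const asGeminiConfig = (
  provider: PROVIDER_TYPE,
  configs: LLMPromptConfigsType,
): LLMGeminiConfigsType | null =>
  provider === PROVIDER_TYPE.GEMINI ? (configs as LLMGeminiConfigsType) : null;
```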
