diff --git a/.env.example b/.env.example index 444bf1a79a..081f92f279 100644 --- a/.env.example +++ b/.env.example @@ -5,6 +5,7 @@ OPENAI_API_KEY=sk-* # OpenAI API key, starting with sk- REDPILL_API_KEY= # REDPILL API Key GROQ_API_KEY=gsk_* OPENROUTER_API_KEY= +GOOGLE_GENERATIVE_AI_API_KEY= # Gemini API key ELEVENLABS_XI_API_KEY= # API key from elevenlabs diff --git a/README.md b/README.md index 3786057ef1..c8c2b32637 100644 --- a/README.md +++ b/README.md @@ -83,6 +83,7 @@ DISCORD_APPLICATION_ID= DISCORD_API_TOKEN= # Bot token OPENAI_API_KEY=sk-* # OpenAI API key, starting with sk- ELEVENLABS_XI_API_KEY= # API key from elevenlabs +GOOGLE_GENERATIVE_AI_API_KEY= # Gemini API key # ELEVENLABS SETTINGS ELEVENLABS_MODEL_ID=eleven_multilingual_v2 diff --git a/packages/core/src/generation.ts b/packages/core/src/generation.ts index f6cdbf7227..43cd2e240a 100644 --- a/packages/core/src/generation.ts +++ b/packages/core/src/generation.ts @@ -11,6 +11,7 @@ import { default as tiktoken, TiktokenModel } from "tiktoken"; import Together from "together-ai"; import { elizaLogger } from "./index.ts"; import models from "./models.ts"; +import { createGoogleGenerativeAI } from "@ai-sdk/google"; import { parseBooleanFromText, parseJsonArrayFromText, @@ -104,6 +105,26 @@ export async function generateText({ break; } + case ModelProviderName.GOOGLE: { + const google = createGoogleGenerativeAI(); + + const { text: googleResponse } = await aiGenerateText({ + model: google(model), + prompt: context, + system: + runtime.character.system ?? + settings.SYSTEM_PROMPT ??
+ undefined, + temperature: temperature, + maxTokens: max_response_length, + frequencyPenalty: frequency_penalty, + presencePenalty: presence_penalty, + }); + + response = googleResponse; + break; + } + case ModelProviderName.ANTHROPIC: { elizaLogger.log("Initializing Anthropic model."); @@ -214,7 +234,6 @@ export async function generateText({ break; } - case ModelProviderName.OPENROUTER: { elizaLogger.log("Initializing OpenRouter model."); const serverUrl = models[provider].endpoint; @@ -238,7 +257,6 @@ export async function generateText({ break; } - case ModelProviderName.OLLAMA: { console.log("Initializing Ollama model."); @@ -425,10 +443,13 @@ export async function generateTrueOrFalse({ modelClass: string; }): Promise { let retryDelay = 1000; - console.log("modelClass", modelClass) + console.log("modelClass", modelClass); const stop = Array.from( - new Set([...(models[runtime.modelProvider].settings.stop || []), ["\n"]]) + new Set([ + ...(models[runtime.modelProvider].settings.stop || []), + ["\n"], + ]) ) as string[]; while (true) { diff --git a/packages/core/src/models.ts b/packages/core/src/models.ts index 4112c1b0d1..0b3d5de47c 100644 --- a/packages/core/src/models.ts +++ b/packages/core/src/models.ts @@ -137,9 +137,9 @@ const models: Models = { temperature: 0.3, }, model: { - [ModelClass.SMALL]: "gemini-1.5-flash", - [ModelClass.MEDIUM]: "gemini-1.5-flash", - [ModelClass.LARGE]: "gemini-1.5-pro", + [ModelClass.SMALL]: "gemini-1.5-flash-latest", + [ModelClass.MEDIUM]: "gemini-1.5-flash-latest", + [ModelClass.LARGE]: "gemini-1.5-pro-latest", [ModelClass.EMBEDDING]: "text-embedding-004", }, }, @@ -187,8 +187,7 @@ const models: Models = { settings.LARGE_OPENROUTER_MODEL || settings.OPENROUTER_MODEL || "nousresearch/hermes-3-llama-3.1-405b", - [ModelClass.EMBEDDING]: - "text-embedding-3-small", + [ModelClass.EMBEDDING]: "text-embedding-3-small", }, }, [ModelProviderName.OLLAMA]: { diff --git a/packages/core/src/runtime.ts b/packages/core/src/runtime.ts index 
2731ed0ec4..6ec9494704 100644 --- a/packages/core/src/runtime.ts +++ b/packages/core/src/runtime.ts @@ -498,14 +498,14 @@ export class AgentRuntime implements IAgentRuntime { * @returns The results of the evaluation. */ async evaluate(message: Memory, state?: State, didRespond?: boolean) { - console.log("Evaluate: ", didRespond) + console.log("Evaluate: ", didRespond); const evaluatorPromises = this.evaluators.map( async (evaluator: Evaluator) => { - console.log("Evaluating", evaluator.name) + console.log("Evaluating", evaluator.name); if (!evaluator.handler) { return null; } - if(!didRespond && !evaluator.alwaysRun) { + if (!didRespond && !evaluator.alwaysRun) { return null; } const result = await evaluator.validate(this, message, state); diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts index 4fa54fcb90..7d61a241f5 100644 --- a/packages/core/src/types.ts +++ b/packages/core/src/types.ts @@ -550,7 +550,11 @@ export interface IAgentRuntime { state?: State, callback?: HandlerCallback ): Promise; - evaluate(message: Memory, state?: State, didRespond?: boolean): Promise; + evaluate( + message: Memory, + state?: State, + didRespond?: boolean + ): Promise; ensureParticipantExists(userId: UUID, roomId: UUID): Promise; ensureUserExists( userId: UUID, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8ac65575ff..89da935c5d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -709,33 +709,6 @@ importers: specifier: 7.1.0 version: 7.1.0 - packages/test: - dependencies: - '@ai16z/adapter-sqlite': - specifier: workspace:* - version: link:../adapter-sqlite - '@ai16z/adapter-sqljs': - specifier: workspace:* - version: link:../adapter-sqljs - '@ai16z/adapter-supabase': - specifier: workspace:* - version: link:../adapter-supabase - '@ai16z/eliza': - specifier: workspace:* - version: link:../core - '@ai16z/plugin-bootstrap': - specifier: workspace:* - version: link:../plugin-bootstrap - '@ai16z/plugin-node': - specifier: workspace:* - version: link:../plugin-node 
- tsup: - specifier: ^8.3.5 - version: 8.3.5(jiti@1.21.6)(postcss@8.4.47)(tsx@4.19.2)(typescript@5.6.3)(yaml@2.6.0) - whatwg-url: - specifier: 7.1.0 - version: 7.1.0 - packages: '@ai-sdk/anthropic@0.0.53':