Skip to content

Commit

Permalink
Add Ollama API Key Configuration Support (#566)
Browse files — browse the repository at this point in the history
  • Loading branch information
ruanwz authored Dec 30, 2024
1 parent 06580b0 commit 2289879
Showing 1 changed file with 4 additions and 1 deletion.
5 changes: 4 additions & 1 deletion pydantic_ai_slim/pydantic_ai/models/ollama.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ def __init__(
model_name: OllamaModelName,
*,
base_url: str | None = 'http://localhost:11434/v1/',
api_key: str = 'ollama',
openai_client: AsyncOpenAI | None = None,
http_client: AsyncHTTPClient | None = None,
):
Expand All @@ -83,6 +84,8 @@ def __init__(
model_name: The name of the Ollama model to use. List of models available [here](https://ollama.com/library)
You must first download the model (`ollama pull <MODEL-NAME>`) in order to use the model
base_url: The base url for the ollama requests. The default value is the ollama default
api_key: The API key to use for authentication. Defaults to 'ollama' for local instances,
but can be customized for proxy setups that require authentication
openai_client: An existing
[`AsyncOpenAI`](https://github.com/openai/openai-python?tab=readme-ov-file#async-usage)
client to use, if provided, `base_url` and `http_client` must be `None`.
Expand All @@ -96,7 +99,7 @@ def __init__(
else:
# API key is not required for ollama but a value is required to create the client
http_client_ = http_client or cached_async_http_client()
oai_client = AsyncOpenAI(base_url=base_url, api_key='ollama', http_client=http_client_)
oai_client = AsyncOpenAI(base_url=base_url, api_key=api_key, http_client=http_client_)
self.openai_model = OpenAIModel(model_name=model_name, openai_client=oai_client)

async def agent_model(
Expand Down

0 comments on commit 2289879

Please sign in to comment.