Skip to content

Commit

Permalink
Merge pull request #851 from arc53/feat/premaillm
Browse files Browse the repository at this point in the history
Add PremAI LLM implementation
  • Loading branch information
dartpain authored Feb 13, 2024
2 parents 2c99158 + b7927d8 commit 5b9352a
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 1 deletion.
3 changes: 3 additions & 0 deletions application/core/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,9 @@ class Settings(BaseSettings):
SAGEMAKER_ACCESS_KEY: Optional[str] = None # SageMaker access key
SAGEMAKER_SECRET_KEY: Optional[str] = None # SageMaker secret key

# PremAI project identifier, read by the PremAI LLM provider at construction time
PREMAI_PROJECT_ID: Optional[str] = None


# Resolve the application root (two directories above this file) and build the
# module-level Settings singleton from the .env file found there.
path = Path(__file__).parent.parent.absolute()
settings = Settings(_env_file=path.joinpath(".env"), _env_file_encoding="utf-8")
4 changes: 3 additions & 1 deletion application/llm/llm_creator.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from application.llm.llama_cpp import LlamaCpp
from application.llm.anthropic import AnthropicLLM
from application.llm.docsgpt_provider import DocsGPTAPILLM
from application.llm.premai import PremAILLM



Expand All @@ -15,7 +16,8 @@ class LLMCreator:
'huggingface': HuggingFaceLLM,
'llama.cpp': LlamaCpp,
'anthropic': AnthropicLLM,
'docsgpt': DocsGPTAPILLM
'docsgpt': DocsGPTAPILLM,
'premai': PremAILLM,
}

@classmethod
Expand Down
33 changes: 33 additions & 0 deletions application/llm/premai.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
from application.llm.base import BaseLLM
from application.core.settings import settings

class PremAILLM(BaseLLM):
    """LLM provider backed by the PremAI chat-completions API."""

    def __init__(self, api_key):
        # Imported lazily so the optional `premai` package is only required
        # when this provider is actually selected.
        from premai import Prem

        self.client = Prem(api_key=api_key)
        self.api_key = api_key
        # Project scoping for every request; configured via PREMAI_PROJECT_ID.
        self.project_id = settings.PREMAI_PROJECT_ID

    def gen(self, model, engine, messages, stream=False, **kwargs):
        """Return the complete response text for *messages* in one call."""
        completion = self.client.chat.completions.create(
            model=model,
            project_id=self.project_id,
            messages=messages,
            stream=stream,
            **kwargs,
        )
        return completion.choices[0].message["content"]

    def gen_stream(self, model, engine, messages, stream=True, **kwargs):
        """Yield response text incrementally as chunks arrive from the API."""
        completion = self.client.chat.completions.create(
            model=model,
            project_id=self.project_id,
            messages=messages,
            stream=stream,
            **kwargs,
        )
        for chunk in completion:
            delta_text = chunk.choices[0].delta["content"]
            # Skip keep-alive/empty deltas that carry no text.
            if delta_text is not None:
                yield delta_text

0 comments on commit 5b9352a

Please sign in to comment.