Commit

Merge pull request #221 from bigadsoleiman/main
Add Llama2 support for Amazon Bedrock. Fix prompts for Llama2 on SageMaker.
massi-ang authored Nov 22, 2023
2 parents c0cdde1 + a95f04f commit c1d9b8c
Showing 19 changed files with 621 additions and 84 deletions.
lib/chatbot-api/functions/api-handler/routes/embeddings.py (3 changes: 1 addition & 2 deletions)

@@ -37,7 +37,6 @@ def embeddings():
     if selected_model is None:
         raise genai_core.types.CommonError("Model not found")
 
-    ret_value = genai_core.embeddings.generate_embeddings(
-        selected_model, request.input)
+    ret_value = genai_core.embeddings.generate_embeddings(selected_model, request.input)
 
     return {"ok": True, "data": ret_value}
@@ -2,3 +2,4 @@
 from .sagemaker import *
 from .bedrock import *
 from .base import Mode
+from .shared import *
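
Note: wildcard imports like the new `from .shared import *` line above are typically there so that importing the adapters package pulls in every adapter module, and the modules make themselves available as a side effect of being imported. The sketch below illustrates that decorator-based registry pattern with hypothetical names (`AdapterRegistry`, `Llama2ChatAdapter`); it is not the repository's actual registry code.

# Illustrative only: hypothetical names, not this repository's registry API.
import re

class AdapterRegistry:
    def __init__(self):
        self._entries = []

    def register(self, pattern):
        # Decorator: associate a model-id regex with an adapter class at import time.
        def decorator(cls):
            self._entries.append((re.compile(pattern), cls))
            return cls
        return decorator

    def get_adapter(self, model_id):
        # Return the first adapter whose pattern matches the requested model id.
        for pattern, cls in self._entries:
            if pattern.match(model_id):
                return cls
        raise ValueError(f"No adapter registered for {model_id}")

registry = AdapterRegistry()

@registry.register(r"^meta\.llama2")
class Llama2ChatAdapter:
    def format_prompt(self, prompt):
        return f"[INST] {prompt} [/INST]"

print(registry.get_adapter("meta.llama2-13b-chat-v1"))  # -> Llama2ChatAdapter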
@@ -73,7 +73,7 @@ def get_prompt(self):
 {chat_history}
 Question: {input}"""
 
         return PromptTemplate.from_template(template)
 
     def get_condense_question_prompt(self):
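
Note: the commit message mentions fixing the prompts for Llama2 on SageMaker. Llama 2 chat models expect a prompt built from `[INST] ... [/INST]` turns with an optional `<<SYS>> ... <</SYS>>` system block, rather than a plain chat-history transcript like the template above. The helper below is a minimal sketch of that general convention; it is illustrative and not the adapter code changed in this commit.

# Minimal sketch of the Llama 2 chat prompt convention; illustrative only.

def build_llama2_prompt(system_prompt, turns, new_message):
    """turns: list of (user, assistant) pairs; new_message: the next user input."""
    prompt = f"<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n"
    for i, (user, assistant) in enumerate(turns):
        if i == 0:
            # The first user turn shares the [INST] block with the system prompt.
            prompt += f"{user} [/INST] {assistant} </s>"
        else:
            prompt += f"<s>[INST] {user} [/INST] {assistant} </s>"
    if turns:
        prompt += f"<s>[INST] {new_message} [/INST]"
    else:
        prompt += f"{new_message} [/INST]"
    return prompt

print(build_llama2_prompt(
    "You are a helpful assistant.",
    [("Hi!", "Hello, how can I help?")],
    "What is Amazon Bedrock?",
))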
@@ -2,3 +2,4 @@
 from .titan import *
 from .ai21_j2 import *
 from .cohere import *
+from .llama2_chat import *
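
Note: the new `llama2_chat` adapter targets the Meta Llama 2 chat models on Amazon Bedrock. Below is a minimal sketch of calling such a model directly with boto3's `bedrock-runtime` client, assuming the `meta.llama2-13b-chat-v1` model ID is enabled in your account and region; this is not the adapter code itself.

# Minimal sketch of invoking Llama 2 on Amazon Bedrock with boto3.
# Assumes meta.llama2-13b-chat-v1 is enabled in your account/region.
import json
import boto3

client = boto3.client("bedrock-runtime", region_name="us-east-1")

body = json.dumps({
    "prompt": "[INST] What is Amazon Bedrock? [/INST]",
    "max_gen_len": 256,
    "temperature": 0.5,
    "top_p": 0.9,
})

response = client.invoke_model(
    modelId="meta.llama2-13b-chat-v1",
    contentType="application/json",
    accept="application/json",
    body=body,
)

# The response body is a stream; the generated text is in the "generation" field.
result = json.loads(response["body"].read())
print(result["generation"])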