Skip to content

Commit

Permalink
Merge pull request #385 from TransformerOptimus/new-open-ai-16k-model-support

Browse files Browse the repository at this point in the history

gpt-3.5-turbo-16k support
  • Loading branch information
I’m authored Jun 15, 2023
2 parents 3284c01 + f157bed commit a479e06
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion gui/pages/Content/Agents/AgentCreate.js
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ export default function AgentCreate({sendAgentData, selectedProjectId, fetchAgen

const [goals, setGoals] = useState(['Describe the agent goals here']);

const models = ['gpt-4', 'gpt-3.5-turbo']
const models = ['gpt-4', 'gpt-3.5-turbo','gpt-3.5-turbo-16k']
const [model, setModel] = useState(models[1]);
const modelRef = useRef(null);
const [modelDropdown, setModelDropdown] = useState(false);
Expand Down
4 changes: 2 additions & 2 deletions superagi/helper/token_counter.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ class TokenCounter:
@staticmethod
def token_limit(model: str = "gpt-3.5-turbo-0301") -> int:
try:
model_token_limit_dict = {"gpt-3.5-turbo-0301": 4032, "gpt-4-0314": 8092, "gpt-3.5-turbo": 4032, "gpt-4": 8092}
model_token_limit_dict = {"gpt-3.5-turbo-0301": 4032, "gpt-4-0314": 8092, "gpt-3.5-turbo": 4032, "gpt-4": 8092,"gpt-3.5-turbo-16k": 16184}
return model_token_limit_dict[model]
except KeyError:
print("Warning: model not found. Using cl100k_base encoding.")
Expand All @@ -17,7 +17,7 @@ def token_limit(model: str = "gpt-3.5-turbo-0301") -> int:
@staticmethod
def count_message_tokens(messages: List[BaseMessage], model: str = "gpt-3.5-turbo-0301") -> int:
try:
model_token_per_message_dict = {"gpt-3.5-turbo-0301": 4, "gpt-4-0314": 3, "gpt-3.5-turbo": 4, "gpt-4": 3}
model_token_per_message_dict = {"gpt-3.5-turbo-0301": 4, "gpt-4-0314": 3, "gpt-3.5-turbo": 4, "gpt-4": 3,"gpt-3.5-turbo-16k":4}
encoding = tiktoken.encoding_for_model(model)
except KeyError:
print("Warning: model not found. Using cl100k_base encoding.")
Expand Down

0 comments on commit a479e06

Please sign in to comment.