
Commit 96d02b5

fix cut message
vvincent1234 committed Jan 25, 2025
1 parent 01014ec
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion src/agent/custom_massage_manager.py
@@ -95,7 +95,7 @@ def cut_messages(self):
         """Get current message list, potentially trimmed to max tokens"""
         diff = self.history.total_tokens - self.max_input_tokens
         i = 1  # start from 1 to keep system message in history
-        while diff > 0:
+        while diff > 0 and i < len(self.history.messages):
             self.history.remove_message(i)
             diff = self.history.total_tokens - self.max_input_tokens
             i += 1
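
The change bounds the trimming loop by the number of messages in history. Below is a minimal sketch of why the added `i < len(self.history.messages)` guard matters, using a hypothetical ToyHistory stand-in rather than the project's real message-history class: if the remaining messages can never fit under max_input_tokens, the unguarded loop eventually calls remove_message with an out-of-range index.

# Minimal sketch with a hypothetical ToyHistory stand-in (not the project's
# real message-history class) to illustrate the effect of the added bound.

class ToyHistory:
    def __init__(self, token_counts):
        # Each entry stands for one message's token count.
        self.messages = list(token_counts)

    @property
    def total_tokens(self):
        return sum(self.messages)

    def remove_message(self, index):
        self.messages.pop(index)


def cut_messages(history, max_input_tokens):
    diff = history.total_tokens - max_input_tokens
    i = 1  # start from 1 to keep the system message
    # Without the `i < len(history.messages)` guard, a history that cannot be
    # trimmed below the budget would eventually pop an out-of-range index.
    while diff > 0 and i < len(history.messages):
        history.remove_message(i)
        diff = history.total_tokens - max_input_tokens
        i += 1


history = ToyHistory([500, 300, 300])  # still over budget after trimming
cut_messages(history, max_input_tokens=200)
print(history.messages)  # [500, 300] -- loop exits cleanly

Without the guard, the second iteration here would call remove_message(2) on a two-element list and raise IndexError; with it, the loop simply stops once it runs out of removable messages.
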
2 changes: 1 addition & 1 deletion tests/test_browser_use.py
@@ -249,7 +249,7 @@ async def test_browser_use_custom_v2():
 
     llm = utils.get_llm_model(
         provider="deepseek",
-        model_name="deepseek-chat",
+        model_name="deepseek-reasoner",
         temperature=0.8
     )
 
