forked from rui2333/Accessibility-Advocate-AI
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathapp.py
127 lines (102 loc) · 4.14 KB
/
app.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
# Streamlit chat application entry point: configures the page, seeds
# per-session defaults, and loads API keys from the environment before
# the UI is built.
import streamlit as st
import os
from toolhouse import Toolhouse
from llms import llms, llm_call
from http_exceptions.client_exceptions import NotFoundException

st.set_page_config(
    page_title="Intelligence AI",
)

# Streamlit reruns this script on every interaction, so each default is
# written only once, guarded by a membership test.
if "messages" not in st.session_state:
    st.session_state.messages = []  # full chat history (user/assistant/tool messages)
if "user" not in st.session_state:
    st.session_state.user = ""
if "stream" not in st.session_state:
    st.session_state.stream = True
if "provider" not in st.session_state:
    # Default provider comes from the first entry of the configured LLM table.
    st.session_state.provider = llms.get(next(iter(llms))).get("provider")
if "bundle" not in st.session_state:
    st.session_state.bundle = "default"
if "previous_bundle" not in st.session_state:
    st.session_state.previous_bundle = "default"

from utils import print_messages, append_and_print
# BUG FIX: the original did `import dotenv` but then called the bare name
# `load_dotenv(...)`, which raises NameError at startup. Import the
# function explicitly so the call resolves.
from dotenv import load_dotenv

# NOTE(review): hard-coded dotenv path — confirm it matches the deployment host.
load_dotenv(dotenv_path="/home/user/.ssh/.env")
TOGETHER_API_KEY = os.getenv("TOGETHER_API_KEY")
TOOLHOUSE_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
st.logo("logo.svg")

with st.sidebar:
    # Toolhouse client used only to list the tools installed in the
    # currently selected bundle.
    th = Toolhouse(access_token=TOOLHOUSE_API_KEY, provider=st.session_state.provider)
    st.title("Accessibility Advocate")  # typo fix: was "Adovcate"
    with st.expander("Advanced"):
        llm_choice = st.selectbox("Model", tuple(llms.keys()))
        st.session_state.stream = st.toggle("Stream responses", True)
        user = st.text_input("User", "daniele")
        st.session_state.bundle = st.text_input("Bundle", "default")

    # BUG FIX: the original called th.get_tools() twice — once unguarded
    # (which crashed on an unknown bundle before the try/except could run)
    # and once inside the guard. Fetch once, inside the guard, and reuse
    # the result; fall back to an empty tool list so later llm_call()
    # invocations still receive a valid value.
    try:
        available_tools = th.get_tools(bundle=st.session_state.bundle)
    except NotFoundException:
        available_tools = None
    st.session_state.tools = available_tools or []

    if not available_tools:
        st.subheader("No tools installed")
        st.caption(
            "Go to the [Tool Store](https://app.toolhouse.ai/store) to install your tools, or visit [Bundles](https://app.toolhouse.ai/bundles) to check if the selected bundle exists."
        )
    else:
        st.subheader("Installed tools")
        for tool in available_tools:
            # Anthropic tool dicts carry "name" at the top level; other
            # providers nest it under "function" (OpenAI-style schema).
            tool_name = tool.get("name")
            if st.session_state.provider != "anthropic":
                tool_name = tool["function"].get("name")
            st.page_link(f"https://app.toolhouse.ai/store/{tool_name}", label=tool_name)
        st.caption(
            "\n\nManage your tools in the [Tool Store](https://app.toolhouse.ai/store) or your [Bundles](https://app.toolhouse.ai/bundles)."
        )

# Demo buttons added to the sidebar (labels "sidebar0".."sidebar3").
for i in range(4):
    with st.sidebar:
        side_bar_selection = "sidebar" + str(i)
        st.button(label=side_bar_selection)
st.markdown("---")
# Resolve the chosen model and (re)build the Toolhouse client for its provider.
llm = llms.get(llm_choice)
st.session_state.provider = llm.get("provider")
model = llm.get("model")
# CONSISTENCY FIX: the sidebar client passes access_token explicitly; the
# original rebuilt this client without it, leaving it unauthenticated.
th = Toolhouse(access_token=TOOLHOUSE_API_KEY, provider=llm.get("provider"))

# Refresh the cached tool list only when the selected bundle actually changed.
if st.session_state.bundle != st.session_state.previous_bundle:
    st.session_state.tools = th.get_tools(bundle=st.session_state.bundle)
    st.session_state.previous_bundle = st.session_state.bundle

th.set_metadata("timezone", -7)  # NOTE(review): hard-coded UTC-7 offset — confirm intent
if user:
    th.set_metadata("id", user)

print_messages(st.session_state.messages, st.session_state.provider)


def _run_completion():
    """Call the selected LLM with the current conversation state and
    stream/print the reply, returning the completion object.

    Extracted because the original duplicated this 10-line llm_call
    invocation verbatim for the initial turn and the tool-result loop.
    """
    with llm_call(
        provider=llm_choice,
        model=model,
        messages=st.session_state.messages,
        stream=st.session_state.stream,
        tools=st.session_state.tools,
        max_tokens=4096,
        temperature=0.1,
    ) as response:
        return append_and_print(response)


if prompt := st.chat_input("What is up?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # First model turn; then keep looping while the model keeps
    # requesting tool executions, feeding results back into the history.
    completion = _run_completion()
    tool_results = th.run_tools(completion, append=False)
    while tool_results:
        st.session_state.messages += tool_results
        completion = _run_completion()
        tool_results = th.run_tools(completion, append=False)