-
Notifications
You must be signed in to change notification settings - Fork 18
/
utils.py
154 lines (125 loc) · 5.62 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
from langchain_core.runnables.utils import Output
from lightning import LndNode
from all_tools.bitcoin_tools import LndTools
from L402 import L402APIChain
from langchain import hub
from all_tools import api_tools
from langchain.schema import StrOutputParser
from langchain.prompts import PromptTemplate
from langchain_openai import OpenAI, ChatOpenAI
from langchain.agents import AgentExecutor, create_structured_chat_agent
from langchain.schema.runnable import RunnableMap
from dotenv import dotenv_values
config = dotenv_values(".env.shared")
class LLMUtils:
    """Build LLM chains, tools, and agent executors around an LND node.

    Wires a ChatOpenAI model to L402-enabled API chains (paid API access
    over Lightning), LND node tools, and structured-chat agent executors.
    Configuration (target host, tool name/description) is read from the
    module-level `config` loaded from `.env.shared`.
    """

    lnd_node: LndNode

    def __init__(self, lnd_node: LndNode):
        self.lnd_node = lnd_node
        # temperature=0 keeps routing / tool-selection output deterministic.
        self.llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)

    def api_chain_factory(self, api_docs: str, api_host: str) -> L402APIChain:
        """Create an L402APIChain over `api_docs`, restricted to `api_host`.

        `limit_to_domains` confines outgoing requests to the target host
        plus localhost.
        """
        api_chain = L402APIChain.from_llm_and_api_docs(
            self.llm,
            api_docs,
            lightning_node=self.lnd_node,
            verbose=True,
            limit_to_domains=[api_host, "localhost"],
        )
        return api_chain

    def get_target_api_chain(self) -> L402APIChain:
        """Build the API chain for the host configured via TARGET_HOST."""
        target_host = config.get("TARGET_HOST", "unknown_host")
        # Add the specifics of the API to be worked with here
        api_docs = f"""BASE URL: {target_host}
API Documentation
Request:
Response:
"""
        return self.api_chain_factory(api_docs=api_docs, api_host=target_host)

    def get_target_api_tool(self):
        """Wrap the target API chain as a LangChain tool.

        Name and description come from API_TOOL_NAME / API_TOOL_DESCRIPTION
        in the shared env config, with generic fallbacks.
        """
        name = config.get("API_TOOL_NAME", "Default api tool name")
        description = config.get("API_TOOL_DESCRIPTION", "Default api tool description")
        target_api_chain = self.get_target_api_chain()
        return api_tools.api_tool_factory(
            api_chain=target_api_chain, name=name, description=description
        )

    def _get_agent_executor(self, tools) -> AgentExecutor:
        """Create a structured-chat agent executor over `tools`."""
        # Community-published prompt for structured chat agents.
        prompt = hub.pull("hwchase17/structured-chat-agent")
        agent = create_structured_chat_agent(llm=self.llm, prompt=prompt, tools=tools)
        return AgentExecutor(
            agent=agent,
            tools=tools,
            verbose=True,
            handle_parsing_errors=True,
            return_intermediate_steps=True,
        )

    def get_lnd_agent_executor(self) -> AgentExecutor:
        """Agent executor equipped only with the LND node tools."""
        lnd_tools = LndTools.from_lnd_node(lnd_node=self.lnd_node).get_tools()
        return self._get_agent_executor(tools=lnd_tools)

    def get_entry_point(self, additional_tools) -> AgentExecutor:
        """Agent executor with the LND tools plus `additional_tools`."""
        lnd_tools = LndTools.from_lnd_node(lnd_node=self.lnd_node).get_tools()
        return self._get_agent_executor(tools=lnd_tools + additional_tools)

    def get_entry_point_v2(self):
        """Router chain: classify the question, then dispatch to a sub-chain.

        A router LLM labels the incoming question LND / BLOCKCHAIN / API /
        FAQ / OTHER; `select_chain` routes to the matching chain or agent.
        Returns the composed runnable; expects input `{"question": ...}`.
        """
        router_prompt = PromptTemplate.from_template(
            """If the input is about payments, balance, general info of specific Lightning node, respond with `LND`. If the input is about general info of Lightning node technology, Bitcoin or blockchain in general, respond with `BLOCKCHAIN`.
If the input is about retrieving API data, respond with `API`. If the input is about the functionality of this tool or how this tool may help user, respond with `FAQ`. Otherwise, respond `OTHER`
Question: {question}"""
        )
        blockchain_llm_chain = (
            PromptTemplate.from_template(
                """You are an expert in Blockchain technology. You have huge experience with Bitcoin and Lightning Network. Respond to the question:
Question: {input}"""
            )
            | ChatOpenAI()
            | StrOutputParser()
        )
        target_host = config.get("TARGET_HOST", "missed")
        faq_str = f"""You are a tool designed to help users communicate with Lightning Network that is on top of the bitcoin blockchain. Also you are able to communicate with some websites API. One of them is: `{target_host}`. On this website you can find API data and services. You can also find other information. Respond with information about your features.
"""
        faq_llm_chain = (
            PromptTemplate.from_template(
                faq_str
                + """
Question: {input}"""
            )
            | ChatOpenAI()
            | StrOutputParser()
        )
        # BUG FIX: the original wrote `{input}` inside this f-string, so it
        # was interpolated at definition time with the `input` BUILTIN
        # (rendering "<built-in function input>") instead of surviving as a
        # template placeholder. `{{input}}` escapes the braces so the
        # resulting template keeps a literal `{input}` variable.
        general_llm_chain = (
            PromptTemplate.from_template(
                f"""Respond that you dont have answer for user query and provide information about your features based on this text: ###{faq_str}###
Question: {{input}}"""
            )
            | ChatOpenAI()
            | StrOutputParser()
        )
        router_chain = router_prompt | ChatOpenAI() | StrOutputParser()
        target_api_chain = self.get_target_api_chain()
        agent_executor = self.get_lnd_agent_executor()

        # Routing logic — dispatch on the router's classification label.
        def select_chain(output):
            action = output["action"]
            if action == "LND":
                return agent_executor
            elif action == "OTHER":
                return general_llm_chain
            elif action == "FAQ":
                return faq_llm_chain
            elif action == "BLOCKCHAIN":
                return blockchain_llm_chain
            elif action == "API":
                return target_api_chain
            else:
                raise ValueError(f"Unknown routing action: {action!r}")

        chain = (
            RunnableMap(
                {"action": router_chain, "input": {"question": lambda x: x["question"]}}
            )
            | select_chain
        )
        return chain