RAG102.py
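#####################################################
# Assumed setup, not shown in this file: the code below references `pc`,
# `embed`, and `OPENAI_API_KEY`, which were presumably created in an earlier
# step (e.g. RAG101.py). The sketch below is only a guess at that setup,
# assuming a Pinecone v3 client and OpenAI ada-002 embeddings.
import os
from pinecone import Pinecone as PineconeClient
from langchain.embeddings.openai import OpenAIEmbeddings

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
pc = PineconeClient(api_key=os.environ.get("PINECONE_API_KEY", ""))
embed = OpenAIEmbeddings(
    model='text-embedding-ada-002',  # assumed embedding model
    openai_api_key=OPENAI_API_KEY
)
#####################################################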
from langchain.vectorstores import Pinecone

text_field = "text"
index_name = '<>'  # name of the Pinecone index created earlier

# switch back to a normal index for LangChain
index = pc.Index(index_name)

vectorstore = Pinecone(
    index, embed.embed_query, text_field
)

query = "who was Benito Mussolini?"

vectorstore.similarity_search(
    query,  # our search query
    k=3     # return 3 most relevant docs
)
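# The call above returns a list of LangChain Documents; an illustrative way to
# inspect what came back (not part of the original script):
for doc in vectorstore.similarity_search(query, k=3):
    print(doc.metadata.get("source", "?"), "->", doc.page_content[:200])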
#####################################################
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA

# completion llm
llm = ChatOpenAI(
    openai_api_key=OPENAI_API_KEY,
    model_name='gpt-3.5-turbo',
    temperature=0.0
)

# "stuff" all retrieved documents into a single prompt for the llm
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=vectorstore.as_retriever()
)

qa.run(query)
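# The retriever above uses the vector store's default number of documents
# (typically 4). An illustrative variant that asks for 5 chunks instead via
# search_kwargs (the name qa_k5 is just for this example):
qa_k5 = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=vectorstore.as_retriever(search_kwargs={"k": 5})
)
qa_k5.run(query)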
######################################################### with source link
from langchain.chains import RetrievalQAWithSourcesChain

qa_with_sources = RetrievalQAWithSourcesChain.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=vectorstore.as_retriever()
)

qa_with_sources(query)
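# Calling the sources chain returns a dict rather than a plain string; the
# result should contain an 'answer' plus the 'sources' it was drawn from
# (illustrative usage):
result = qa_with_sources(query)
print(result["answer"])
print(result["sources"])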
######################################################### agent
from langchain.agents import Tool
from langchain.agents import AgentType
from langchain.agents import initialize_agent
from langchain.agents import load_tools
from langchain.memory import ConversationBufferMemory
from langchain.llms import OpenAI

# simple buffer memory (unused below; the windowed memory created next is the
# one actually passed to the agent)
memory = ConversationBufferMemory(memory_key="chat_history")

tools = [
    Tool(
        name='Knowledge Base',
        func=qa.run,
        description=(
            'use this tool when answering general knowledge queries to get '
            'more information about the topic'
        )
    )
]
# conversational memory (windowed: keep only the last k=5 exchanges)
from langchain.chains.conversation.memory import ConversationBufferWindowMemory

conversational_memory = ConversationBufferWindowMemory(
    memory_key='chat_history',
    k=5,
    return_messages=True
)

agent = initialize_agent(
    agent='chat-conversational-react-description',
    tools=tools,
    llm=llm,
    verbose=True,
    max_iterations=3,
    early_stopping_method='generate',
    memory=conversational_memory
)
agent("how many records used in the dataset in the paper?")
#############################
from langchain.agents import tool

@tool
def coolest_guy(text: str) -> str:
    '''Returns the name of the coolest guy in the universe'''
    return "cc"

# built-in tools: Wikipedia search and an LLM-powered calculator
# (the wikipedia tool requires the `wikipedia` package to be installed)
tools = load_tools(["wikipedia", "llm-math"], llm=llm)
tools = tools + [coolest_guy]

agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True
)
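# Illustrative queries for the zero-shot agent: the first should be routed to
# the custom coolest_guy tool, the second to the llm-math tool.
agent.run("Who is the coolest guy in the universe?")
agent.run("What is 25 raised to the power of 0.5?")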