ctankso_americas_corpdir_net committed on
Commit e0800e8
1 Parent(s): 14cf2a1

feat: temperature adjustment

InnovationHub/llm/__pycache__/chain.cpython-310.pyc CHANGED
Binary files a/InnovationHub/llm/__pycache__/chain.cpython-310.pyc and b/InnovationHub/llm/__pycache__/chain.cpython-310.pyc differ
 
InnovationHub/llm/__pycache__/vector_store.cpython-310.pyc CHANGED
Binary files a/InnovationHub/llm/__pycache__/vector_store.cpython-310.pyc and b/InnovationHub/llm/__pycache__/vector_store.cpython-310.pyc differ
 
InnovationHub/llm/chain.py CHANGED
@@ -11,6 +11,71 @@ from langchain import OpenAI, ConversationChain, LLMChain, PromptTemplate
 from langchain.chains.conversation.memory import ConversationalBufferWindowMemory
 from EdgeGPT import Chatbot
 
+
+class VehicleManualChatbot:
+    def __init__(self, db_paths, vehicle_options, embeddings):
+        self.db_paths = db_paths
+        self.vehicle_options = vehicle_options
+        self.embeddings = embeddings
+        self.chatgpt_chain = self._create_chatgpt_chain()
+
+
+    def _create_chatgpt_chain(self, temperature=0.5):
+        template = """
+        {history}
+        Human: {human_input}
+        Assistant:"""
+
+        prompt = PromptTemplate(
+            input_variables=["history", "human_input"],
+            template=template
+        )
+
+        return LLMChain(
+            llm=OpenAI(temperature=temperature),
+            prompt=prompt,
+            verbose=True,
+            memory=ConversationalBufferWindowMemory(k=2),
+        )
+
+
+    def _get_prompt(self, question, vehicle, k=4):
+        prompt = f"""
+        I need information from my {vehicle} manual.
+        I will provide an excerpt from the manual. Use the excerpt and nothing else to answer the question.
+        You must refer to the excerpt as "{vehicle} Manual" in your response. Here is the excerpt:
+        """
+
+        index = FAISS.load_local(
+            folder_path=self.db_paths[vehicle], embeddings=self.embeddings)
+        similar_docs = index.similarity_search(query=question, k=k)
+        context = []
+        for d in similar_docs:
+            content = d.page_content
+            context.append(content)
+        user_input = prompt + '\n[EXCERPT]' + '\n' + \
+            '\n'.join(context[:k]) + '\n' + 'Question:\n' + question
+        return user_input
+
+
+    def _ask_question(self, question, vehicle, k=2):
+        index = FAISS.load_local(
+            folder_path=self.db_paths[vehicle], embeddings=self.embeddings)
+
+        prompt = self._get_prompt(question=question, vehicle=vehicle,
+                                  k=k)
+        response = self.chatgpt_chain.predict(human_input=prompt)
+        return response
+
+
+    def chat(self, question, vehicle, k=2, temperature=0.5):
+        self.chatgpt_chain = self._create_chatgpt_chain(temperature=temperature)
+        response = self._ask_question(question=question, vehicle=vehicle,
+                                      k=k)
+        return response
+
+
+
 db_paths = {
     "S-Class": "data/s-class-manual",
     "EQS": "data/eqs-manual"
@@ -18,84 +83,27 @@ db_paths = {
 
 embeddings = HuggingFaceEmbeddings()
 
-
-template = """
-{history}
-Human: {human_input}
-Assistant:"""
-
-prompt = PromptTemplate(
-    input_variables=["history", "human_input"],
-    template=template
-)
-
-chatgpt_chain = LLMChain(
-    llm=OpenAI(temperature=0),
-    prompt=prompt,
-    verbose=True,
-    memory=ConversationalBufferWindowMemory(k=2),
-)
-human_input = """I want you to act as a voice assistant for a Mercedes-Benz vehicle.
-I will provide you with excerpts from a vehicle manual.
-You must use the excerpts to answer the user's question as best as you can.
-If you are unsure about the answer, you will truthfully say "not sure".
-Let's think step by step.
-"""
-bot_response = chatgpt_chain.predict(human_input=human_input)
-
-
-def get_prompt(question, vehicle, embeddings, k=4):
-    prompt = f"""
-    I need information from my {vehicle} manual.
-    I will provide an excerpt from the manual. Use the excerpt and nothing else to answer the question.
-    You must refer to the excerpt as "{vehicle} Manual" in your response. Here is the excerpt:
-    """
-
-    index = FAISS.load_local(folder_path=db_paths[vehicle], embeddings=embeddings)
-    similar_docs = index.similarity_search(query=question, k=k)
-    context = []
-    for d in similar_docs:
-        content = d.page_content
-        context.append(content)
-    user_input = prompt + '\n[EXCERPT]' + '\n' + \
-        '\n'.join(context[:k]) + '\n' + 'Question:\n' + question
-    return user_input
-
-
-def ask_question(question, vehicle, embeddings, chatgpt_chain, k=2):
-    index = FAISS.load_local(
-        folder_path=db_paths[vehicle], embeddings=embeddings)
-
-    prompt = get_prompt(question=question, vehicle=vehicle,
-                        embeddings=embeddings, k=k)
-    response = chatgpt_chain.predict(human_input=prompt)
-    return response
-
-
-async def chatbot(question, vehicle, k=2):
-    response = ask_question(question=question, vehicle=vehicle,
-                            embeddings=embeddings, chatgpt_chain=chatgpt_chain, k=2)
-    return response
-
+vehicle_options = ["S-Class", "EQS"]
+chatbot = VehicleManualChatbot(db_paths=db_paths,
+                               vehicle_options=vehicle_options,
+                               embeddings=embeddings)
 
 def start_ui():
-    vehicle_options = ["S-Class", "EQS"]
     chatbot_interface = gradio.Interface(
-        fn=chatbot,
-        inputs=[
-            "text",
-            gradio.inputs.Dropdown(vehicle_options, label="Select Vehicle Model"),
-            gradio.inputs.Slider(minimum=1, maximum=10, step=1, label="k")
+        fn=chatbot.chat,
+        inputs=["text",
+                gradio.inputs.Dropdown(
+                    vehicle_options, label="Select Vehicle Model"),
+                gradio.inputs.Slider(minimum=1, maximum=10, step=1, label="k")
         ],
         outputs="text",
         title="Owner's Manual",
         description="Ask your vehicle manual and get a response.",
-        examples=[
-            ["What are the different features of the dashboard console?", "S-Class", 2],
-            ["What is flacon?", "S-Class", 3],
-            ["What is hyperscreen?", "EQS", 2],
-            ["Where can I find my vin?", "EQS", 3]
+        examples=[["What are the different features of the dashboard console?", "S-Class", 2],
+                  ["What is flacon?", "S-Class", 3],
+                  ["What is hyperscreen?", "EQS", 2],
+                  ["Where can I find my vin?", "EQS", 3]
        ]
    )
 
-    chatbot_interface.launch()
+    chatbot_interface.launch()
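
This change folds the module-level prompt and chain globals into a VehicleManualChatbot class whose chat() rebuilds the LLMChain with a caller-supplied temperature. A minimal usage sketch of the new entry point, assuming the FAISS indexes referenced in db_paths have already been built (the question text and parameter values below are illustrative, not part of the commit):

# Sketch only: exercises the temperature parameter introduced in this commit.
# Assumes the FAISS indexes in data/s-class-manual and data/eqs-manual exist locally.
from InnovationHub.llm.chain import chatbot

# Lower temperature keeps answers close to the retrieved manual excerpts;
# higher values make the OpenAI completion more free-form.
answer = chatbot.chat(question="How do I adjust the ambient lighting?",
                      vehicle="S-Class", k=3, temperature=0.2)
print(answer)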
InnovationHub/llm/vector_store.py CHANGED
@@ -85,7 +85,7 @@ def convert_to_html(similar_docs):
     return html
 
 
-def create_similarity_plot(embeddings, labels, query_index, n_clusters=3):
+def create_similarity_plot(embeddings, labels, query, n_clusters=3):
     # Only include embeddings that have corresponding labels
     embeddings_with_labels = [
         embedding for i, embedding in enumerate(embeddings) if i < len(labels)]
@@ -100,16 +100,16 @@ def create_similarity_plot(embeddings, labels, query_index, n_clusters=3):
 
     # Create a trace for the query point
     query_trace = go.Scatter3d(
-        x=[pca_embeddings[query_index, 0]],
-        y=[pca_embeddings[query_index, 1]],
-        z=[pca_embeddings[query_index, 2]],
+        x=[pca_embeddings[-1, 0]],
+        y=[pca_embeddings[-1, 1]],
+        z=[pca_embeddings[-1, 2]],
         mode='markers',
         marker=dict(
             color='black',
             symbol='diamond',
             size=10
         ),
-        name='Query'
+        name=f"Query: '{query}'"
     )
 
     # Create a trace for the other points
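
The query point is now read from pca_embeddings[-1] and labelled with the query text, which presumes the query embedding is appended as the last row of the embeddings passed in. A sketch of a call consistent with that assumption (the variable names here are illustrative, not part of the commit):

# Illustrative only: build the embedding list with the query vector appended last,
# so that pca_embeddings[-1] inside create_similarity_plot is the query point.
query = "What is hyperscreen?"
doc_texts = [d.page_content for d in similar_docs]   # e.g. results of a prior similarity_search
all_embeddings = embeddings.embed_documents(doc_texts) + [embeddings.embed_query(query)]
labels = doc_texts + [query]                          # one label per embedding, query last

create_similarity_plot(embeddings=all_embeddings, labels=labels, query=query, n_clusters=3)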
app.py CHANGED
@@ -2,30 +2,27 @@ from InnovationHub.llm.vector_store import *
 from InnovationHub.llm.chain import *
 
 
-"""
-# Create the vector index
-db_path = "./data/s-class-manual"
-embeddings = HuggingfaceEmbeddings()
-index = FAISS(docs=docs, folder_path=db_path, embeddings=embeddings)
-"""
-
-# Launch the Gradio UI
-def start_gradio():
+def start_ui():
     chatbot_interface = gradio.Interface(
-        fn=chatbot,
-        inputs=["text", gradio.inputs.Checkbox(label="Create bot"), gradio.inputs.Slider(
-            minimum=1, maximum=10, step=1, label="k")],
+        fn=chatbot.chat,
+        inputs=["text",
+                gradio.inputs.Dropdown(
+                    vehicle_options, label="Select Vehicle Model"),
+                gradio.inputs.Slider(minimum=1, maximum=10, step=1, label="k"),
+                gradio.inputs.Slider(
+                    minimum=0.0, maximum=1.0, step=0.01, label="Temperature")
+                ],
         outputs="text",
-        title="Mercedes-Benz S-Class Owner's Manual",
-        description="Ask your vehicle's manual questions and get answers",
-        examples=[
-            ["What are the different features of the dashboard console?", True, 2],
-            ["What do they do?", False, 3]
-        ]
+        title="Owner's Manual",
+        description="Ask your vehicle manual and get a response.",
+        examples=[["What are the different features of the dashboard console?", "S-Class", 2, 0.5],
+                  ["What is flacon?", "S-Class", 3, 0.8],
+                  ["What is hyperscreen?", "EQS", 2, 0.2],
+                  ["Where can I find my vin?", "EQS", 3, 0.3]
+                  ]
     )
-    chatbot_interface.launch()
 
+    chatbot_interface.launch()
 
 if __name__ == '__main__':
-    start_ui()
-
+    start_ui()
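
Gradio passes the interface inputs to fn positionally, so the new Temperature slider lines up with the temperature keyword of VehicleManualChatbot.chat. A quick way to sanity-check that wiring without launching the UI (sketch only; assumes the repo's data/ indexes are present and an OpenAI API key is configured):

# Sketch: call the same bound method the Gradio interface uses, sweeping temperature.
from InnovationHub.llm.chain import chatbot

for t in (0.0, 0.5, 1.0):
    print(t, chatbot.chat("What is hyperscreen?", "EQS", k=2, temperature=t))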