Commit aaaac46
XThomasBU committed
Parent(s): fc2cb23

test commit for ELI5 (cleanup required)

Files changed:
- code/main.py +14 -17
- code/modules/chat/helpers.py +13 -3
- code/modules/chat/llm_tutor.py +15 -2
- code/modules/config/config.yml +1 -0
- code/modules/config/constants.py +13 -0
code/main.py
CHANGED
@@ -34,12 +34,6 @@ class Chatbot:
         config = yaml.safe_load(f)
         return config
 
-    async def ask_helper(func, **kwargs):
-        res = await func(**kwargs).send()
-        while not res:
-            res = await func(**kwargs).send()
-        return res
-
     @no_type_check
     async def setup_llm(self) -> None:
         """From the session `llm_settings`, create new LLMConfig and LLM objects,
@@ -54,6 +48,7 @@ class Chatbot:
         chat_profile = llm_settings.get("chat_model")
         retriever_method = llm_settings.get("retriever_method")
         memory_window = llm_settings.get("memory_window")
+        ELI5 = llm_settings.get("ELI5")
 
         self._configure_llm(chat_profile)
 
@@ -65,8 +60,12 @@ class Chatbot:
         new_config["llm_params"][
             "memory_window"
         ] = memory_window  # update the memory window in the config
+        new_config["llm_params"]["ELI5"] = ELI5
 
-        self.llm_tutor.update_llm(new_config)
+        # self.llm_tutor.update_llm(new_config)  # TODO: Fi this!!!
+        self.llm_tutor = LLMTutor(
+            self.config, user={"user_id": "abc123", "session_id": "789"}
+        )
         self.chain = self.llm_tutor.qa_bot(memory=memory)
 
         tags = [chat_profile, self.config["vectorstore"]["db_option"]]
@@ -110,6 +109,9 @@ class Chatbot:
                 cl.input_widget.Switch(
                     id="view_sources", label="View Sources", initial=False
                 ),
+                cl.input_widget.Switch(
+                    id="ELI5", label="Explain Like I'm 5 (ELI5)", initial=False
+                ),
                 # cl.input_widget.TextInput(
                 #     id="vectorstore",
                 #     label="temp",
@@ -189,15 +191,15 @@ class Chatbot:
     async def start(self):
         await self.make_llm_settings_widgets(self.config)
 
-        chat_profile = cl.user_session.get("chat_profile")
-        if chat_profile:
-            self._configure_llm(chat_profile)
+        # chat_profile = cl.user_session.get("chat_profile")
+        # if chat_profile:
+        # self._configure_llm(chat_profile)
 
         self.llm_tutor = LLMTutor(
             self.config, user={"user_id": "abc123", "session_id": "789"}
         )
         self.chain = self.llm_tutor.qa_bot()
-        tags = [chat_profile, self.config["vectorstore"]["db_option"]]
+        tags = [self.config["vectorstore"]["db_option"]]
         self.chat_processor = ChatProcessor(self.llm_tutor, tags=tags)
 
         cl.user_session.set("llm_tutor", self.llm_tutor)
@@ -214,11 +216,6 @@ class Chatbot:
         llm_settings = cl.user_session.get("llm_settings", {})
         view_sources = llm_settings.get("view_sources", False)
 
-        print("HERE")
-        print(llm_settings)
-        print(view_sources)
-        print("\n\n")
-
         counter += 1
         cl.user_session.set("counter", counter)
 
@@ -255,7 +252,7 @@ chatbot = Chatbot()
 
 # Register functions to Chainlit events
 cl.set_starters(chatbot.set_starters)
-cl.set_chat_profiles(chatbot.chat_profile)
+# cl.set_chat_profiles(chatbot.chat_profile)
 cl.author_rename(chatbot.rename)
 cl.on_chat_start(chatbot.start)
 cl.on_chat_end(chatbot.on_chat_end)
code/modules/chat/helpers.py
CHANGED
@@ -94,11 +94,21 @@ def get_prompt(config, prompt_type):
     llm_loader = llm_params["llm_loader"]
     use_history = llm_params["use_history"]
 
+    print("llm_params: ", llm_params)
+    print("ELI5", llm_params["ELI5"])
+
+    print("\n\n")
+
     if prompt_type == "qa":
         if llm_loader == "openai":
-            return (
-                OPENAI_PROMPT_WITH_HISTORY if use_history else OPENAI_PROMPT_NO_HISTORY
-            )
+            if llm_params["ELI5"]:
+                return ELI5_PROMPT_WITH_HISTORY
+            else:
+                return (
+                    OPENAI_PROMPT_WITH_HISTORY
+                    if use_history
+                    else OPENAI_PROMPT_NO_HISTORY
+                )
         elif (
             llm_loader == "local_llm"
             and llm_params.get("local_llm_params") == "tiny-llama"
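
Note on the branch above: for the openai loader, ELI5 now takes precedence over the history toggle. A self-contained sketch of that precedence (the prompt constants are stand-in strings here, not the real templates):

ELI5_PROMPT_WITH_HISTORY = "eli5"
OPENAI_PROMPT_WITH_HISTORY = "with_history"
OPENAI_PROMPT_NO_HISTORY = "no_history"

def select_openai_qa_prompt(llm_params: dict) -> str:
    # ELI5 wins; otherwise fall back on the use_history toggle.
    if llm_params.get("ELI5"):
        return ELI5_PROMPT_WITH_HISTORY
    return (
        OPENAI_PROMPT_WITH_HISTORY
        if llm_params.get("use_history")
        else OPENAI_PROMPT_NO_HISTORY
    )

assert select_openai_qa_prompt({"ELI5": True, "use_history": True}) == "eli5"
assert select_openai_qa_prompt({"ELI5": False, "use_history": True}) == "with_history"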
code/modules/chat/llm_tutor.py
CHANGED
@@ -20,6 +20,10 @@ class LLMTutor:
         self.user = user
         self.logger = logger
         self.vector_db = VectorStoreManager(config, logger=self.logger)
+        self.qa_prompt = get_prompt(config, "qa")  # Initialize qa_prompt
+        self.rephrase_prompt = get_prompt(
+            config, "rephrase"
+        )  # Initialize rephrase_prompt
         if self.config["vectorstore"]["embedd_files"]:
             self.vector_db.create_database()
             self.vector_db.save_database()
@@ -45,6 +49,11 @@ class LLMTutor:
             self.vector_db.create_database()
             self.vector_db.save_database()
 
+        if "ELI5" in changes:
+            self.qa_prompt = get_prompt(
+                self.config, "qa"
+            )  # Update qa_prompt if ELI5 changes
+
     def get_config_changes(self, old_config, new_config):
         """
         Get the changes between the old and new configuration.
@@ -79,14 +88,14 @@ class LLMTutor:
         retriever = Retriever(self.config)._return_retriever(db)
 
         if self.config["llm_params"]["use_history"]:
-            qa_chain = CustomConversationalRetrievalChain(
+            self.qa_chain = CustomConversationalRetrievalChain(
                 llm=llm,
                 memory=memory,
                 retriever=retriever,
                 qa_prompt=qa_prompt,
                 rephrase_prompt=rephrase_prompt,
             )
-        return qa_chain
+        return self.qa_chain
 
     def load_llm(self):
         """
@@ -115,6 +124,10 @@ class LLMTutor:
             qa_prompt = get_prompt(self.config, "qa")
         if rephrase_prompt is None:
             rephrase_prompt = get_prompt(self.config, "rephrase")
+
+        print("using qa_prompt: ", qa_prompt)
+        print("\n\n\n")
+        # exit()
         db = self.vector_db.load_database()
         # sanity check to see if there are any documents in the database
         if len(db) == 0:
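
Note on the 'if "ELI5" in changes' hook above: it assumes get_config_changes reports which llm_params keys differ between the old and new config. A minimal sketch of that kind of shallow diff (the function body here is an assumption for illustration; only its use with the "ELI5" key appears in this commit):

def changed_keys(old_params: dict, new_params: dict) -> set:
    # Keys present in either dict whose values no longer match.
    return {
        key
        for key in old_params.keys() | new_params.keys()
        if old_params.get(key) != new_params.get(key)
    }

changes = changed_keys(
    {"ELI5": False, "memory_window": 3},
    {"ELI5": True, "memory_window": 3},
)
if "ELI5" in changes:
    print("refresh qa_prompt")  # mirrors the update hook added above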
code/modules/config/config.yml
CHANGED
@@ -28,6 +28,7 @@ vectorstore:
 llm_params:
   use_history: True # bool
   memory_window: 3 # int
+  ELI5: False # bool
   llm_loader: 'openai' # str [local_llm, openai]
   openai_params:
     model: 'gpt-3.5-turbo-1106' # str [gpt-3.5-turbo-1106, gpt-4]
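
The flag defaults to False, so existing deployments keep the standard prompt until the switch is flipped in the UI. A quick sketch of reading it back with PyYAML (inline YAML stands in for the real config.yml):

import yaml

raw = """
llm_params:
  use_history: True
  memory_window: 3
  ELI5: False
"""
config = yaml.safe_load(raw)
print(config["llm_params"]["ELI5"])  # -> False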
code/modules/config/constants.py
CHANGED
@@ -36,6 +36,19 @@ OPENAI_PROMPT_WITH_HISTORY = (
     "AI Tutor:"
 )
 
+ELI5_PROMPT_WITH_HISTORY = (
+    "You are an AI Tutor for the course DS598, taught by Prof. Thomas Gardos. Answer the user's question using the provided context in the simplest way possible, as if you are explaining to a 5-year-old. Only use the context if it helps make things clearer. The context is ordered by relevance. "
+    "If you don't know the answer, do your best without making things up. Keep the conversation simple and easy to understand. "
+    "Use chat history and context as guides but avoid repeating past responses. Provide links from the source_file metadata. Use the source context that is most relevant. "
+    "Speak in a friendly and engaging manner, like talking to a curious child. Avoid complex terms.\n\n"
+    "Chat History:\n{chat_history}\n\n"
+    "Context:\n{context}\n\n"
+    "Answer the student's question below in a friendly, simple, and engaging manner. Use the context and history only if relevant, otherwise, engage in a free-flowing conversation.\n"
+    "Give a very detailed narrative explanation. Use examples wherever you can to aid in the explanation. Remember, explain it as if you are talking to a 5-year-old.\n"
+    "Student: {input}\n"
+    "AI Tutor:"
+)
+
 OPENAAI_PROMPT_NO_HISTORY = (
     "You are an AI Tutor for the course DS598, taught by Prof. Thomas Gardos. Answer the user's question using the provided context. Only use the context if it is relevant. The context is ordered by relevance. "
     "If you don't know the answer, do your best without making things up. Keep the conversation flowing naturally. "