XThomasBU committed on
Commit a052bdc
1 Parent(s): a2dfb74

improvements

code/main.py CHANGED
@@ -96,7 +96,7 @@ async def start():
     model = config["llm_params"]["local_llm_params"]["model"]
     msg = cl.Message(content=f"Starting the bot {model}...")
     await msg.send()
-    msg.content = f"Hey, What Can I Help You With?\n\nYou can me ask me questions about the course logistics, course content, about the final project, or anything else! You can find me at {model}"
+    msg.content = f"Hey, what can I help you with?\n\nYou can ask me questions about the course logistics, course content, the final project, or anything else!"
     await msg.update()
 
     cl.user_session.set("chain", chain)
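Note: the handler above follows Chainlit's send-then-update pattern, where a message is sent once and then mutated in place. A minimal self-contained sketch of that pattern (hypothetical handler, using only the chainlit calls already visible in the hunk):

```python
import chainlit as cl

@cl.on_chat_start
async def start():
    # Send a provisional status message first...
    msg = cl.Message(content="Starting the bot...")
    await msg.send()
    # ...then rewrite its content and push the update to the UI.
    msg.content = "Hey, what can I help you with?"
    await msg.update()
```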
code/modules/constants.py CHANGED
@@ -10,17 +10,17 @@ OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
 
 # Prompt Templates
 
-# prompt_template = """Use the following pieces of information to answer the user's question.
-# If you don't know the answer, just say that you don't know.
+openai_prompt_template = """Use the following pieces of information to answer the user's question.
+If you don't know the answer, just say that you don't know.
 
-# Context: {context}
-# Question: {question}
+Context: {context}
+Question: {question}
 
-# Only return the helpful answer below and nothing else.
-# Helpful answer:
-# """
+Only return the helpful answer below and nothing else.
+Helpful answer:
+"""
 
-prompt_template_with_history = """Use the following pieces of information to answer the user's question.
+openai_prompt_template_with_history = """Use the following pieces of information to answer the user's question.
 If you don't know the answer, just say that you don't know, don't try to make up an answer.
 Use the history to answer the question if you can.
 Chat History:
@@ -32,10 +32,31 @@ Only return the helpful answer below and nothing else.
 Helpful answer:
 """
 
-prompt_template = """
+tinyllama_prompt_template = """
+<|im_start|>system
+Assistant is an intelligent chatbot designed to help students with questions regarding the course. Only answer questions using the context below and if you're not sure of an answer, you can say "I don't know". Always give a brief and concise answer to the question. Use the history to answer the question if you can.
+
+Context:
+{context}
+<|im_end|>
+<|im_start|>user
+Question: Who is the instructor for this course?
+<|im_end|>
+<|im_start|>assistant
+The instructor for this course is Prof. Thomas Gardos.
+<|im_end|>
+<|im_start|>user
+Question: {question}
+<|im_end|>
+<|im_start|>assistant
+"""
+
+tinyllama_prompt_template_with_history = """
 <|im_start|>system
 Assistant is an intelligent chatbot designed to help students with questions regarding the course. Only answer questions using the context below and if you're not sure of an answer, you can say "I don't know". Always give a breif and concise answer to the question.
 
+Chat History:
+{chat_history}
 Context:
 {context}
 <|im_end|>
@@ -51,6 +72,7 @@ Question: {question}
 <|im_start|>assistant
 """
 
+
 # Model Paths
 
 LLAMA_PATH = "storage/models/tinyllama-1.1b-chat-v1.0.Q5_K_M.gguf"
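The new `tinyllama_prompt_template` variants use TinyLlama's ChatML-style `<|im_start|>`/`<|im_end|>` markers and bake in a one-shot example. A quick way to preview how the placeholders render (a sketch using plain `str.format`, assuming the repo's `modules` package is importable; the sample context and question are made up):

```python
from modules.constants import tinyllama_prompt_template

# The template only contains {context} and {question} placeholders,
# so plain str.format is enough for a preview.
print(tinyllama_prompt_template.format(
    context="Lectures are on Tuesdays at 9am in Room 101.",
    question="When are the lectures?",
))
```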
code/modules/helpers.py CHANGED
@@ -3,6 +3,8 @@ from bs4 import BeautifulSoup
 from tqdm import tqdm
 from urllib.parse import urlparse
 import chainlit as cl
+from langchain import PromptTemplate
+from modules.constants import *
 
 """
 Ref: https://python.plainenglish.io/scraping-the-subpages-on-a-website-ea2d4e3db113
@@ -95,6 +97,30 @@ def get_base_url(url):
     base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
     return base_url
 
+def get_prompt(config):
+    if config["llm_params"]["use_history"]:
+        if config["llm_params"]["llm_loader"] == "local_llm":
+            custom_prompt_template = tinyllama_prompt_template_with_history
+        elif config["llm_params"]["llm_loader"] == "openai":
+            custom_prompt_template = openai_prompt_template_with_history
+        # else:
+        #     custom_prompt_template = tinyllama_prompt_template_with_history  # default
+        prompt = PromptTemplate(
+            template=custom_prompt_template,
+            input_variables=["context", "chat_history", "question"],
+        )
+    else:
+        if config["llm_params"]["llm_loader"] == "local_llm":
+            custom_prompt_template = tinyllama_prompt_template
+        elif config["llm_params"]["llm_loader"] == "openai":
+            custom_prompt_template = openai_prompt_template
+        # else:
+        #     custom_prompt_template = tinyllama_prompt_template
+        prompt = PromptTemplate(
+            template=custom_prompt_template,
+            input_variables=["context", "question"],
+        )
+    return prompt
 
 def get_sources(res, answer):
     source_elements_dict = {}
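Usage sketch for the new `get_prompt` helper (assuming a config dict shaped like the keys it reads; the values below are illustrative):

```python
from modules.helpers import get_prompt

# Minimal config stub with only the keys get_prompt inspects.
config = {"llm_params": {"llm_loader": "openai", "use_history": True}}

prompt = get_prompt(config)
print(prompt.input_variables)  # ['context', 'chat_history', 'question']
```

One caveat worth flagging: with the `else` branches commented out, an unrecognized `llm_loader` value leaves `custom_prompt_template` unbound and `get_prompt` raises `UnboundLocalError`.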
code/modules/llm_tutor.py CHANGED
@@ -10,6 +10,7 @@ from langchain.chains.conversational_retrieval.prompts import QA_PROMPT
 import os
 
 from modules.constants import *
+from modules.helpers import get_prompt
 from modules.chat_model_loader import ChatModelLoader
 from modules.vector_db import VectorDB
 
@@ -26,14 +27,7 @@ class LLMTutor:
         """
         Prompt template for QA retrieval for each vectorstore
         """
-        if self.config["llm_params"]["use_history"]:
-            custom_prompt_template = prompt_template_with_history
-        else:
-            custom_prompt_template = prompt_template
-        prompt = PromptTemplate(
-            template=custom_prompt_template,
-            input_variables=["context", "chat_history", "question"],
-        )
+        prompt = get_prompt(self.config)
         # prompt = QA_PROMPT
 
         return prompt
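The diff doesn't show where the returned prompt is consumed, but the `QA_PROMPT` import suggests a `ConversationalRetrievalChain`. For reference, a hedged sketch of the usual LangChain wiring (`llm` and `retriever` stand in for objects built elsewhere in `LLMTutor`; this is not code from the repo):

```python
from langchain.chains import ConversationalRetrievalChain

# Hypothetical wiring: the custom prompt is passed to the combine-docs step.
chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=retriever,
    return_source_documents=True,
    combine_docs_chain_kwargs={"prompt": prompt},
)
```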