annas4421 committed on
Commit fd38cd8
1 Parent(s): f893e42

Update app.py

Files changed (1)
  app.py +3 -4
app.py CHANGED
@@ -28,7 +28,7 @@ However, if the user does not require legal assistance in Pakistan, you will imm
 say goodbye, ending the conversation. Remember to base your responses on the user's needs, providing accurate and
 concise information regarding the Pakistan legal law and rights where applicable. Your interactions should be professional and
 focused, ensuring the user's queries are addressed efficiently without deviating from the set flows.
-
+CONTEXT: {context}
 CHAT HISTORY: {chat_history}
 QUESTION: {question}
 ANSWER:
@@ -84,14 +84,13 @@ if "messages" not in st.session_state:
     st.session_state.messages = []
 
 if "memory" not in st.session_state:
-    st.session_state.memory = ConversationBufferWindowMemory(k=2, memory_key="chat_history",return_messages=True)
+    st.session_state.memory = ConversationBufferWindowMemory(k=5, memory_key="chat_history",return_messages=True,output_key='answer')
 
 #embeddings = HuggingFaceEmbeddings(model_name="nomic-ai/nomic-embed-text-v1",model_kwargs={"trust_remote_code":True,"revision":"289f532e14dbbbd5a04753fa58739e9ba766f3c7"})
 #db=FAISS.load_local("/content/ipc_vector_db", embeddings, allow_dangerous_deserialization=True)
 
 
-prompt = PromptTemplate(template=custom_template,
-                        input_variables=[ 'question', 'chat_history'])
+prompt = PromptTemplate(template=custom_template)
 
 # You can also use other LLMs options from https://python.langchain.com/docs/integrations/llms. Here I have used TogetherAI API
 
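
Taken together, the commit points the template at a retrieval-augmented prompt (CONTEXT: {context}), widens the conversation window from the last 2 to the last 5 turns, and tells the memory to persist only the chain's "answer" output. Below is a minimal sketch of how these pieces could be wired together with LangChain's ConversationalRetrievalChain; the names llm, db, and custom_template stand in for objects presumably defined elsewhere in app.py and are assumptions, not code from this commit.

# Sketch only: assumes `llm` (e.g. a TogetherAI model, per the comment above),
# `db` (a FAISS vector store), and `custom_template` (a template containing
# {context}, {chat_history}, and {question}) are defined elsewhere in app.py.
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferWindowMemory
from langchain.prompts import PromptTemplate

# Recent LangChain versions infer input_variables from the {placeholders} in the
# template string, which is presumably why the explicit list was dropped here.
prompt = PromptTemplate(template=custom_template)

# k=5 keeps the last five exchanges in the window; output_key="answer" tells the
# memory which of the chain's output keys to store when the chain returns more
# than one (answer plus source documents).
memory = ConversationBufferWindowMemory(
    k=5,
    memory_key="chat_history",
    return_messages=True,
    output_key="answer",
)

qa_chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=db.as_retriever(),                   # retrieved chunks fill the {context} slot
    memory=memory,
    return_source_documents=True,                  # multi-key output is what makes output_key necessary
    combine_docs_chain_kwargs={"prompt": prompt},  # use the custom template for the answer step
)

result = qa_chain.invoke({"question": "What does the law say about tenancy disputes?"})
print(result["answer"])

Without output_key='answer', ConversationBufferWindowMemory cannot decide which of the chain's multiple output keys to save and raises an error, which is the likely motivation for that part of the change.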