samim2024 committed
Commit 59c9bb2
1 Parent(s): 180eb9b

Update app.py

Files changed (1)
  1. app.py +18 -11
app.py CHANGED
@@ -11,6 +11,18 @@ from langchain.memory import ConversationBufferMemory
 import streamlit as st
 import os
 import time
+from langchain_community.llms import HuggingFaceEndpoint
+
+from langchain_community.embeddings import HuggingFaceEmbeddings
+
+model_name = "sentence-transformers/all-mpnet-base-v2"
+model_kwargs = {'device': 'cpu'}
+encode_kwargs = {'normalize_embeddings': False}
+embeddings = HuggingFaceEmbeddings(
+    model_name=model_name,
+    model_kwargs=model_kwargs,
+    encode_kwargs=encode_kwargs
+)
 
 if not os.path.exists('files'):
     os.mkdir('files')
@@ -37,18 +49,13 @@ if 'memory' not in st.session_state:
         return_messages=True,
         input_key="question")
 if 'vectorstore' not in st.session_state:
-    st.session_state.vectorstore = Chroma(persist_directory='jj',
-                                          embedding_function=OllamaEmbeddings(base_url='http://localhost:11434',
-                                                                              model="mistral")
-                                          )
+    # st.session_state.vectorstore = Chroma(persist_directory='jj', embedding_function=OllamaEmbeddings(base_url='http://localhost:11434', model="mistral"))
+    st.session_state.vectorstore = Chroma(persist_directory='jj', embedding_function=embeddings)
+
 if 'llm' not in st.session_state:
-    st.session_state.llm = Ollama(base_url="http://localhost:11434",
-                                  model="mistral",
-                                  verbose=True,
-                                  callback_manager=CallbackManager(
-                                      [StreamingStdOutCallbackHandler()]),
-                                  )
-
+    # st.session_state.llm = Ollama(base_url="http://localhost:11434", model="mistral", verbose=True, callback_manager=CallbackManager([StreamingStdOutCallbackHandler()]))
+    st.session_state.llm = HuggingFaceEndpoint(repo_id="mistralai/Mistral-7B-Instruct-v0.2", temperature=0.9)
+
 # Initialize session state
 if 'chat_history' not in st.session_state:
     st.session_state.chat_history = []
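
For reference, a minimal standalone sketch of the stack this commit moves to, runnable outside Streamlit. It assumes langchain_community, sentence-transformers, and chromadb are installed, and that HUGGINGFACEHUB_API_TOKEN is set in the environment (the hosted endpoint needs a token, unlike the local Ollama server it replaces); the 'jj' persist directory mirrors app.py, and the prompt string is illustrative.

import os

from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.llms import HuggingFaceEndpoint
from langchain_community.vectorstores import Chroma

# The hosted Inference API authenticates via this token; local Ollama did not need one.
assert os.environ.get("HUGGINGFACEHUB_API_TOKEN"), "export HUGGINGFACEHUB_API_TOKEN first"

# Same embedding configuration the commit adds to app.py.
embeddings = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-mpnet-base-v2",
    model_kwargs={"device": "cpu"},
    encode_kwargs={"normalize_embeddings": False},
)

# Chroma store keyed to the new embedding function. Vectors previously
# persisted in 'jj' with OllamaEmbeddings have a different dimensionality,
# so the directory would likely need re-indexing after this change.
vectorstore = Chroma(persist_directory="jj", embedding_function=embeddings)

# Hosted Mistral endpoint; note the parameter is lowercase `temperature`.
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.2",
    temperature=0.9,
)

print(llm.invoke("Summarize retrieval-augmented generation in one sentence."))

One effect of this split worth noting: HuggingFaceEmbeddings runs sentence-transformers locally on CPU, so only the generation call goes over the network to the hosted endpoint.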