Chandranshu Jain committed
Commit: 06aea67
Parent(s): db20f92

Update app2.py

app2.py CHANGED
@@ -10,11 +10,6 @@ from langchain.prompts import PromptTemplate
 from langchain_community.document_loaders import PyPDFLoader
 from langchain_chroma import Chroma
 
-configuration = {
-    "client": "PersistentClient",
-    "path": "/tmp/.chroma"
-}
-
 st.set_page_config(page_title="Document Genie", layout="wide")
 
 st.markdown("""
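For context, the removed block only declared the persistent Chroma settings as a plain dict. A minimal sketch of the explicit client those settings describe, assuming the stock chromadb Python API (the collection name is hypothetical, not part of this commit):

import chromadb

# Sketch: an on-disk Chroma client matching the removed settings
# ("client": "PersistentClient", "path": "/tmp/.chroma").
client = chromadb.PersistentClient(path="/tmp/.chroma")
collection = client.get_or_create_collection("documents")  # collection name is an assumption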
@@ -32,12 +27,9 @@ Follow these simple steps to interact with the chatbot:
 """)
 
 def get_pdf(pdf_docs):
-
-
-
-    for page in pdf_reader.pages:
-        text += page.extract_text()
-    return text
+    loader = PyPDFLoader("financialguide.pdf")
+    docs = loader.load()
+    return docs
 
 def text_splitter(text):
     text_splitter = RecursiveCharacterTextSplitter(
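The rewritten get_pdf drops the old page-extraction loop and instead loads a fixed file through LangChain's PyPDFLoader, which returns one Document per page. A small standalone sketch of that call (the file name comes from the diff; the print is only for illustration):

from langchain_community.document_loaders import PyPDFLoader

# Sketch: load the hard-coded PDF the new get_pdf() reads.
loader = PyPDFLoader("financialguide.pdf")
docs = loader.load()  # list of Document objects, one per page
print(len(docs), docs[0].metadata)  # page count and metadata such as {'source': ..., 'page': 0}

Note that the new body ignores its pdf_docs argument and always reads financialguide.pdf from disk.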
@@ -52,10 +44,7 @@ GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
 
 def embedding(chunk):
     embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
-
-    new_client = chromadb.EphemeralClient()
-    db = Chroma.from_documents(vector, embeddings,client=new_client
-    , persist_directory="./chroma_db")
+    db = Chroma.from_documents(chunk,embeddings, persist_directory="./chroma_db")
 
 def get_conversational_chain():
     prompt_template = """
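With the EphemeralClient gone, embedding() now builds the vector store directly from the chunks and persists it under ./chroma_db. A sketch of how that persisted index could later be reopened and queried, assuming GOOGLE_API_KEY is set as elsewhere in the file (the query string is hypothetical):

from langchain_chroma import Chroma
from langchain_google_genai import GoogleGenerativeAIEmbeddings

# Sketch: reopen the store that embedding() persisted and run a similarity search.
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
db = Chroma(persist_directory="./chroma_db", embedding_function=embeddings)
hits = db.similarity_search("What does the guide say about budgeting?", k=4)  # hypothetical query
for doc in hits:
    print(doc.metadata.get("page"), doc.page_content[:80])

As written, embedding() assigns db but does not return it, so later steps would have to rely on the on-disk persist_directory as above.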
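Taken together, the revised helpers would wire up roughly as follows. This is a sketch only: text_splitter and get_conversational_chain appear here only as context lines, so how their inputs and outputs fit together is an assumption.

# Hypothetical end-to-end use of the revised helpers.
docs = get_pdf(None)                  # new get_pdf ignores its argument and loads financialguide.pdf
chunks = text_splitter(docs)          # assumed to return chunks acceptable to Chroma.from_documents
embedding(chunks)                     # persists the vector store under ./chroma_db
chain = get_conversational_chain()    # prompt-based QA chain; body not fully shown in this diff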