Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import sys
|
3 |
+
import openai
|
4 |
+
from langchain.chains import ConversationalRetrievalChain, RetrievalQA
|
5 |
+
from langchain.chat_models import ChatOpenAI
|
6 |
+
from langchain.document_loaders import DirectoryLoader, TextLoader
|
7 |
+
from langchain.embeddings import OpenAIEmbeddings
|
8 |
+
from langchain.indexes import VectorstoreIndexCreator
|
9 |
+
from langchain.indexes.vectorstore import VectorStoreIndexWrapper
|
10 |
+
from langchain.llms import OpenAI
|
11 |
+
|
12 |
+
# Hosted platforms (e.g. HF Spaces) often ship a system sqlite3 that is too
# old for chromadb. Swap pysqlite3 in under the stdlib name BEFORE the Chroma
# import below pulls chromadb in, so chromadb binds to the newer module.
# NOTE: the original re-imported sys here; it is already imported at the top
# of the file, so the duplicate import is dropped.
__import__('pysqlite3')
sys.modules['sqlite3'] = sys.modules.pop('pysqlite3')

from langchain.vectorstores import Chroma
|
17 |
+
|
18 |
+
# Propagate the platform secret OPENAPIKEY to the name the OpenAI/LangChain
# clients actually read. Fail fast with a clear message instead of the
# cryptic "str expected, not NoneType" TypeError that os.environ raises
# when the secret is missing.
_api_key = os.getenv("OPENAPIKEY")
if not _api_key:
    sys.exit("OPENAPIKEY environment variable is not set")
os.environ["OPENAI_API_KEY"] = _api_key

# Enable to save to disk & reuse the model (for repeated queries on the same data)
PERSIST = False

# Optional one-shot query taken from the command line; when absent, the
# interactive loop below prompts for input instead.
query = None
if len(sys.argv) > 1:
    query = sys.argv[1]
|
26 |
+
|
27 |
+
# Build (or reload) the vector index over the input documents.
if PERSIST and os.path.exists("persist"):
    # A persisted index already exists on disk — reload it rather than
    # re-embedding the documents.
    print("Reusing index...\n")
    store = Chroma(persist_directory="persist", embedding_function=OpenAIEmbeddings())
    index = VectorStoreIndexWrapper(vectorstore=store)
else:
    # Embed the source document from scratch.
    loader = TextLoader("input/input_data.txt")  # single-file input
    # loader = DirectoryLoader("data/")  # swap in to index a whole directory
    creator_kwargs = {"vectorstore_kwargs": {"persist_directory": "persist"}} if PERSIST else {}
    index = VectorstoreIndexCreator(**creator_kwargs).from_loaders([loader])
|
38 |
+
|
39 |
+
# Wire the chat model and the retriever into a conversational RAG chain.
# search_kwargs k=1: each question is answered from the single closest
# document chunk.
llm = ChatOpenAI(model="gpt-3.5-turbo")
retriever = index.vectorstore.as_retriever(search_kwargs={"k": 1})
chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=retriever)
|
43 |
+
|
44 |
+
# Interactive Q&A loop. A first query may have been supplied via argv
# (handled above); after it is consumed, the user is prompted each turn.
# Each (question, answer) pair is appended to chat_history so follow-up
# questions keep conversational context.
chat_history = []
while True:
    if not query:
        try:
            query = input("Prompt: ")
        except (EOFError, KeyboardInterrupt):
            # Ctrl-D / Ctrl-C at the prompt: exit cleanly instead of
            # dumping a traceback.
            sys.exit()
    if query in ['quit', 'q', 'exit']:
        sys.exit()
    result = chain({"question": query, "chat_history": chat_history})
    answer = result['answer']
    print(answer)

    # Remember this turn so the model can reference it next time.
    chat_history.append((query, answer))
    # Clear the one-shot query so the next iteration prompts interactively.
    query = None
|