got rid of json
app.py CHANGED
@@ -4,11 +4,11 @@ import numpy as np
 import faiss
 import PyPDF2
 import os
+import langchain
 
 from transformers import DPRContextEncoder, DPRContextEncoderTokenizer, DPRQuestionEncoder, DPRQuestionEncoderTokenizer, BartForQuestionAnswering
 from transformers import BartForConditionalGeneration, BartTokenizer, AutoTokenizer
 
-from langchain import text_splitter
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain.document_loaders import PyPDFLoader
 
@@ -151,6 +151,10 @@ question_model_name="facebook/dpr-question_encoder-multiset-base"
 
 rag = RAG(file_path, device)
 
+query = "what is the benefit of using multiple attention heads in mult-head attention?"
+
+print(rag.query(query))
+
 st.title("RAG Model Query Interface")
 
 query = st.text_area("Enter your question:")
@@ -158,4 +162,4 @@ query = st.text_area("Enter your question:")
 # If a query is given, get the answer
 if query:
     answer = rag.query(query)
-    st.
+    st.write(answer)
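For reference, a minimal sketch of what the query section of app.py looks like after this commit (this sketch is not itself part of the diff); it assumes the RAG class, file_path, and device are defined earlier in app.py and that RAG.query(question) returns a string answer:

import streamlit as st

# Build the retrieval-augmented generation pipeline over the given PDF
rag = RAG(file_path, device)

# Quick sanity check of the pipeline outside the Streamlit UI
query = "what is the benefit of using multiple attention heads in mult-head attention?"
print(rag.query(query))

# Streamlit interface
st.title("RAG Model Query Interface")
query = st.text_area("Enter your question:")

# If a query is given, get the answer and display it
if query:
    answer = rag.query(query)
    st.write(answer)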