first
- src/app.py +1 -1
- src/pdfchatbot.py +8 -1
src/app.py
CHANGED
@@ -8,7 +8,7 @@ with demo:
     uploaded_pdf.upload(pdf_chatbot.render_file, inputs=[uploaded_pdf,slider_chunk_size,slider_overlap_percentage,slider_temp,slider_k], outputs=[show_img])
 
     submit_button.click(pdf_chatbot.add_text, inputs=[chat_history, txt], outputs=[chat_history], queue=False).\
-        success(pdf_chatbot.generate_response, inputs=[chat_history, txt, uploaded_pdf], outputs=[chat_history,txt]).\
+        success(pdf_chatbot.generate_response, inputs=[chat_history, txt, uploaded_pdf,slider_chunk_size,slider_overlap_percentage,slider_temp,slider_k], outputs=[chat_history,txt]).\
         success(pdf_chatbot.render_file, inputs=[uploaded_pdf], outputs=[show_img])
 
 if __name__ == "__main__":
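For context, the event chain in app.py now passes the four slider components as extra entries in inputs=, and Gradio forwards their current values positionally to generate_response, so the list order must match the new parameter order in pdfchatbot.py. Below is a minimal sketch of that wiring; the component and slider names come from the diff, but the component types, slider ranges, steps, defaults, labels, and the import path are assumptions rather than the Space's actual layout.

import gradio as gr

from pdfchatbot import PDFChatBot  # assumed import path, running from src/

pdf_chatbot = PDFChatBot()
demo = gr.Blocks()

with demo:
    chat_history = gr.Chatbot()                    # component types assumed from their names
    show_img = gr.Image(label="Current page")
    txt = gr.Textbox(label="Ask a question about the PDF")
    submit_button = gr.Button("Send")
    uploaded_pdf = gr.File(label="Upload PDF")

    # Slider names appear in the diff; ranges, steps and defaults here are assumptions.
    slider_chunk_size = gr.Slider(128, 2048, value=512, step=64, label="Chunk size")
    slider_overlap_percentage = gr.Slider(0, 50, value=10, step=5, label="Chunk overlap (%)")
    slider_temp = gr.Slider(0.0, 1.0, value=0.5, step=0.05, label="Temperature")
    slider_k = gr.Slider(1, 10, value=3, step=1, label="Max chunks in context")

    uploaded_pdf.upload(pdf_chatbot.render_file,
                        inputs=[uploaded_pdf, slider_chunk_size, slider_overlap_percentage,
                                slider_temp, slider_k],
                        outputs=[show_img])

    submit_button.click(pdf_chatbot.add_text, inputs=[chat_history, txt],
                        outputs=[chat_history], queue=False).\
        success(pdf_chatbot.generate_response,
                inputs=[chat_history, txt, uploaded_pdf, slider_chunk_size,
                        slider_overlap_percentage, slider_temp, slider_k],
                outputs=[chat_history, txt]).\
        success(pdf_chatbot.render_file, inputs=[uploaded_pdf], outputs=[show_img])

if __name__ == "__main__":
    demo.launch()
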
src/pdfchatbot.py
CHANGED
@@ -124,7 +124,12 @@ class PDFChatBot:
         self.create_organic_pipeline()
         #self.create_chain()
     @spaces.GPU
-    def generate_response(self, history, query, file):
+    def generate_response(self, history, query, file,chunk_size,chunk_overlap_percentage,model_temperature,max_chunks_in_context):
+
+        self.chunk_size = chunk_size
+        self.overlap_percentage = chunk_overlap_percentage
+        self.model_temperatue = model_temperature
+        self.max_chunks_in_context = max_chunks_in_context
 
         if not query:
             raise gr.Error(message='Submit a question')
@@ -134,6 +139,8 @@
             self.process_file(file)
             self.processed = True
 
+
+
         result = self.create_organic_response(history="",query=query)
         for char in result:
             history[-1][-1] += char
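Reassembled from the two hunks above, the updated method stores the UI-selected settings on the instance before answering, so the chunking, retrieval and generation helpers can read them later. This is only a sketch, not the full file: the helper methods live elsewhere in pdfchatbot.py, the lines elided between the hunks are marked with a comment, and the `if not self.processed:` guard is an assumption inferred from the visible lines.

import gradio as gr
import spaces


class PDFChatBot:
    # __init__, process_file, create_organic_pipeline and
    # create_organic_response are defined elsewhere in pdfchatbot.py.

    @spaces.GPU
    def generate_response(self, history, query, file, chunk_size,
                          chunk_overlap_percentage, model_temperature,
                          max_chunks_in_context):
        # Keep the slider values on the instance for the downstream helpers.
        self.chunk_size = chunk_size
        self.overlap_percentage = chunk_overlap_percentage
        self.model_temperatue = model_temperature  # attribute spelled as in the commit
        self.max_chunks_in_context = max_chunks_in_context

        if not query:
            raise gr.Error(message='Submit a question')

        # ... lines between the two hunks are not shown in the diff ...

        if not self.processed:  # assumed guard; only the two lines below appear in the hunk
            self.process_file(file)
            self.processed = True

        result = self.create_organic_response(history="", query=query)
        for char in result:
            history[-1][-1] += char

Presumably process_file turns overlap_percentage into an absolute chunk overlap and the pipeline helpers read the temperature and max_chunks_in_context, but those bodies are not part of this diff.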