apahilaj committed on
Commit
cc82ce5
1 Parent(s): 4aa3156

attempt 100

Browse files
Files changed (1) hide show
  1. app.py +78 -77
app.py CHANGED
@@ -66,90 +66,91 @@ def load_db(file, k):
66
 
67
  # return qa
68
 
69
- # chat_history = [] # initialize chat history
70
 
71
- # def greet(question, pdf_file):
72
- # global chat_history
73
- # a = load_db(pdf_file, 3)
74
- # r = a.invoke({"question": question, "chat_history": chat_history})
75
- # match = re.search(r'Helpful Answer:(.*)', r['answer'])
76
- # if match:
77
- # helpful_answer = match.group(1).strip()
78
- # # Extend chat history with the current question and answer
79
- # chat_history.extend([(question, helpful_answer)])
80
- # return helpful_answer
81
- # else:
82
- # return "No helpful answer found."
 
83
 
84
- # iface = gr.Interface(fn=greet, inputs=["text", "file"], outputs="text")
85
- # iface.launch(share=True)
86
 
87
 
88
 
89
- import gradio as gr
90
- import pandas as pd
91
- from langchain.embeddings import HuggingFaceEmbeddings
92
- from langchain.vectorstores import Chroma, faiss
93
- from langchain_community.llms import HuggingFaceEndpoint, HuggingFaceHub
94
- from langchain.chains import LLMChain
95
- from langchain_community.document_loaders.csv_loader import CSVLoader
96
- from langchain_community.document_loaders import PyPDFLoader
97
- from langchain.text_splitter import CharacterTextSplitter
98
- from langchain_community.document_loaders import TextLoader
99
- from langchain_community import vectorstores
100
- from langchain.prompts import PromptTemplate
101
- from langchain.chains import RetrievalQA
102
- from langchain.memory import ConversationBufferMemory
103
- from langchain.chains import ConversationalRetrievalChain
104
- from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter
105
- from langchain.vectorstores import DocArrayInMemorySearch
106
- from langchain.document_loaders import TextLoader
107
- from langchain.chains import RetrievalQA, ConversationalRetrievalChain
108
- from langchain.memory import ConversationBufferMemory
109
- from langchain.chat_models import ChatOpenAI
110
- from langchain.document_loaders import TextLoader
111
- from langchain.document_loaders import PyPDFLoader
112
- import panel as pn
113
- import param
114
- import re
115
- import os
116
 
117
- api_token = os.environ.get('HUGGINGFACEHUB_API_TOKEN')
118
 
119
- model = HuggingFaceHub(
120
- huggingfacehub_api_token=api_token,
121
- repo_id="mistralai/Mistral-7B-Instruct-v0.2",
122
- task="conversational",
123
- model_kwargs={"temperature": 0.8, "max_length": 1000},
124
- )
125
- template = """Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer. Use three sentences maximum. Keep the answer as concise as possible. Always say "thanks for asking!" at the end of the answer.
126
- {context}
127
- Question: {question}
128
- Helpful Answer:"""
129
- QA_CHAIN_PROMPT = PromptTemplate.from_template(template)
130
 
131
- # Updated greet function to handle file upload
132
- def greet(message, pdf_file):
133
- global chat_history
134
- user_message = message
135
- if pdf_file is not None:
136
- # Save the uploaded PDF file
137
- with open("uploaded_file.pdf", "wb") as f:
138
- f.write(pdf_file.read())
139
- a = load_db("uploaded_file.pdf", 3)
140
- # else:
141
- # a = load_db("temp.pdf", 3) # assuming you've uploaded the file and saved it as "temp.pdf"
142
 
143
- r = a.invoke({"question": user_message, "chat_history": chat_history})
144
- match = re.search(r'Helpful Answer:(.*)', r['answer'])
145
- if match:
146
- helpful_answer = match.group(1).strip()
147
- # Extend chat history with the current question and answer
148
- chat_history.extend([{"role": "user", "content": user_message}, {"role": "assistant", "content": helpful_answer}])
149
- return [{"role": "assistant", "content": helpful_answer}]
150
- else:
151
- return [{"role": "assistant", "content": "No helpful answer found."}]
152
 
153
- # Gradio ChatInterface with file upload support
154
- iface = gr.ChatInterface(fn=greet, title="Your Chatbot Title", additional_inputs="file")
155
- iface.launch(share=True)
 
66
 
67
  # return qa
68
 
69
+ chat_history = [] # initialize chat history
70
 
71
+ def greet(question, pdf_file):
72
+ global chat_history
73
+ print("chat_history: ", chat_history)
74
+ a = load_db(pdf_file, 3)
75
+ r = a.invoke({"question": question, "chat_history": chat_history})
76
+ match = re.search(r'Helpful Answer:(.*)', r['answer'])
77
+ if match:
78
+ helpful_answer = match.group(1).strip()
79
+ # Extend chat history with the current question and answer
80
+ chat_history.extend([(question, helpful_answer)])
81
+ return helpful_answer
82
+ else:
83
+ return "No helpful answer found."
84
 
85
+ iface = gr.Interface(fn=greet, inputs=["text", "file"], outputs="text")
86
+ iface.launch(share=True)
87
 
88
 
89
 
90
+ # import gradio as gr
91
+ # import pandas as pd
92
+ # from langchain.embeddings import HuggingFaceEmbeddings
93
+ # from langchain.vectorstores import Chroma, faiss
94
+ # from langchain_community.llms import HuggingFaceEndpoint, HuggingFaceHub
95
+ # from langchain.chains import LLMChain
96
+ # from langchain_community.document_loaders.csv_loader import CSVLoader
97
+ # from langchain_community.document_loaders import PyPDFLoader
98
+ # from langchain.text_splitter import CharacterTextSplitter
99
+ # from langchain_community.document_loaders import TextLoader
100
+ # from langchain_community import vectorstores
101
+ # from langchain.prompts import PromptTemplate
102
+ # from langchain.chains import RetrievalQA
103
+ # from langchain.memory import ConversationBufferMemory
104
+ # from langchain.chains import ConversationalRetrievalChain
105
+ # from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter
106
+ # from langchain.vectorstores import DocArrayInMemorySearch
107
+ # from langchain.document_loaders import TextLoader
108
+ # from langchain.chains import RetrievalQA, ConversationalRetrievalChain
109
+ # from langchain.memory import ConversationBufferMemory
110
+ # from langchain.chat_models import ChatOpenAI
111
+ # from langchain.document_loaders import TextLoader
112
+ # from langchain.document_loaders import PyPDFLoader
113
+ # import panel as pn
114
+ # import param
115
+ # import re
116
+ # import os
117
 
118
+ # api_token = os.environ.get('HUGGINGFACEHUB_API_TOKEN')
119
 
120
+ # model = HuggingFaceHub(
121
+ # huggingfacehub_api_token=api_token,
122
+ # repo_id="mistralai/Mistral-7B-Instruct-v0.2",
123
+ # task="conversational",
124
+ # model_kwargs={"temperature": 0.8, "max_length": 1000},
125
+ # )
126
+ # template = """Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer. Use three sentences maximum. Keep the answer as concise as possible. Always say "thanks for asking!" at the end of the answer.
127
+ # {context}
128
+ # Question: {question}
129
+ # Helpful Answer:"""
130
+ # QA_CHAIN_PROMPT = PromptTemplate.from_template(template)
131
 
132
+ # # Updated greet function to handle file upload
133
+ # def greet(message, pdf_file):
134
+ # global chat_history
135
+ # user_message = message
136
+ # if pdf_file is not None:
137
+ # # Save the uploaded PDF file
138
+ # with open("uploaded_file.pdf", "wb") as f:
139
+ # f.write(pdf_file.read())
140
+ # a = load_db("uploaded_file.pdf", 3)
141
+ # # else:
142
+ # # a = load_db("temp.pdf", 3) # assuming you've uploaded the file and saved it as "temp.pdf"
143
 
144
+ # r = a.invoke({"question": user_message, "chat_history": chat_history})
145
+ # match = re.search(r'Helpful Answer:(.*)', r['answer'])
146
+ # if match:
147
+ # helpful_answer = match.group(1).strip()
148
+ # # Extend chat history with the current question and answer
149
+ # chat_history.extend([{"role": "user", "content": user_message}, {"role": "assistant", "content": helpful_answer}])
150
+ # return [{"role": "assistant", "content": helpful_answer}]
151
+ # else:
152
+ # return [{"role": "assistant", "content": "No helpful answer found."}]
153
 
154
+ # # Gradio ChatInterface with file upload support
155
+ # iface = gr.ChatInterface(fn=greet, title="Your Chatbot Title", additional_inputs="file")
156
+ # iface.launch(share=True)