sidcww committed
Commit
3a221be
1 Parent(s): 8e21440

Update app.py

Files changed (1):
  app.py +2 -2
app.py CHANGED
@@ -32,7 +32,7 @@ def initialize(file_path, question):
     if os.path.exists(file_path):
         pdf_loader = PyPDFLoader(file_path)
         pages = pdf_loader.load_and_split()
-        context = "\n".join(str(page.page_content) for page in pages[:10])
+        context = "\n".join(str(page.page_content) for page in pages[:30])
         stuff_chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
         stuff_answer = stuff_chain({"input_documents": pages, "question": question, "context": context}, return_only_outputs=True)
         gemini_answer = stuff_answer['output_text']
@@ -41,7 +41,7 @@ def initialize(file_path, question):
         mistral_prompt = f"Based on this answer: {gemini_answer}\nGenerate a follow-up question:"
         mistral_inputs = mistral_tokenizer.encode(mistral_prompt, return_tensors='pt').to(device)
         with torch.no_grad():
-            mistral_outputs = mistral_model.generate(mistral_inputs, max_length=100)
+            mistral_outputs = mistral_model.generate(mistral_inputs, max_length=50)
         mistral_output = mistral_tokenizer.decode(mistral_outputs[0], skip_special_tokens=True)

         combined_output = f"Gemini Answer: {gemini_answer}\n\nMistral Follow-up: {mistral_output}"
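
Both hunks sit inside initialize: the first widens the Gemini context from the first 10 PDF pages to the first 30, and the second caps the Mistral follow-up generation at 50 tokens instead of 100. The snippet below is a self-contained sketch of that second step for trying it outside app.py; the model checkpoint, device handling, and the sample gemini_answer are assumptions, since app.py defines mistral_model, mistral_tokenizer, and device elsewhere in the file.

    # Hypothetical, self-contained sketch of the follow-up step touched by the
    # second hunk. Model name, device setup, and the sample answer are assumed.
    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    device = "cuda" if torch.cuda.is_available() else "cpu"
    model_name = "mistralai/Mistral-7B-Instruct-v0.2"  # assumed checkpoint
    mistral_tokenizer = AutoTokenizer.from_pretrained(model_name)
    mistral_model = AutoModelForCausalLM.from_pretrained(model_name).to(device)

    gemini_answer = "The report projects 12% revenue growth next year."  # stand-in
    mistral_prompt = f"Based on this answer: {gemini_answer}\nGenerate a follow-up question:"
    mistral_inputs = mistral_tokenizer.encode(mistral_prompt, return_tensors="pt").to(device)
    with torch.no_grad():
        # max_length counts prompt tokens plus generated tokens, so 50 leaves
        # only a short budget for the follow-up question itself.
        mistral_outputs = mistral_model.generate(mistral_inputs, max_length=50)
    mistral_output = mistral_tokenizer.decode(mistral_outputs[0], skip_special_tokens=True)
    print(mistral_output)

Note that max_length bounds the prompt plus the generated tokens, so a long Gemini answer can use up most of the 50-token budget; max_new_tokens would bound only the generated portion.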