# app.py — Streamlit extractive question-answering demo (HuggingFace Transformers).
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForQuestionAnswering
# NOTE(review): this is a Space *page* URL, not a Hub model id — from_pretrained
# expects an id like "user/repo" or a local directory; confirm this actually resolves.
tokenizer = AutoTokenizer.from_pretrained("https://huggingface.co/spaces/highdeff/highdeffrepo/tree/main")
# NOTE(review): from_pretrained expects a directory containing config.json plus
# weights, not a bare ".pt" checkpoint file — verify "./trained.pt" loads as given.
model = AutoModelForQuestionAnswering.from_pretrained("./trained.pt")
def get_answer(context, question):
    """Extract the answer to *question* from *context* with the global QA model.

    Encodes the (question, context) pair, runs the model once without
    gradients, and returns the decoded token span between the argmax of the
    start and end logits.

    Args:
        context: Passage of text to search for the answer.
        question: Natural-language question about the passage.

    Returns:
        The decoded answer string (may be empty if the model finds no span).
    """
    encoding = tokenizer.encode_plus(question, context, return_tensors='pt')
    input_ids = encoding['input_ids']
    attention_mask = encoding['attention_mask']
    # Inference only — no gradients needed.
    with torch.no_grad():
        outputs = model(input_ids, attention_mask=attention_mask)
    # Modern transformers models return a QuestionAnsweringModelOutput, not a
    # (start, end) tuple — tuple unpacking here would raise at runtime.
    start_scores = outputs.start_logits
    end_scores = outputs.end_logits
    start_index = torch.argmax(start_scores)
    end_index = torch.argmax(end_scores)
    # Guard against an inverted span (end predicted before start), which would
    # otherwise decode to an empty slice.
    if end_index < start_index:
        end_index = start_index
    answer_tokens = input_ids[0][start_index:end_index + 1]
    # skip_special_tokens keeps [CLS]/[SEP] markers out of the user-facing answer.
    return tokenizer.decode(answer_tokens, skip_special_tokens=True)
# --- Streamlit UI -----------------------------------------------------------
st.title("Question Answering with Transformers")
context = st.text_area("Context:", "Enter the context here...")
question = st.text_input("Question:", "Enter your question here...")

if st.button("Answer"):
    # .strip() so whitespace-only input counts as missing; a plain truthiness
    # check never fires because the widgets start with non-empty placeholders.
    if not context.strip() or not question.strip():
        st.error("Please provide both a context and a question.")
    else:
        answer = get_answer(context, question)
        st.success(f"Answer: {answer}")