import gradio as gr
from transformers import pipeline

# RoBERTa model fine-tuned on SQuAD 2.0 for extractive question answering.
model_name = "IProject-10/roberta-base-finetuned-squad2"
nlp = pipeline("question-answering", model=model_name, tokenizer=model_name)


def predict(context, question):
    # The pipeline returns a dict with "score", "start", "end", and "answer";
    # only the extracted answer text is shown in the UI.
    res = nlp({"question": question, "context": context})
    return res["answer"]


# Markdown description rendered under the app title (left empty here).
md = """ """

# Default values pre-filled in the input boxes.
context = "The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America..."
question = "Which continent is the Amazon rainforest in?"

# Additional example offered in the Examples panel below the interface.
apple_context = "An apple is an edible fruit produced by an apple tree (Malus domestica)..."
apple_question = "How many years have apples been grown for?"

gr.Interface(
    predict,
    inputs=[
        gr.Textbox(lines=7, value=context, label="Context Paragraph"),
        gr.Textbox(lines=2, value=question, label="Question"),
    ],
    outputs=gr.Textbox(label="Answer"),
    examples=[[apple_context, apple_question]],
    title="Question Answering System",
    description=md,
).launch()