IProject-10 committed on
Commit a194712
1 Parent(s): b54e082

Update app.py

Files changed (1)
  1. app.py +30 -32
app.py CHANGED
@@ -1,32 +1,30 @@
- import gradio as gr
- from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
-
- model_name = "IProject-10/roberta-base-finetuned-squad2"
- nlp = pipeline("question-answering", model=model_name, tokenizer=model_name)
-
- def predict(context, question):
-     res = nlp({"question": question, "context": context})
-     return res["answer"]
-
- md = """In this project work we build a Text Retrieval Question-Answering system using BERT model. QA system is an important NLP task in which the user asks a question in natural language to the model as input and the model provides the answer in natural language as output.
- The language representation model BERT stands for Bidirectional Encoder Representations from Transformers. The model is based on the Devlin et al. paper: [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805).
- Dataset used is SQuAD 2.0 [Stanford Question Answering Dataset 2.0](https://rajpurkar.github.io/SQuAD-explorer/). It is a reading comprehension dataset which consists of question-answer pairs derived from Wikipedia articles written by crowdworkers. The answer to all the questions is in the form of a span of text.
- """
-
- context = "The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America..."
- question = "Which continent is the Amazon rainforest in?"
-
- apple_context = "An apple is an edible fruit produced by an apple tree (Malus domestica)..."
- apple_question = "How many years have apples been grown for?"
-
- gr.Interface(
-     predict,
-     inputs=[
-         gr.Textbox(lines=7, value=context, label="Context Paragraph"),
-         gr.Textbox(lines=2, value=question, label="Question"),
-     ],
-     outputs=gr.Textbox(label="Answer"),
-     examples=[[apple_context, apple_question]],
-     title="Question & Answering with BERT using the SQuAD 2 dataset",
-     description=md,
- ).launch()
+ import gradio as gr
+ from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
+
+ model_name = "IProject-10/roberta-base-finetuned-squad2"
+ nlp = pipeline("question-answering", model=model_name, tokenizer=model_name)
+
+ def predict(context, question):
+     res = nlp({"question": question, "context": context})
+     return res["answer"]
+
+ md = """
+ """
+
+ context = "The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America..."
+ question = "Which continent is the Amazon rainforest in?"
+
+ apple_context = "An apple is an edible fruit produced by an apple tree (Malus domestica)..."
+ apple_question = "How many years have apples been grown for?"
+
+ gr.Interface(
+     predict,
+     inputs=[
+         gr.Textbox(lines=7, value=context, label="Context Paragraph"),
+         gr.Textbox(lines=2, value=question, label="Question"),
+     ],
+     outputs=gr.Textbox(label="Answer"),
+     examples=[[apple_context, apple_question]],
+     title="Question Answering System",
+     description=md,
+ ).launch()
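
For quick local verification, here is a minimal sketch of how the question-answering pipeline used in app.py can be queried directly, without the Gradio interface. It reuses the model name and the Amazon example from the file above and assumes the transformers library and a backend such as PyTorch are installed:

# Minimal sketch: query the same question-answering pipeline as app.py directly.
# Assumes the transformers library (plus a backend such as PyTorch) is installed
# and the IProject-10/roberta-base-finetuned-squad2 checkpoint can be downloaded.
from transformers import pipeline

model_name = "IProject-10/roberta-base-finetuned-squad2"
nlp = pipeline("question-answering", model=model_name, tokenizer=model_name)

context = (
    "The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle, "
    "is a moist broadleaf forest that covers most of the Amazon basin of South America..."
)
question = "Which continent is the Amazon rainforest in?"

result = nlp({"question": question, "context": context})
print(result["answer"])  # the answer is a span extracted from the context, e.g. "South America"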