nehalelkaref committed on
Commit 25f0ea1
1 Parent(s): f760978

Upload app.py

Files changed (1)
  1. code/app.py +36 -0
code/app.py ADDED
@@ -0,0 +1,36 @@
+ from flask import Flask, jsonify, request
+ from transformers import AutoAdapterModel, AutoTokenizer, TextClassificationPipeline
+ from huggingface_hub import Repository
+
+ app = Flask(__name__)
+
+ # define tokenizer, base model and adapters
+ tokenizer = AutoTokenizer.from_pretrained("UBC-NLP/MARBERT")
+
+ sarcasm_adapter = Repository(local_dir="sarcasm_adapter", clone_from="nehalelkaref/sarcasm_adapter")
+ aoc3_adapter = Repository(local_dir="aoc3_adapter", clone_from="nehalelkaref/aoc3_adapter")
+ aoc4_adapter = Repository(local_dir="aoc4_adapter", clone_from="nehalelkaref/aoc4_adapter")
+ fusion_adapter = Repository(local_dir="fusion_adapter", clone_from="nehalelkaref/region_fusion")
+
+ model = AutoAdapterModel.from_pretrained("UBC-NLP/MARBERT")
+
+ model.load_adapter("/code/aoc3_adapter", set_active=True, with_head=False)
+ model.load_adapter("/code/aoc4_adapter", set_active=True, with_head=False)
+ model.load_adapter("/code/sarcasm_adapter", set_active=True, with_head=False)
+
+ model.load_adapter_fusion("/code/fusion_adapter/aoc(3),aoc(4),sarcasm", with_head=True, set_active=True)
+
+ pipe = TextClassificationPipeline(tokenizer=tokenizer, model=model)
+
+ @app.route('/predict', methods=['POST'])
+ def predict():
+     text = request.json['inputs']
+
+     prediction = pipe(text)
+     labels = {"LABEL_0": "GULF", "LABEL_1": "LEVANT", "LABEL_2": "EGYPT"}
+     regions = []
+     for res in prediction:
+         regions.append(labels[res['label']])
+
+     return jsonify({'response': regions})
+
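For reference, a minimal client-side sketch of how the /predict endpoint could be called once the app is running. The host and port (localhost:5000, Flask's default) are assumptions, since the commit does not include an app.run() call or any server configuration, and the example input text is hypothetical.

    import requests

    # Assumed host/port; the commit does not specify where the Flask app is served.
    URL = "http://localhost:5000/predict"

    # The endpoint reads the "inputs" field of the JSON body and passes it
    # to the text-classification pipeline (a string or list of strings).
    payload = {"inputs": ["شو الأخبار اليوم؟"]}  # hypothetical example sentence

    response = requests.post(URL, json=payload)
    print(response.json())  # e.g. {"response": ["LEVANT"]}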