from flask import Flask, jsonify, request
# AutoAdapterModel is provided by the adapter-transformers integration with
# Hugging Face Transformers; the plain transformers package does not ship it.
from transformers import AutoAdapterModel, AutoTokenizer, TextClassificationPipeline
# Load the MARBERT base model and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained("UBC-NLP/MARBERT")
model = AutoAdapterModel.from_pretrained("UBC-NLP/MARBERT")

# Load the individual task adapters without classification heads; the fusion
# layer loaded afterwards supplies the head and becomes the active setup.
model.load_adapter("adapters/aoc3_adapter", set_active=True, with_head=False)
model.load_adapter("adapters/aoc4_adapter", set_active=True, with_head=False)
model.load_adapter("adapters/sarcasm_adapter", set_active=True, with_head=False)
model.load_adapter_fusion("adapters/fusion_adapter/aoc(3),aoc(4),sarcasm", with_head=True, set_active=True)

# Wrap the model and tokenizer in a text-classification pipeline for inference.
pipe = TextClassificationPipeline(tokenizer=tokenizer, model=model)
app = Flask(__name__)
@app.route("/", methods=['GET'])
def home():
return "<h1>GFG is great platform to learn</h1>"
if __name__ == "__main__":
    app.run()