from flask import Flask, jsonify, request
from transformers import AutoAdapterModel, AutoTokenizer, TextClassificationPipeline
from huggingface_hub import Repository

# Tokenizer for the MARBERT base model
tokenizer = AutoTokenizer.from_pretrained("UBC-NLP/MARBERT")

# Clone the task adapters and the fusion weights from the Hugging Face Hub
sarcasm_adapter = Repository(local_dir="sarcasm_adapter", clone_from="nehalelkaref/sarcasm_adapter")
aoc3_adapter = Repository(local_dir="aoc3_adapter", clone_from="nehalelkaref/aoc3_adapter")
aoc4_adapter = Repository(local_dir="aoc4_adapter", clone_from="nehalelkaref/aoc4_adapter")
fusion_adapter = Repository(local_dir="fusion_adapter", clone_from="nehalelkaref/region_fusion")

# Load MARBERT, attach the three task adapters (without their heads),
# then load the fusion layer together with its classification head
model = AutoAdapterModel.from_pretrained("UBC-NLP/MARBERT")
model.load_adapter("aoc3_adapter", set_active=True, with_head=False)
model.load_adapter("aoc4_adapter", set_active=True, with_head=False)
model.load_adapter("sarcasm_adapter", set_active=True, with_head=False)
model.load_adapter_fusion("fusion_adapter/aoc(3),aoc(4),sarcasm", with_head=True, set_active=True)

# Text-classification pipeline over the fused model
pipe = TextClassificationPipeline(tokenizer=tokenizer, model=model)

app = Flask(__name__)


@app.route("/", methods=['GET'])
def home():
    return "GFG is great platform to learn"
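
# The unused `request`/`jsonify` imports and the `pipe` object suggest an inference
# endpoint. The route below is an illustrative sketch, not part of the original
# source: the "/predict" path and the {"text": ...} request shape are assumptions.
@app.route("/predict", methods=['POST'])
def predict():
    # Read the input text from the JSON body and run the fused-adapter pipeline.
    data = request.get_json(force=True)
    predictions = pipe(data["text"])  # list of {"label": ..., "score": ...}
    return jsonify({"predictions": predictions})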
" if __name__ == "__main__": app.run()