from transformers import BertConfig


class MCQBertConfig(BertConfig):
    # Configuration for MCQBert: extends the standard BertConfig with
    # MCQBert-specific fields.
    model_type = "mcqbert"

    def __init__(self, integration_strategy=None, student_embedding_size=4096, cls_hidden_size=256, **kwargs):
        super().__init__(**kwargs)
        self.integration_strategy = integration_strategy      # which integration strategy to use (None by default)
        self.student_embedding_size = student_embedding_size  # size of the student embedding vector
        self.cls_hidden_size = cls_hidden_size                # hidden size of the classification head
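
As a minimal usage sketch (assuming MCQBertConfig is importable in the current scope), the config can be instantiated like any other BertConfig and, optionally, registered with AutoConfig under its model_type so the Auto classes can resolve it later:

from transformers import AutoConfig

# Build a config with the defaults defined above; any BertConfig keyword
# arguments (e.g. hidden_size, num_hidden_layers) pass through **kwargs.
config = MCQBertConfig()
print(config.integration_strategy, config.student_embedding_size, config.cls_hidden_size)

# Register the custom config so AutoConfig.from_pretrained can map
# model_type "mcqbert" back to MCQBertConfig.
AutoConfig.register("mcqbert", MCQBertConfig)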