from transformers import BertModel
import torch

from .configuration_mcqbert import MCQBertConfig

class MCQStudentBert(BertModel):
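    """BERT encoder with a small classification head that outputs one logit.

    The variant is selected by config.integration_strategy:
      * None  -- MCQBert: plain BERT over the input tokens.
      * "cat" -- MCQStudentBertCat: the projected student embedding is
        concatenated with the [CLS] hidden state before classification.
      * "sum" -- MCQStudentBertSum: the projected student embedding is added
        to every token embedding before encoding.
    """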
    config_class = MCQBertConfig
    
    def __init__(self, config: MCQBertConfig):
        super().__init__(config)

        if config.integration_strategy is not None:
            # Projects the student embedding into BERT's hidden space.
            self.student_embedding_layer = torch.nn.Linear(config.student_embedding_size, config.hidden_size)

        # With "cat", the classifier sees the [CLS] state and the projected
        # student embedding side by side, so its input is twice the hidden size.
        cls_input_dim_multiplier = 2 if config.integration_strategy == "cat" else 1
        cls_input_dim = config.hidden_size * cls_input_dim_multiplier
        
        # Two-layer MLP head mapping the pooled representation to one logit.
        self.classifier = torch.nn.Sequential(
            torch.nn.Linear(cls_input_dim, config.cls_hidden_size),
            torch.nn.ReLU(),
            torch.nn.Linear(config.cls_hidden_size, 1)
        )
        
    def forward(self, input_ids, student_embeddings=None):
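        """Return one logit per example.

        student_embeddings is required for the "cat" and "sum" strategies
        and is ignored when integration_strategy is None.
        """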
        if self.config.integration_strategy is None:
            # MCQBert: no integration strategy, so any student embeddings are
            # ignored and the input tokens are encoded on their own.
            output = super().forward(input_ids)
            return self.classifier(output.last_hidden_state[:, 0, :])

        elif self.config.integration_strategy == "cat":
            # MCQStudentBertCat: concatenate the [CLS] hidden state with the
            # projected student embedding, then classify the joint vector.
            output = super().forward(input_ids)
            cls_state = output.last_hidden_state[:, 0, :]
            combined = torch.cat((cls_state, self.student_embedding_layer(student_embeddings)), dim=1)
            return self.classifier(combined)

        elif self.config.integration_strategy == "sum":
            # MCQStudentBertSum: add the projected student embedding to every
            # token embedding, then encode the combined sequence.
            input_embeddings = self.embeddings(input_ids)
            student_projection = self.student_embedding_layer(student_embeddings).unsqueeze(1)
            combined_embeddings = input_embeddings + student_projection  # broadcast over seq_len
            output = super().forward(inputs_embeds=combined_embeddings)
            return self.classifier(output.last_hidden_state[:, 0, :])

        else:
            raise ValueError(f"{self.config.integration_strategy} is not a known integration_strategy")
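

# Usage sketch (illustrative): builds a small model and runs one forward pass.
# It assumes MCQBertConfig subclasses transformers.BertConfig and accepts the
# keyword arguments below; the sizes are made up for the example. Run it with
# the package importable (e.g. `python -m <package>.modeling_mcqbert`), since
# this module uses a relative import.
if __name__ == "__main__":
    config = MCQBertConfig(
        student_embedding_size=16,
        cls_hidden_size=32,
        integration_strategy="sum",  # one of None, "cat", "sum"
    )
    model = MCQStudentBert(config)
    input_ids = torch.randint(0, config.vocab_size, (2, 8))    # (batch, seq_len)
    student_embeddings = torch.randn(2, config.student_embedding_size)
    logits = model(input_ids, student_embeddings)              # (batch, 1)
    print(logits.shape)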