from transformers import PretrainedConfig


class BilmaConfig(PretrainedConfig):
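    """Configuration for the Bilma model.

    `weights` selects a set of released pretrained weights (currently only
    "MX"). When a pretrained set is chosen, the architecture hyperparameters
    (attention heads, hidden layers, maximum sequence length, hidden size,
    vocabulary size, dropout) are fixed to those of that checkpoint, and the
    corresponding keyword arguments are ignored.
    """
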
    model_type = "bilma"

    def __init__(
        self,
        weights="MX",
        include_top=True,
        include_head=None,
        num_attention_heads: int = 4,
        num_hidden_layers: int = 2,
        seq_max_length: int = 280,
        hidden_size: int = 512,
        vocab_size: int = 29025,
        hidden_dropout_prob: float = 0.1,
        **kwargs,
    ):
        countries = ["MX"]
        # `weights=None` skips validation and selects a fully custom
        # configuration below; otherwise the value must name a released
        # set of weights.
        if weights is not None and weights not in countries:
            raise ValueError(f"`weights` must be one of {countries}, got {weights}.")
        if include_head is not None and include_top:
            raise ValueError("To include a head, `include_top` must be False.")

        if weights is not None:
            # A released set of pretrained weights was requested: the
            # architecture hyperparameters are fixed to match that checkpoint.
            self.weights = weights
            self.include_top = include_top
            self.include_head = include_head
            self.num_attention_heads = 4
            self.num_hidden_layers = 2
            self.seq_max_length = 280
            self.hidden_size = 512
            self.vocab_size = 29025
            self.hidden_dropout_prob = 0.1
            super().__init__(**kwargs)
            return

        # No pretrained weights: use the caller-supplied hyperparameters.
        self.weights = weights
        self.include_top = include_top
        self.include_head = include_head
        self.num_attention_heads = num_attention_heads
        self.num_hidden_layers = num_hidden_layers
        self.seq_max_length = seq_max_length
        self.hidden_size = hidden_size
        self.vocab_size = vocab_size
        self.hidden_dropout_prob = hidden_dropout_prob
        super().__init__(**kwargs)
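

# A minimal usage sketch (not part of the model code; the `include_head`
# value below is hypothetical, since its expected format is not defined
# here): build the default pretrained-"MX" configuration, then one with
# the output top disabled so a head can be attached.
if __name__ == "__main__":
    config = BilmaConfig()
    print(config.hidden_size, config.num_attention_heads)  # 512 4

    headless = BilmaConfig(include_top=False, include_head=[64, 2])
    print(headless.include_top, headless.include_head)  # False [64, 2]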