{
  "_name_or_path": "distilbert-base-uncased",
  "activation": "gelu",
  "architectures": [
    "DistilBertForTokenClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "id2label": {
    "0": "B-O",
    "1": "B-AC",
    "2": "I-AC",
    "3": "B-LF",
    "4": "I-LF"
  },
  "initializer_range": 0.02,
  "label2id": {
    "B-O": 0,
    "B-AC": 1,
    "I-AC": 2,
    "B-LF": 3,
    "I-LF": 4
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "pad_token_id": 0,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.17.0",
  "vocab_size": 30522
}
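
A configuration like this is typically consumed through the Hugging Face transformers library. The sketch below rebuilds it in Python and runs the model over one sentence; the checkpoint id distilbert-base-uncased comes from the "_name_or_path" field above, the example sentence is a hypothetical input, and because only the base weights are loaded here, the token-classification head is freshly initialized, so the printed tags are illustrative until the actual fine-tuned weights for these labels are loaded.

import torch
from transformers import (
    AutoTokenizer,
    DistilBertConfig,
    DistilBertForTokenClassification,
)

# Label maps copied from the id2label / label2id entries above.
id2label = {0: "B-O", 1: "B-AC", 2: "I-AC", 3: "B-LF", 4: "I-LF"}
label2id = {label: i for i, label in id2label.items()}

# Rebuild the configuration; dim=768, n_layers=6, n_heads=12, etc. are the
# distilbert-base-uncased defaults, so only the label maps need overriding.
config = DistilBertConfig.from_pretrained(
    "distilbert-base-uncased",
    num_labels=5,
    id2label=id2label,
    label2id=label2id,
)

tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")
model = DistilBertForTokenClassification.from_pretrained(
    "distilbert-base-uncased", config=config
)

# Tag a sentence and map predicted class ids back to label strings.
# (Hypothetical input; with an untrained head the tags are random.)
inputs = tokenizer("CNN stands for convolutional neural network",
                   return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
print([(tok, config.id2label[i]) for tok, i in zip(tokens, pred_ids)])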