vibert-base-cased-ed / config.json
{
  "_name_or_path": "FPTAI/vibert-base-cased",
  "_num_labels": 2,
  "architectures": [
    "BertForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "directionality": "bidi",
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "Appeal",
    "1": "Merge-org",
    "2": "Sue",
    "3": "O",
    "4": "End-position",
    "5": "Start-position",
    "6": "Transfer-ownership",
    "7": "Sentence",
    "8": "Transfer-money",
    "9": "Trial-hearing",
    "10": "Die",
    "11": "Injure",
    "12": "Release-parole",
    "13": "Divorce",
    "14": "Marry",
    "15": "Pardon",
    "16": "Meet",
    "17": "Convict",
    "18": "Demonstrate",
    "19": "Start-org",
    "20": "Extradite",
    "21": "Fine",
    "22": "Execute",
    "23": "Phone-write",
    "24": "Arrest-jail",
    "25": "End-org",
    "26": "Elect",
    "27": "Declare-bankruptcy",
    "28": "Nominate",
    "29": "Attack",
    "30": "Be-born",
    "31": "Arquit",
    "32": "Transport",
    "33": "Charge-indict"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "Appeal": 0,
    "Arquit": 31,
    "Arrest-jail": 24,
    "Attack": 29,
    "Be-born": 30,
    "Charge-indict": 33,
    "Convict": 17,
    "Declare-bankruptcy": 27,
    "Demonstrate": 18,
    "Die": 10,
    "Divorce": 13,
    "Elect": 26,
    "End-org": 25,
    "End-position": 4,
    "Execute": 22,
    "Extradite": 20,
    "Fine": 21,
    "Injure": 11,
    "Marry": 14,
    "Meet": 16,
    "Merge-org": 1,
    "Nominate": 28,
    "O": 3,
    "Pardon": 15,
    "Phone-write": 23,
    "Release-parole": 12,
    "Sentence": 7,
    "Start-org": 19,
    "Start-position": 5,
    "Sue": 2,
    "Transfer-money": 8,
    "Transfer-ownership": 6,
    "Transport": 32,
    "Trial-hearing": 9
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 0,
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "position_embedding_type": "absolute",
  "problem_type": "multi_label_classification",
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 38168
}
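
The config declares "BertForSequenceClassification" with "problem_type": "multi_label_classification" and 34 labels (33 event types plus an "O" class). The snippet below is a minimal loading and inference sketch, not code from this repository: it assumes the checkpoint is published under the Hugging Face id "nc33/vibert-base-cased-ed" (inferred from the page header), that the hosted tokenizer matches the base model FPTAI/vibert-base-cased, and that a 0.5 sigmoid threshold is an acceptable decision rule; the Vietnamese example sentence is made up.

# Minimal usage sketch (Python, transformers >= 4.41 assumed)
import torch
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer

model_id = "nc33/vibert-base-cased-ed"  # assumed repository id

# Inspect the fields shown in this config.json
config = AutoConfig.from_pretrained(model_id)
print(config.problem_type)   # "multi_label_classification"
print(config.id2label[29])   # "Attack"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)
model.eval()

text = "Hai công ty đã sáp nhập vào đầu năm nay."  # "The two companies merged early this year."
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 34)

# With multi_label_classification, each label gets an independent sigmoid score
probs = torch.sigmoid(logits)[0]
predicted = [config.id2label[i] for i, p in enumerate(probs) if p > 0.5]
print(predicted)

Because problem_type is multi-label, the scores are read per label rather than through a softmax, so a sentence can trigger zero, one, or several event types; the "O" entry at id 3 is just one more label under that scheme.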