{
  "_name_or_path": "AutoTrain",
  "_num_labels": 31,
  "architectures": [
    "DebertaV2ForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "\u2600",
    "1": "\u2639\ufe0f",
    "2": "\u2728",
    "3": "\u2764",
    "4": "\ud83c\udf84",
    "5": "\ud83d\udc95",
    "6": "\ud83d\udc99",
    "7": "\ud83d\udc9c",
    "8": "\ud83d\udca2",
    "9": "\ud83d\udcaf",
    "10": "\ud83d\udcf7",
    "11": "\ud83d\udcf8",
    "12": "\ud83d\udd25",
    "13": "\ud83d\ude01",
    "14": "\ud83d\ude02",
    "15": "\ud83d\ude09",
    "16": "\ud83d\ude0a",
    "17": "\ud83d\ude0d",
    "18": "\ud83d\ude0e",
    "19": "\ud83d\ude14",
    "20": "\ud83d\ude18",
    "21": "\ud83d\ude1c",
    "22": "\ud83d\ude20",
    "23": "\ud83d\ude21",
    "24": "\ud83d\ude24",
    "25": "\ud83d\ude29",
    "26": "\ud83d\ude2d",
    "27": "\ud83d\ude33",
    "28": "\ud83d\ude43",
    "29": "\ud83d\ude44",
    "30": "\ud83d\ude48"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "\u2600": 0,
    "\u2639\ufe0f": 1,
    "\u2728": 2,
    "\u2764": 3,
    "\ud83c\udf84": 4,
    "\ud83d\udc95": 5,
    "\ud83d\udc99": 6,
    "\ud83d\udc9c": 7,
    "\ud83d\udca2": 8,
    "\ud83d\udcaf": 9,
    "\ud83d\udcf7": 10,
    "\ud83d\udcf8": 11,
    "\ud83d\udd25": 12,
    "\ud83d\ude01": 13,
    "\ud83d\ude02": 14,
    "\ud83d\ude09": 15,
    "\ud83d\ude0a": 16,
    "\ud83d\ude0d": 17,
    "\ud83d\ude0e": 18,
    "\ud83d\ude14": 19,
    "\ud83d\ude18": 20,
    "\ud83d\ude1c": 21,
    "\ud83d\ude20": 22,
    "\ud83d\ude21": 23,
    "\ud83d\ude24": 24,
    "\ud83d\ude29": 25,
    "\ud83d\ude2d": 26,
    "\ud83d\ude33": 27,
    "\ud83d\ude43": 28,
    "\ud83d\ude44": 29,
    "\ud83d\ude48": 30
  },
  "layer_norm_eps": 1e-07,
  "max_length": 64,
  "max_position_embeddings": 512,
  "max_relative_positions": -1,
  "model_type": "deberta-v2",
  "norm_rel_ebd": "layer_norm",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "padding": "max_length",
  "pooler_dropout": 0,
  "pooler_hidden_act": "gelu",
  "pooler_hidden_size": 768,
  "pos_att_type": [
    "p2c",
    "c2p"
  ],
  "position_biased_input": false,
  "position_buckets": 256,
  "relative_attention": true,
  "share_att_key": true,
  "torch_dtype": "float32",
  "transformers_version": "4.29.2",
  "type_vocab_size": 0,
  "vocab_size": 128100
}
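This config describes an AutoTrain-produced DeBERTa-v2 checkpoint fine-tuned for 31-way sequence classification, where each class in `id2label` is an emoji. Below is a minimal sketch of how a config like this is typically consumed with `transformers`: load the checkpoint, tokenize with the same `max_length=64` / `max_length` padding the config records, and map the argmax logit back through `id2label`. The repository id `your-username/your-model` is a placeholder assumption, not something stated in the config; substitute the actual model repo.

```python
# Minimal sketch, assuming this config ships alongside weights and a
# tokenizer in a Hugging Face model repo. The repo id is a placeholder.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

repo_id = "your-username/your-model"  # placeholder, not from the config

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)

# Mirror the preprocessing the config records: pad/truncate to 64 tokens.
inputs = tokenizer(
    "This holiday season feels magical",
    padding="max_length",
    max_length=64,
    truncation=True,
    return_tensors="pt",
)

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 31): one score per emoji class

predicted_id = logits.argmax(dim=-1).item()
print(model.config.id2label[predicted_id])  # the predicted emoji label
```

Note that `relative_attention: true` with `pos_att_type: ["p2c", "c2p"]` and `position_biased_input: false` are the standard DeBERTa-v2 disentangled-attention settings: position information enters through content-to-position and position-to-content attention terms rather than being added to the input embeddings, so no change is needed on the inference side.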