{ "_name_or_path": "xlm-roberta-base", "architectures": [ "RobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "B\u1ea5t \u0111\u1ed9ng s\u1ea3n", "1": "Du l\u1ecbch", "2": "Gi\u00e1o d\u1ee5c", "3": "Gi\u1ea3i tr\u00ed", "4": "Kinh doanh", "5": "Ph\u00e1p lu\u1eadt", "6": "S\u1ee9c kh\u1ecfe", "7": "Th\u1ebf gi\u1edbi", "8": "Th\u1ec3 thao", "9": "Th\u1eddi s\u1ef1", "10": "Xe", "11": "\u0110\u1eddi s\u1ed1ng" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B\u1ea5t \u0111\u1ed9ng s\u1ea3n": 0, "Du l\u1ecbch": 1, "Gi\u00e1o d\u1ee5c": 2, "Gi\u1ea3i tr\u00ed": 3, "Kinh doanh": 4, "Ph\u00e1p lu\u1eadt": 5, "S\u1ee9c kh\u1ecfe": 6, "Th\u1ebf gi\u1edbi": 7, "Th\u1ec3 thao": 8, "Th\u1eddi s\u1ef1": 9, "Xe": 10, "\u0110\u1eddi s\u1ed1ng": 11 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "multi_label_classification", "torch_dtype": "float32", "transformers_version": "4.31.0", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }