ltg/gpt-bert-babylm-base / config.json
{
  "architectures": [
    "LtgbertForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_ltgbert.LtgbertConfig",
    "AutoModel": "modeling_ltgbert.LtgbertModel",
    "AutoModelForCausalLM": "modeling_ltgbert.LtgbertForCausalLM",
    "AutoModelForMaskedLM": "modeling_ltgbert.LtgbertForMaskedLM",
    "AutoModelForSequenceClassification": "modeling_ltgbert.LtgbertForSequenceClassification",
    "AutoModelForTokenClassification": "modeling_ltgbert.LtgbertForTokenClassification",
    "AutoModelForQuestionAnswering": "modeling_ltgbert.LtgbertForQuestionAnswering",
    "AutoModelForMultipleChoice": "modeling_ltgbert.LtgbertForMultipleChoice"
  },
  "attention_probs_dropout_prob": 0.1,
  "hidden_dropout_prob": 0.1,
  "hidden_size": 384,
  "intermediate_size": 1024,
  "layer_norm_eps": 1e-07,
  "max_position_embeddings": 512,
  "num_attention_heads": 6,
  "num_hidden_layers": 12,
  "position_bucket_size": 32,
  "torch_dtype": "float32",
  "vocab_size": 8192
}
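
For reference, a minimal sketch of loading this checkpoint with the transformers library. Because the "auto_map" entries above route the Auto classes to custom modeling code shipped in the repository, trust_remote_code=True is required; the repository id ltg/gpt-bert-babylm-base is inferred from the page path, so adjust it if the checkpoint lives elsewhere.

from transformers import AutoTokenizer, AutoModelForCausalLM

# auto_map routes AutoModelForCausalLM to modeling_ltgbert.LtgbertForCausalLM,
# a custom class bundled with the repo, so remote code must be explicitly allowed.
tokenizer = AutoTokenizer.from_pretrained("ltg/gpt-bert-babylm-base", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("ltg/gpt-bert-babylm-base", trust_remote_code=True)

# Generate a short continuation with this 12-layer, 384-hidden-size,
# 8192-vocabulary model (inputs capped at max_position_embeddings=512 tokens).
inputs = tokenizer("Once upon a time", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))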