SabiYarn-125M / config.json
{
"_name_or_path": "/pretrainedmodel",
"architectures": [
"GPTJXForCausalLM"
],
"auto_map": {
"AutoConfig": "pretrained_config.GPTJXConfig",
"AutoModelForCausalLM": "pretrained_model.GPTJXForCausalLM"
},
"bias": false,
"block_size": 1024,
"dropout": 0.0,
"model_type": "nanogpt-j",
"n_embd": 768,
"n_head": 12,
"n_layer": 12,
"torch_dtype": "float32",
"transformers_version": "4.39.3",
"vocab_size": 52050
}
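
The configuration describes a GPT-style decoder (12 layers, 12 heads, 768-dim embeddings, 1024-token context, 52,050-token vocabulary), which is consistent with the ~125M parameters in the model name. The auto_map entries point the Transformers Auto classes at custom code shipped with the repository (pretrained_config.GPTJXConfig and pretrained_model.GPTJXForCausalLM), so loading requires trust_remote_code=True. A minimal loading sketch, assuming the repository ID is BeardedMonster/SabiYarn-125M (inferred from the page title) and that the repo also ships a tokenizer:

from transformers import AutoModelForCausalLM, AutoTokenizer

# Repo ID assumed from the page title; adjust if the model lives elsewhere.
repo_id = "BeardedMonster/SabiYarn-125M"

# trust_remote_code=True is required because auto_map routes AutoConfig and
# AutoModelForCausalLM to classes defined in the repository
# (pretrained_config.GPTJXConfig, pretrained_model.GPTJXForCausalLM)
# rather than to classes built into the transformers library.
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)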