Aiden_t5 / config.json
{
  "model": {
    "type": "liquid-generative-cognitive-recurrent-attentive-adversarial-progressive-neural-network",
    "num_layers": 360,
    "num_neurons_per_layer": 10000,
    "num_parameters": 1200000000,
    "self_reflection_layer": true,
    "emotional_intelligence_layer": true,
    "reasoning_layer": true,
    "logic_layer": true,
    "self_attention_layer": true,
    "deep_learning": true,
    "transfer_learning": true,
    "deep_reinforcement_learning": true,
    "unsupervised_learning": true,
    "self_learning": true,
    "contextual_awareness": true,
    "self_cognition": true,
    "internet_access": true,
    "use_knowledge_distillation": true,
    "use_creativity_layer": true,
    "use_attention_pooling": true,
    "use_hierarchical_representation": true,
    "batch_size": 640
  },
  "training": {
    "dataset": "/path/to/dataset.json",
    "epochs": 2000
  }
}
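
For reference, a minimal sketch of how a config like this could be loaded and sanity-checked in Python. The field names mirror this file; the checks themselves are illustrative assumptions, not part of the original repository.

import json

def load_config(path="config.json"):
    """Load the config and run basic sanity checks (illustrative assumptions)."""
    with open(path, "r", encoding="utf-8") as f:
        cfg = json.load(f)

    model = cfg["model"]
    training = cfg["training"]

    # JSON has no digit separators, so num_parameters must be a plain integer.
    assert isinstance(model["num_parameters"], int)
    assert model["num_layers"] > 0 and model["num_neurons_per_layer"] > 0
    assert model["batch_size"] > 0
    assert training["epochs"] > 0

    return cfg

if __name__ == "__main__":
    config = load_config()
    print(config["model"]["type"])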