x-lora / xlora_config.json
{
"base_model_id": "HuggingFaceH4/zephyr-7b-beta",
"adapters": {
"adapter_1": "lamm-mit/x-lora/adapter_1",
"adapter_2": "lamm-mit/x-lora/adapter_2/",
"adapter_3": "lamm-mit/x-lora/adapter_3/",
"adapter_4": "lamm-mit/x-lora/adapter_4/",
"adapter_5": "lamm-mit/x-lora/adapter_5/",
"adapter_6": "lamm-mit/x-lora/adapter_6/",
"adapter_7": "lamm-mit/x-lora/adapter_7/",
"adapter_8": "lamm-mit/x-lora/adapter_8/",
"adapter_9": "lamm-mit/x-lora/adapter_9/"
},
"hidden_size": 4096,
"enable_softmax": true,
"enable_softmax_topk": false,
"layerwise_scalings": true,
"xlora_depth": 1,
"xlora_size": 2048,
"enable_relu_and_dropout": true,
"use_bias": true,
"xlora_dropout_p": 0.2,
"use_trainable_adapters": false,
"softmax_temperature": 1,
"top_k_lora": null,
"scaling_pass_value": 0
}
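
The keys above mirror the fields of the X-LoRA classifier configuration: "adapters" lists the nine LoRA experts, "layerwise_scalings" makes the classifier predict a separate mixing weight per adapter at every layer, "xlora_depth"/"xlora_size" size the scaling head, and "scaling_pass_value" is the scaling used for the initial dummy forward pass. Below is a minimal, hedged sketch of how such a file might be fed back into the accompanying xlora package; the entry points (xlora.xLoRAConfig, xlora.add_xlora_to_model) and their keyword names are assumed from the reference implementation and may differ between versions, and the device/dtype choices are illustrative only.

import json

import torch
import xlora  # reference X-LoRA implementation (API assumed, see note above)
from transformers import AutoConfig, AutoModelForCausalLM

# Read the serialized X-LoRA settings shown above.
with open("xlora_config.json") as f:
    cfg = json.load(f)

# Load the frozen base model named in the config (dtype/device are illustrative).
model = AutoModelForCausalLM.from_pretrained(
    cfg["base_model_id"],
    torch_dtype=torch.bfloat16,
    device_map="cuda:0",
)
base_config = AutoConfig.from_pretrained(cfg["base_model_id"])

# Wrap the base model with the X-LoRA scaling head and the nine adapters.
# Keyword names are assumed to mirror the JSON keys; hidden_size is passed first.
xlora_model = xlora.add_xlora_to_model(
    model=model,
    xlora_config=xlora.xLoRAConfig(
        base_config.hidden_size,
        base_model_id=cfg["base_model_id"],
        adapters=cfg["adapters"],
        device=torch.device("cuda"),
        enable_softmax=cfg["enable_softmax"],
        layerwise_scalings=cfg["layerwise_scalings"],
        xlora_depth=cfg["xlora_depth"],
        xlora_size=cfg["xlora_size"],
        xlora_dropout_p=cfg["xlora_dropout_p"],
        softmax_temperature=cfg["softmax_temperature"],
        top_k_lora=cfg["top_k_lora"],
        scaling_pass_value=cfg["scaling_pass_value"],
    ),
    verbose=True,
)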