Update xlora_config.json
xlora_config.json +28 -1
CHANGED
@@ -1 +1,28 @@
-{
+{
+    "base_model_id": "HuggingFaceH4/zephyr-7b-beta",
+    "adapters": {
+        "adapter_1": "lamm-mit/x-lora/X-LoRA_adapters/1/",
+        "adapter_2": "lamm-mit/x-lora/X-LoRA_adapters/2/",
+        "adapter_3": "lamm-mit/x-lora/X-LoRA_adapters/3/",
+        "adapter_4": "lamm-mit/x-lora/X-LoRA_adapters/4/",
+        "adapter_5": "lamm-mit/x-lora/X-LoRA_adapters/5/",
+        "adapter_6": "lamm-mit/x-lora/X-LoRA_adapters/6/",
+        "adapter_7": "lamm-mit/x-lora/X-LoRA_adapters/7/",
+        "adapter_8": "lamm-mit/x-lora/X-LoRA_adapters/8/",
+        "adapter_9": "lamm-mit/x-lora/X-LoRA_adapters/9/"
+    },
+    "hidden_size": 4096,
+    "enable_softmax": true,
+    "enable_softmax_topk": false,
+    "layerwise_scalings": true,
+    "xlora_depth": 1,
+    "xlora_size": 2048,
+    "enable_relu_and_dropout": true,
+    "use_bias": true,
+    "xlora_dropout_p": 0.2,
+    "stop_token_id": null,
+    "use_trainable_adapters": false,
+    "softmax_temperature": 1,
+    "top_k_lora": null,
+    "scaling_pass_value": 0
+}
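
Below is a minimal sketch (plain Python, standard library only) of how one might read and sanity-check the config introduced by this commit. The relative file path and the specific checks are assumptions for illustration, not part of the commit; loading the actual X-LoRA model from these adapter paths would additionally require the X-LoRA/PEFT integration, which is not shown here.

import json

# Minimal sketch: parse the updated xlora_config.json and sanity-check its
# contents. The file path is an assumption (config sits next to this script).
with open("xlora_config.json") as f:
    cfg = json.load(f)

# The base model this X-LoRA adapter mixture is built on.
print(cfg["base_model_id"])  # HuggingFaceH4/zephyr-7b-beta

# Nine adapters, keyed adapter_1 ... adapter_9, each pointing at a
# subfolder of the lamm-mit/x-lora repository.
assert len(cfg["adapters"]) == 9

# hidden_size should match the base model's hidden dimension
# (4096 for zephyr-7b-beta, a Mistral-7B fine-tune).
assert cfg["hidden_size"] == 4096

# Scaling-head hyperparameters set in this commit.
print(cfg["xlora_depth"], cfg["xlora_size"], cfg["xlora_dropout_p"])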