mjbuehler committed on
Commit
edabc7f
1 Parent(s): d174ca8

Update xlora_config.json

Files changed (1)
  1. xlora_config.json +28 -1
xlora_config.json CHANGED
@@ -1 +1,28 @@
- {"base_model_id":"HuggingFaceH4/zephyr-7b-beta", "adapters": {"adapter_1": "lamm-mit/x-lora/X-LoRA_adapters/1/", "adapter_2": "lamm-mit/x-lora/X-LoRA_adapters/2/", "adapter_3": "lamm-mit/x-lora/X-LoRA_adapters/3/", "adapter_4": "lamm-mit/x-lora/X-LoRA_adapters/4/", "adapter_5": "lamm-mit/x-lora/X-LoRA_adapters/5/", "adapter_6": "lamm-mit/x-lora/X-LoRA_adapters/6/", "adapter_7": "lamm-mit/x-lora/X-LoRA_adapters/7/", "adapter_8": "lamm-mit/x-lora/X-LoRA_adapters/8/", "adapter_9": "lamm-mit/x-lora/X-LoRA_adapters/9/",}, "hidden_size": 4096, "enable_softmax": true, "enable_softmax_topk": false, "layerwise_scalings": true, "xlora_depth": 1, "xlora_size": 2048, "enable_relu_and_dropout": true, "use_bias": true, "xlora_dropout_p": 0.2, "stop_token_id": null, "use_trainable_adapters": false, "softmax_temperature": 1.0, "top_k_lora": null, "scaling_pass_value": 0.0}
+ {
+     "base_model_id": "HuggingFaceH4/zephyr-7b-beta",
+     "adapters": {
+         "adapter_1": "lamm-mit/x-lora/X-LoRA_adapters/1/",
+         "adapter_2": "lamm-mit/x-lora/X-LoRA_adapters/2/",
+         "adapter_3": "lamm-mit/x-lora/X-LoRA_adapters/3/",
+         "adapter_4": "lamm-mit/x-lora/X-LoRA_adapters/4/",
+         "adapter_5": "lamm-mit/x-lora/X-LoRA_adapters/5/",
+         "adapter_6": "lamm-mit/x-lora/X-LoRA_adapters/6/",
+         "adapter_7": "lamm-mit/x-lora/X-LoRA_adapters/7/",
+         "adapter_8": "lamm-mit/x-lora/X-LoRA_adapters/8/",
+         "adapter_9": "lamm-mit/x-lora/X-LoRA_adapters/9/"
+     },
+     "hidden_size": 4096,
+     "enable_softmax": true,
+     "enable_softmax_topk": false,
+     "layerwise_scalings": true,
+     "xlora_depth": 1,
+     "xlora_size": 2048,
+     "enable_relu_and_dropout": true,
+     "use_bias": true,
+     "xlora_dropout_p": 0.2,
+     "stop_token_id": null,
+     "use_trainable_adapters": false,
+     "softmax_temperature": 1,
+     "top_k_lora": null,
+     "scaling_pass_value": 0
+ }
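
The removed one-liner was not strictly valid JSON: the "adapters" object ended with a trailing comma before its closing brace, which strict parsers reject. Besides pretty-printing, the commit drops that comma and writes softmax_temperature and scaling_pass_value as integers. As a minimal sanity check (a sketch, assuming the updated file is saved locally as xlora_config.json), the standard json module can parse it and list the registered adapters:

import json

# Parse the updated config. The pre-commit one-liner would raise
# json.JSONDecodeError here because of the trailing comma after "adapter_9".
with open("xlora_config.json") as f:
    config = json.load(f)

print(config["base_model_id"])  # HuggingFaceH4/zephyr-7b-beta
for name, path in config["adapters"].items():
    print(f"{name} -> {path}")  # nine adapters under lamm-mit/x-lora/X-LoRA_adapters/

Note that "hidden_size": 4096 matches the hidden dimension of the zephyr-7b-beta base model, which the X-LoRA scaling head reads its inputs from.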