Safetensors
File size: 1,009 Bytes
edabc7f
{
    "base_model_id": "HuggingFaceH4/zephyr-7b-beta",
    "adapters": {
        "adapter_1": "lamm-mit/x-lora/X-LoRA_adapters/1/",
        "adapter_2": "lamm-mit/x-lora/X-LoRA_adapters/2/",
        "adapter_3": "lamm-mit/x-lora/X-LoRA_adapters/3/",
        "adapter_4": "lamm-mit/x-lora/X-LoRA_adapters/4/",
        "adapter_5": "lamm-mit/x-lora/X-LoRA_adapters/5/",
        "adapter_6": "lamm-mit/x-lora/X-LoRA_adapters/6/",
        "adapter_7": "lamm-mit/x-lora/X-LoRA_adapters/7/",
        "adapter_8": "lamm-mit/x-lora/X-LoRA_adapters/8/",
        "adapter_9": "lamm-mit/x-lora/X-LoRA_adapters/9/"
    },
    "hidden_size": 4096,
    "enable_softmax": true,
    "enable_softmax_topk": false,
    "layerwise_scalings": true,
    "xlora_depth": 1,
    "xlora_size": 2048,
    "enable_relu_and_dropout": true,
    "use_bias": true,
    "xlora_dropout_p": 0.2,
    "stop_token_id": null,
    "use_trainable_adapters": false,
    "softmax_temperature": 1,
    "top_k_lora": null,
    "scaling_pass_value": 0
}
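
This configuration can be inspected with nothing more than the Python standard library. The sketch below is a minimal, assumed usage example: the local file name "xlora_config.json" and the interpretive comments are assumptions about how the fields are used, not part of the file shown above.

# Minimal sketch: parse the X-LoRA config above and print its contents.
# The file name "xlora_config.json" is an assumption for illustration.
import json

with open("xlora_config.json") as f:
    cfg = json.load(f)

# Base model the nine LoRA adapters were trained against.
print(cfg["base_model_id"])            # HuggingFaceH4/zephyr-7b-beta

# Adapter names mapped to their Hub subfolder paths.
for name, path in cfg["adapters"].items():
    print(f"{name}: {path}")

# Scaling-head hyperparameters as stored in the file: a depth-1 head of
# width 2048 over 4096-dim hidden states, with ReLU + dropout (p=0.2),
# bias, softmax over adapters, and per-layer scalings enabled.
head_keys = [
    "hidden_size", "xlora_depth", "xlora_size", "xlora_dropout_p",
    "enable_relu_and_dropout", "use_bias", "enable_softmax",
    "enable_softmax_topk", "layerwise_scalings", "softmax_temperature",
    "top_k_lora", "scaling_pass_value",
]
for key in head_keys:
    print(f"{key} = {cfg[key]}")

These field names also match the keyword arguments of the XLoraConfig class in recent PEFT releases, so the parsed dictionary could plausibly be forwarded to it; treat that mapping as an assumption and verify against the PEFT version you have installed.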