tykiww committed on
Commit
18ea001
·
verified ·
1 Parent(s): 611772e

Update config/config.json

Browse files
Files changed (1) hide show
  1. config/config.json +6 -6
config/config.json CHANGED
@@ -1,13 +1,13 @@
1
  {
2
  "model": {
3
  "peft": {
4
- "r": 64, # Number of parameter in adapter layer (64 generalizes well from QLora Paper)
5
- "alpha": 16, # weighting of fine tuning vs base model
6
- "dropout": 0, # Supports any, but = 0 is optimized
7
- "bias": "none", # Supports any, but = "none" is optimized
8
  "seed": 3407,
9
- "rslora": False, # We support rank stabilized LoRA
10
- "loftq_config": None, # And LoftQ
11
  },
12
  "sft": {
13
  "output_dir": "outputs",
 
1
  {
2
  "model": {
3
  "peft": {
4
+ "r": 64,
5
+ "alpha": 16,
6
+ "dropout": 0,
7
+ "bias": "none",
8
  "seed": 3407,
9
+ "rslora": false,
10
+ "loftq_config": null
11
  },
12
  "sft": {
13
  "output_dir": "outputs",