tykiww committed on
Commit
dc813e2
·
verified ·
1 Parent(s): f58a3a5

Update config/config.json

Browse files
Files changed (1) hide show
  1. config/config.json +10 -1
config/config.json CHANGED
@@ -1,6 +1,15 @@
1
  {
2
  "model": {
3
- "params": {
 
 
 
 
 
 
 
 
 
4
  "output_dir": "outputs",
5
  "num_train_epochs": 3,
6
  "per_device_train_batch_size": 4,
 
1
  {
2
  "model": {
3
+ "peft": {
4
+ "r": 64,
5
+ "alpha": 16,
6
+ "dropout": 0,
7
+ "bias": "none",
8
+ "seed": 3407,
9
+ "rslora": false,
10
+ "loftq_config": null
11
+ },
12
+ "sft": {
13
  "output_dir": "outputs",
14
  "num_train_epochs": 3,
15
  "per_device_train_batch_size": 4,