bosbos committed
Commit ef4d881
1 Parent(s): 4582326

End of training

Files changed (1)
  1. README.md +7 -5
README.md CHANGED
@@ -2,6 +2,8 @@
  license: apache-2.0
  library_name: peft
  tags:
+ - trl
+ - sft
  - generated_from_trainer
  base_model: vilsonrodrigues/falcon-7b-instruct-sharded
  model-index:
@@ -42,7 +44,7 @@ The following hyperparameters were used during training:
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: cosine
  - lr_scheduler_warmup_ratio: 0.03
- - training_steps: 75
+ - training_steps: 40
  - mixed_precision_training: Native AMP

  ### Training results
@@ -51,8 +53,8 @@ The following hyperparameters were used during training:

  ### Framework versions

- - PEFT 0.7.1
- - Transformers 4.36.1
- - Pytorch 2.1.2
- - Datasets 2.1.0
+ - PEFT 0.10.1.dev0
+ - Transformers 4.39.3
+ - Pytorch 2.2.1+cu121
+ - Datasets 2.18.0
  - Tokenizers 0.15.2
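
For context, the hyperparameters listed in this README map fairly directly onto a `transformers` `TrainingArguments` object. The sketch below is not part of this commit; the `output_dir` is hypothetical and the optimizer settings shown are the library defaults that match the README's Adam betas/epsilon. The `trl` and `sft` tags added here suggest the run was driven by trl's `SFTTrainer`, which consumes arguments like these.

```python
# Illustrative sketch only: the training script itself is not part of this commit.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="./falcon-7b-instruct-sharded-ft",  # hypothetical output path
    optim="adamw_torch",         # Adam with betas=(0.9,0.999) and epsilon=1e-08 (defaults)
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,           # lr_scheduler_warmup_ratio
    max_steps=40,                # training_steps after this commit (previously 75)
    fp16=True,                   # "Native AMP" mixed precision; requires a CUDA device
)
```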