devlocalhost committed on
Commit
f8e88f2
1 Parent(s): dc057e6

End of training

Browse files
Files changed (1) hide show
  1. README.md +9 -6
README.md CHANGED
@@ -1,10 +1,11 @@
1
  ---
2
  license: apache-2.0
3
- base_model: PY007/TinyLlama-1.1B-Chat-v0.3
4
  tags:
5
  - trl
6
  - sft
7
  - generated_from_trainer
 
8
  model-index:
9
  - name: tinyllama-min-primary-secondary-lora
10
  results: []
@@ -42,7 +43,8 @@ The following hyperparameters were used during training:
42
  - total_train_batch_size: 32
43
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
44
  - lr_scheduler_type: cosine
45
- - training_steps: 200
 
46
 
47
  ### Training results
48
 
@@ -50,7 +52,8 @@ The following hyperparameters were used during training:
50
 
51
  ### Framework versions
52
 
53
- - Transformers 4.31.0
54
- - Pytorch 2.0.1+cu117
55
- - Datasets 2.15.0
56
- - Tokenizers 0.13.3
 
 
1
  ---
2
  license: apache-2.0
3
+ library_name: peft
4
  tags:
5
  - trl
6
  - sft
7
  - generated_from_trainer
8
+ base_model: PY007/TinyLlama-1.1B-Chat-v0.3
9
  model-index:
10
  - name: tinyllama-min-primary-secondary-lora
11
  results: []
 
43
  - total_train_batch_size: 32
44
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
45
  - lr_scheduler_type: cosine
46
+ - num_epochs: 1
47
+ - mixed_precision_training: Native AMP
48
 
49
  ### Training results
50
 
 
52
 
53
  ### Framework versions
54
 
55
+ - PEFT 0.10.0
56
+ - Transformers 4.39.0
57
+ - Pytorch 2.2.0+cu121
58
+ - Datasets 2.18.0
59
+ - Tokenizers 0.15.2