hyk000 committed
Commit 9ec1a3f
1 parent: 5ea1621

End of training

Files changed (3)
  1. README.md +165 -0
  2. generation_config.json +167 -0
  3. model.safetensors +1 -1
README.md ADDED
@@ -0,0 +1,165 @@
+ ---
+ library_name: transformers
+ language:
+ - ko
+ license: apache-2.0
+ base_model: openai/whisper-base
+ tags:
+ - hf-asr-leaderboard
+ - generated_from_trainer
+ datasets:
+ - hyk000/gdialect
+ model-index:
+ - name: gg_mdl
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # gg_mdl
+
+ This model is a fine-tuned version of [openai/whisper-base](https://huggingface.co/openai/whisper-base) on the gg_ds dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 1.7053
+ - Cer: 26.9304
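+
+ ## How to use
+
+ A minimal inference sketch with the 🤗 Transformers pipeline. The repo id `hyk000/gg_mdl` and the audio handling are assumptions; adjust them to your setup (Whisper expects 16 kHz audio).
+
+ ```python
+ from transformers import pipeline
+
+ # Load the fine-tuned checkpoint as an ASR pipeline
+ # ("hyk000/gg_mdl" is the assumed repo id for this model).
+ asr = pipeline("automatic-speech-recognition", model="hyk000/gg_mdl")
+
+ # Transcribe a 16 kHz audio file, pinning Korean transcription
+ # rather than relying on automatic language detection.
+ result = asr("sample.wav", generate_kwargs={"language": "ko", "task": "transcribe"})
+ print(result["text"])
+ ```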
+
+ ## Model description
+
+ [openai/whisper-base](https://huggingface.co/openai/whisper-base) fine-tuned for Korean (`ko`) automatic speech recognition and evaluated with character error rate (CER). More information needed.
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ The model was trained and evaluated on the [hyk000/gdialect](https://huggingface.co/datasets/hyk000/gdialect) dataset (referred to as gg_ds above). More information needed.
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training (a sketch of the matching `Seq2SeqTrainingArguments` follows the list):
+ - learning_rate: 1e-05
+ - train_batch_size: 4
+ - eval_batch_size: 4
+ - seed: 42
+ - optimizer: AdamW (torch) with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 500
+ - training_steps: 100000
+ - mixed_precision_training: Native AMP
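+
+ The training script itself is not part of this commit; the following is a minimal sketch of `Seq2SeqTrainingArguments` reconstructed from the list above. `output_dir` and the evaluation cadence are assumptions (the results table below reports an eval every 1000 steps).
+
+ ```python
+ from transformers import Seq2SeqTrainingArguments
+
+ training_args = Seq2SeqTrainingArguments(
+     output_dir="gg_mdl",              # assumed output/repo name
+     learning_rate=1e-5,
+     per_device_train_batch_size=4,
+     per_device_eval_batch_size=4,
+     seed=42,
+     optim="adamw_torch",              # AdamW; betas=(0.9, 0.999), eps=1e-8 are the defaults
+     lr_scheduler_type="linear",
+     warmup_steps=500,
+     max_steps=100000,
+     fp16=True,                        # "Native AMP" mixed-precision training
+     eval_strategy="steps",            # assumed: matches the 1000-step eval cadence
+     eval_steps=1000,
+     predict_with_generate=True,       # needed to compute CER from generated text
+ )
+ ```
+
+ These arguments would then be passed to a `Seq2SeqTrainer` along with the model, processor, and dataset.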
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Cer |
+ |:-------------:|:-----:|:------:|:---------------:|:-------:|
+ | 0.2863 | 0.8 | 1000 | 0.9834 | 34.5643 |
+ | 0.3725 | 1.6 | 2000 | 0.9299 | 36.7432 |
+ | 0.3335 | 2.4 | 3000 | 0.9437 | 32.7097 |
+ | 0.1498 | 3.2 | 4000 | 0.9722 | 26.1319 |
+ | 0.2081 | 4.0 | 5000 | 0.9881 | 31.9533 |
+ | 0.213 | 4.8 | 6000 | 1.0197 | 30.8832 |
+ | 0.094 | 5.6 | 7000 | 1.0486 | 29.5707 |
+ | 0.0637 | 6.4 | 8000 | 1.0741 | 26.9211 |
+ | 0.0518 | 7.2 | 9000 | 1.0964 | 30.4382 |
+ | 0.0512 | 8.0 | 10000 | 1.1179 | 26.4199 |
+ | 0.0288 | 8.8 | 11000 | 1.1420 | 27.2981 |
+ | 0.0274 | 9.6 | 12000 | 1.1617 | 26.9620 |
+ | 0.0255 | 10.4 | 13000 | 1.1779 | 27.3133 |
+ | 0.0215 | 11.2 | 14000 | 1.2062 | 25.8813 |
+ | 0.0128 | 12.0 | 15000 | 1.2138 | 25.8497 |
+ | 0.013 | 12.8 | 16000 | 1.2354 | 26.8496 |
+ | 0.0054 | 13.6 | 17000 | 1.2323 | 27.6025 |
+ | 0.0088 | 14.4 | 18000 | 1.2596 | 25.8228 |
+ | 0.0031 | 15.2 | 19000 | 1.2807 | 29.0122 |
+ | 0.0093 | 16.0 | 20000 | 1.2865 | 25.8907 |
+ | 0.0113 | 16.8 | 21000 | 1.2983 | 28.7241 |
+ | 0.0051 | 17.6 | 22000 | 1.3118 | 25.8685 |
+ | 0.0019 | 18.4 | 23000 | 1.3225 | 26.2256 |
+ | 0.0031 | 19.2 | 24000 | 1.3419 | 25.9586 |
+ | 0.0096 | 20.0 | 25000 | 1.3516 | 28.7066 |
+ | 0.0051 | 20.8 | 26000 | 1.3419 | 25.9937 |
+ | 0.0028 | 21.6 | 27000 | 1.3634 | 28.4256 |
+ | 0.0019 | 22.4 | 28000 | 1.3659 | 26.7876 |
+ | 0.0041 | 23.2 | 29000 | 1.3855 | 25.7631 |
+ | 0.005 | 24.0 | 30000 | 1.3848 | 27.5709 |
+ | 0.0043 | 24.8 | 31000 | 1.3801 | 27.5252 |
+ | 0.0046 | 25.6 | 32000 | 1.3974 | 26.5253 |
+ | 0.0017 | 26.4 | 33000 | 1.3992 | 26.9854 |
+ | 0.0017 | 27.2 | 34000 | 1.4133 | 26.5405 |
+ | 0.0007 | 28.0 | 35000 | 1.4214 | 27.7360 |
+ | 0.0009 | 28.8 | 36000 | 1.4275 | 28.0322 |
+ | 0.0018 | 29.6 | 37000 | 1.4315 | 26.6939 |
+ | 0.0012 | 30.4 | 38000 | 1.4424 | 26.2431 |
+ | 0.0007 | 31.2 | 39000 | 1.4498 | 26.0640 |
+ | 0.0007 | 32.0 | 40000 | 1.4652 | 27.6891 |
+ | 0.001 | 32.8 | 41000 | 1.4652 | 26.2478 |
+ | 0.0003 | 33.6 | 42000 | 1.4696 | 26.8297 |
+ | 0.0004 | 34.4 | 43000 | 1.4603 | 26.3309 |
+ | 0.0004 | 35.2 | 44000 | 1.4692 | 26.9234 |
+ | 0.0003 | 36.0 | 45000 | 1.4689 | 26.7981 |
+ | 0.001 | 36.8 | 46000 | 1.4907 | 26.5323 |
+ | 0.0015 | 37.6 | 47000 | 1.4897 | 26.7817 |
+ | 0.0002 | 38.4 | 48000 | 1.4874 | 26.9093 |
+ | 0.0003 | 39.2 | 49000 | 1.4884 | 26.8637 |
+ | 0.0009 | 40.0 | 50000 | 1.4854 | 26.9386 |
+ | 0.001 | 40.8 | 51000 | 1.4978 | 26.8449 |
+ | 0.0002 | 41.6 | 52000 | 1.5018 | 27.8132 |
+ | 0.0007 | 42.4 | 53000 | 1.5129 | 27.7219 |
+ | 0.0002 | 43.2 | 54000 | 1.5252 | 27.9010 |
+ | 0.0024 | 44.0 | 55000 | 1.5070 | 25.5617 |
+ | 0.0007 | 44.8 | 56000 | 1.5149 | 27.3964 |
+ | 0.0025 | 45.6 | 57000 | 1.5287 | 25.9973 |
+ | 0.0004 | 46.4 | 58000 | 1.5313 | 27.6294 |
+ | 0.0001 | 47.2 | 59000 | 1.5313 | 26.6799 |
+ | 0.0005 | 48.0 | 60000 | 1.5478 | 27.5381 |
+ | 0.0003 | 48.8 | 61000 | 1.5353 | 27.3402 |
+ | 0.0001 | 49.6 | 62000 | 1.5550 | 25.4680 |
+ | 0.0001 | 50.4 | 63000 | 1.5463 | 25.9656 |
+ | 0.0001 | 51.2 | 64000 | 1.5609 | 26.2935 |
+ | 0.0001 | 52.0 | 65000 | 1.5556 | 25.8509 |
+ | 0.0012 | 52.8 | 66000 | 1.5704 | 26.3110 |
+ | 0.0007 | 53.6 | 67000 | 1.5673 | 26.3087 |
+ | 0.0003 | 54.4 | 68000 | 1.5767 | 26.2396 |
+ | 0.0 | 55.2 | 69000 | 1.5727 | 26.2139 |
+ | 0.0001 | 56.0 | 70000 | 1.5723 | 27.4116 |
+ | 0.0001 | 56.8 | 71000 | 1.5863 | 26.9082 |
+ | 0.0004 | 57.6 | 72000 | 1.5943 | 26.4949 |
+ | 0.0006 | 58.4 | 73000 | 1.5944 | 26.6330 |
+ | 0.0001 | 59.2 | 74000 | 1.5860 | 28.3659 |
+ | 0.0 | 60.0 | 75000 | 1.5973 | 26.7759 |
+ | 0.0 | 60.8 | 76000 | 1.6017 | 27.2278 |
+ | 0.0 | 61.6 | 77000 | 1.6070 | 26.2619 |
+ | 0.0 | 62.4 | 78000 | 1.6092 | 27.0030 |
+ | 0.0 | 63.2 | 79000 | 1.6108 | 26.6576 |
+ | 0.0 | 64.0 | 80000 | 1.6146 | 25.9387 |
+ | 0.0 | 64.8 | 81000 | 1.6202 | 25.7291 |
+ | 0.0 | 65.6 | 82000 | 1.6215 | 27.0042 |
+ | 0.0 | 66.4 | 83000 | 1.6256 | 27.1915 |
+ | 0.0 | 67.2 | 84000 | 1.6330 | 26.7677 |
+ | 0.0 | 68.0 | 85000 | 1.6279 | 26.5803 |
+ | 0.0 | 68.8 | 86000 | 1.6343 | 26.8625 |
+ | 0.0 | 69.6 | 87000 | 1.6417 | 26.1296 |
+ | 0.0 | 70.4 | 88000 | 1.6505 | 26.5874 |
+ | 0.0 | 71.2 | 89000 | 1.6558 | 26.0640 |
+ | 0.0 | 72.0 | 90000 | 1.6602 | 25.9469 |
+ | 0.0 | 72.8 | 91000 | 1.6662 | 26.2338 |
+ | 0.0 | 73.6 | 92000 | 1.6719 | 26.1460 |
+ | 0.0 | 74.4 | 93000 | 1.6783 | 26.6576 |
+ | 0.0 | 75.2 | 94000 | 1.6836 | 26.3099 |
+ | 0.0 | 76.0 | 95000 | 1.6891 | 26.4984 |
+ | 0.0 | 76.8 | 96000 | 1.6946 | 26.4328 |
+ | 0.0 | 77.6 | 97000 | 1.6988 | 26.7056 |
+ | 0.0 | 78.4 | 98000 | 1.7023 | 26.6049 |
+ | 0.0 | 79.2 | 99000 | 1.7046 | 27.1821 |
+ | 0.0 | 80.0 | 100000 | 1.7053 | 26.9304 |
+
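+ The Cer column is the character error rate in percent. The evaluation code is not included in this commit; below is a minimal sketch of how CER can be computed with the 🤗 `evaluate` package (the example strings are placeholders):
+
+ ```python
+ import evaluate
+
+ # Character error rate: character-level edit distance divided by
+ # the number of reference characters; scaled to percent here.
+ cer_metric = evaluate.load("cer")
+
+ predictions = ["안녕하세요"]    # model transcripts (placeholder)
+ references = ["안녕하십니까"]   # ground-truth transcripts (placeholder)
+
+ cer = 100 * cer_metric.compute(predictions=predictions, references=references)
+ print(f"CER: {cer:.4f}")
+ ```
+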
+ ### Framework versions
+
+ - Transformers 4.47.0.dev0
+ - Pytorch 2.4.1
+ - Datasets 3.1.0
+ - Tokenizers 0.20.1
generation_config.json ADDED
@@ -0,0 +1,167 @@
+ {
+   "alignment_heads": [
+     [3, 1], [4, 2], [4, 3], [4, 7],
+     [5, 1], [5, 2], [5, 4], [5, 6]
+   ],
+   "begin_suppress_tokens": [220, 50257],
+   "bos_token_id": 50257,
+   "decoder_start_token_id": 50258,
+   "eos_token_id": 50257,
+   "forced_decoder_ids": [
+     [1, null],
+     [2, 50359]
+   ],
+   "is_multilingual": true,
+   "lang_to_id": {
+     "<|af|>": 50327, "<|am|>": 50334, "<|ar|>": 50272, "<|as|>": 50350,
+     "<|az|>": 50304, "<|ba|>": 50355, "<|be|>": 50330, "<|bg|>": 50292,
+     "<|bn|>": 50302, "<|bo|>": 50347, "<|br|>": 50309, "<|bs|>": 50315,
+     "<|ca|>": 50270, "<|cs|>": 50283, "<|cy|>": 50297, "<|da|>": 50285,
+     "<|de|>": 50261, "<|el|>": 50281, "<|en|>": 50259, "<|es|>": 50262,
+     "<|et|>": 50307, "<|eu|>": 50310, "<|fa|>": 50300, "<|fi|>": 50277,
+     "<|fo|>": 50338, "<|fr|>": 50265, "<|gl|>": 50319, "<|gu|>": 50333,
+     "<|haw|>": 50352, "<|ha|>": 50354, "<|he|>": 50279, "<|hi|>": 50276,
+     "<|hr|>": 50291, "<|ht|>": 50339, "<|hu|>": 50286, "<|hy|>": 50312,
+     "<|id|>": 50275, "<|is|>": 50311, "<|it|>": 50274, "<|ja|>": 50266,
+     "<|jw|>": 50356, "<|ka|>": 50329, "<|kk|>": 50316, "<|km|>": 50323,
+     "<|kn|>": 50306, "<|ko|>": 50264, "<|la|>": 50294, "<|lb|>": 50345,
+     "<|ln|>": 50353, "<|lo|>": 50336, "<|lt|>": 50293, "<|lv|>": 50301,
+     "<|mg|>": 50349, "<|mi|>": 50295, "<|mk|>": 50308, "<|ml|>": 50296,
+     "<|mn|>": 50314, "<|mr|>": 50320, "<|ms|>": 50282, "<|mt|>": 50343,
+     "<|my|>": 50346, "<|ne|>": 50313, "<|nl|>": 50271, "<|nn|>": 50342,
+     "<|no|>": 50288, "<|oc|>": 50328, "<|pa|>": 50321, "<|pl|>": 50269,
+     "<|ps|>": 50340, "<|pt|>": 50267, "<|ro|>": 50284, "<|ru|>": 50263,
+     "<|sa|>": 50344, "<|sd|>": 50332, "<|si|>": 50322, "<|sk|>": 50298,
+     "<|sl|>": 50305, "<|sn|>": 50324, "<|so|>": 50326, "<|sq|>": 50317,
+     "<|sr|>": 50303, "<|su|>": 50357, "<|sv|>": 50273, "<|sw|>": 50318,
+     "<|ta|>": 50287, "<|te|>": 50299, "<|tg|>": 50331, "<|th|>": 50289,
+     "<|tk|>": 50341, "<|tl|>": 50348, "<|tr|>": 50268, "<|tt|>": 50351,
+     "<|uk|>": 50280, "<|ur|>": 50290, "<|uz|>": 50337, "<|vi|>": 50278,
+     "<|yi|>": 50335, "<|yo|>": 50325, "<|zh|>": 50260
+   },
+   "max_initial_timestamp_index": 50,
+   "max_length": 1024,
+   "no_timestamps_token_id": 50363,
+   "pad_token_id": 50257,
+   "prev_sot_token_id": 50361,
+   "return_timestamps": false,
+   "suppress_tokens": [],
+   "task_to_id": {
+     "transcribe": 50359,
+     "translate": 50358
+   },
+   "transformers_version": "4.47.0.dev0"
+ }
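
In this generation config, `forced_decoder_ids` leaves the language token (position 1) unset, so the language is auto-detected, while position 2 is forced to 50359 (`transcribe`, per `task_to_id`). A minimal sketch of pinning Korean at inference time instead; the repo id `hyk000/gg_mdl` and the silent placeholder audio are assumptions:

```python
import torch
from transformers import WhisperForConditionalGeneration, WhisperProcessor

# "hyk000/gg_mdl" is the assumed repo id for this checkpoint.
model = WhisperForConditionalGeneration.from_pretrained("hyk000/gg_mdl")
processor = WhisperProcessor.from_pretrained("hyk000/gg_mdl")

# One second of 16 kHz silence stands in for real audio.
audio = torch.zeros(16000).numpy()
inputs = processor(audio, sampling_rate=16000, return_tensors="pt")

# Pin language and task instead of relying on auto-detection;
# "ko" maps to token 50264 via lang_to_id above.
ids = model.generate(inputs.input_features, language="ko", task="transcribe")
print(processor.batch_decode(ids, skip_special_tokens=True)[0])
```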
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6909b3d93ad80fd4be9aa575118dff22c8e8269c87869365317da26295de32dc
+ oid sha256:a9f85b16d2c7c321c54e0047e3c7484ce750a1d23d9ce7f750847d2d53b9b251
  size 290403936