ojisetyawan committed on
Commit
7c31149
1 Parent(s): 290f123

Training in progress, epoch 3

config.json ADDED
@@ -0,0 +1,223 @@
+ {
+ "_name_or_path": "tarteel-ai/whisper-base-ar-quran",
+ "activation_dropout": 0.0,
+ "activation_function": "gelu",
+ "apply_spec_augment": false,
+ "architectures": [
+ "WhisperForAudioClassification"
+ ],
+ "attention_dropout": 0.0,
+ "begin_suppress_tokens": [
+ 220,
+ 50257
+ ],
+ "bos_token_id": 50257,
+ "classifier_proj_size": 256,
+ "d_model": 512,
+ "decoder_attention_heads": 8,
+ "decoder_ffn_dim": 2048,
+ "decoder_layerdrop": 0.0,
+ "decoder_layers": 6,
+ "decoder_start_token_id": 50258,
+ "dropout": 0.0,
+ "encoder_attention_heads": 8,
+ "encoder_ffn_dim": 2048,
+ "encoder_layerdrop": 0.0,
+ "encoder_layers": 6,
+ "eos_token_id": 50257,
+ "forced_decoder_ids": null,
+ "id2label": {
+ "0": "01. alif_fathah",
+ "1": "02. alif_kasroh",
+ "2": "03. alif_dommah",
+ "3": "04. ba_fathah",
+ "4": "05. ba_kasroh",
+ "5": "06. ba_dommah",
+ "6": "07. ta_fathah",
+ "7": "08. ta_kasroh",
+ "8": "09. ta_dommah",
+ "9": "10. tsa_fathah",
+ "10": "11. tsa_kasroh",
+ "11": "12. tsa_dommah",
+ "12": "13. jim_fathah",
+ "13": "14. jim_kasroh",
+ "14": "15. jim_dommah",
+ "15": "16. hah_fathah",
+ "16": "17. hah_kasroh",
+ "17": "18. hah_dommah",
+ "18": "19. kha_fathah",
+ "19": "20. kha_kasroh",
+ "20": "21. kha_dommah",
+ "21": "22. dal_fathah",
+ "22": "23. dal_kasroh",
+ "23": "24. dal_dommah",
+ "24": "25. dzal_fathah",
+ "25": "26. dzal_kasroh",
+ "26": "27. dzal_dommah",
+ "27": "28. ra_fathah",
+ "28": "29. ra_kasroh",
+ "29": "30. ra_dommah",
+ "30": "31. zay_fathah",
+ "31": "32. zay_kasroh",
+ "32": "33. zay_dommah",
+ "33": "34. sin_fathah",
+ "34": "35. sin_kasroh",
+ "35": "36. sin_dommah",
+ "36": "37. shin_fathah",
+ "37": "38. shin_kasroh",
+ "38": "39. shin_dommah",
+ "39": "40. sad_fathah",
+ "40": "41. sad_kasroh",
+ "41": "42. sad_dommah",
+ "42": "43. dad_fathah",
+ "43": "44. dad_kasroh",
+ "44": "45. dad_dommah",
+ "45": "46. tah_fathah",
+ "46": "47. tah_kasroh",
+ "47": "48. tah_dommah",
+ "48": "49. zah_fathah",
+ "49": "50. zah_kasroh",
+ "50": "51. zah_dommah",
+ "51": "52. ain_fathah",
+ "52": "53. ain_kasroh",
+ "53": "54. ain_dommah",
+ "54": "55. ghaiin_fathah",
+ "55": "56. ghaiin_kasroh",
+ "56": "57. ghaiin_dommah",
+ "57": "58. fa_fathah",
+ "58": "59. fa_kasroh",
+ "59": "60. fa_dommah",
+ "60": "61. qaf_fathah",
+ "61": "62. qaf_kasroh",
+ "62": "63. qaf_dommah",
+ "63": "64. kaf_fathah",
+ "64": "65. kaf_kasroh",
+ "65": "66. kaf_dommah",
+ "66": "67. lam_fathah",
+ "67": "68. lam_kasroh",
+ "68": "69. lam_dommah",
+ "69": "70. mim_fathah",
+ "70": "71. mim_kasroh",
+ "71": "72. mim_dommah",
+ "72": "73. nun_fathah",
+ "73": "74. nun_kasroh",
+ "74": "75. nun_dommah",
+ "75": "76. Ha_fathah",
+ "76": "77. Ha_kasroh",
+ "77": "78. Ha_dommah",
+ "78": "79. waw_fathah",
+ "79": "80. waw_kasroh",
+ "80": "81. waw_dommah",
+ "81": "82. ya_fathah",
+ "82": "83. ya_kasroh",
+ "83": "84. ya_dommah"
+ },
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "label2id": {
+ "01. alif_fathah": 0,
+ "02. alif_kasroh": 1,
+ "03. alif_dommah": 2,
+ "04. ba_fathah": 3,
+ "05. ba_kasroh": 4,
+ "06. ba_dommah": 5,
+ "07. ta_fathah": 6,
+ "08. ta_kasroh": 7,
+ "09. ta_dommah": 8,
+ "10. tsa_fathah": 9,
+ "11. tsa_kasroh": 10,
+ "12. tsa_dommah": 11,
+ "13. jim_fathah": 12,
+ "14. jim_kasroh": 13,
+ "15. jim_dommah": 14,
+ "16. hah_fathah": 15,
+ "17. hah_kasroh": 16,
+ "18. hah_dommah": 17,
+ "19. kha_fathah": 18,
+ "20. kha_kasroh": 19,
+ "21. kha_dommah": 20,
+ "22. dal_fathah": 21,
+ "23. dal_kasroh": 22,
+ "24. dal_dommah": 23,
+ "25. dzal_fathah": 24,
+ "26. dzal_kasroh": 25,
+ "27. dzal_dommah": 26,
+ "28. ra_fathah": 27,
+ "29. ra_kasroh": 28,
+ "30. ra_dommah": 29,
+ "31. zay_fathah": 30,
+ "32. zay_kasroh": 31,
+ "33. zay_dommah": 32,
+ "34. sin_fathah": 33,
+ "35. sin_kasroh": 34,
+ "36. sin_dommah": 35,
+ "37. shin_fathah": 36,
+ "38. shin_kasroh": 37,
+ "39. shin_dommah": 38,
+ "40. sad_fathah": 39,
+ "41. sad_kasroh": 40,
+ "42. sad_dommah": 41,
+ "43. dad_fathah": 42,
+ "44. dad_kasroh": 43,
+ "45. dad_dommah": 44,
+ "46. tah_fathah": 45,
+ "47. tah_kasroh": 46,
+ "48. tah_dommah": 47,
+ "49. zah_fathah": 48,
+ "50. zah_kasroh": 49,
+ "51. zah_dommah": 50,
+ "52. ain_fathah": 51,
+ "53. ain_kasroh": 52,
+ "54. ain_dommah": 53,
+ "55. ghaiin_fathah": 54,
+ "56. ghaiin_kasroh": 55,
+ "57. ghaiin_dommah": 56,
+ "58. fa_fathah": 57,
+ "59. fa_kasroh": 58,
+ "60. fa_dommah": 59,
+ "61. qaf_fathah": 60,
+ "62. qaf_kasroh": 61,
+ "63. qaf_dommah": 62,
+ "64. kaf_fathah": 63,
+ "65. kaf_kasroh": 64,
+ "66. kaf_dommah": 65,
+ "67. lam_fathah": 66,
+ "68. lam_kasroh": 67,
+ "69. lam_dommah": 68,
+ "70. mim_fathah": 69,
+ "71. mim_kasroh": 70,
+ "72. mim_dommah": 71,
+ "73. nun_fathah": 72,
+ "74. nun_kasroh": 73,
+ "75. nun_dommah": 74,
+ "76. Ha_fathah": 75,
+ "77. Ha_kasroh": 76,
+ "78. Ha_dommah": 77,
+ "79. waw_fathah": 78,
+ "80. waw_kasroh": 79,
+ "81. waw_dommah": 80,
+ "82. ya_fathah": 81,
+ "83. ya_kasroh": 82,
+ "84. ya_dommah": 83
+ },
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.05,
+ "max_length": 1024,
+ "max_source_positions": 1500,
+ "max_target_positions": 448,
+ "median_filter_width": 7,
+ "model_type": "whisper",
+ "num_hidden_layers": 6,
+ "num_mel_bins": 80,
+ "pad_token_id": 50257,
+ "scale_embedding": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.46.2",
+ "use_cache": false,
+ "use_weighted_layer_sum": false,
+ "vocab_size": 51865
+ }
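
Note: this config describes the tarteel-ai/whisper-base-ar-quran encoder repurposed as a WhisperForAudioClassification head over 84 Arabic letter-plus-haraka classes (alif_fathah through ya_dommah). A minimal sketch of loading the checkpoint with transformers; the local path "./checkpoint" is an assumption standing in for a directory containing the files from this commit:

# Minimal sketch: load this checkpoint for audio classification.
# "./checkpoint" is a hypothetical local directory holding config.json and model.safetensors.
from transformers import AutoConfig, WhisperForAudioClassification

config = AutoConfig.from_pretrained("./checkpoint")                    # reads the config.json above
model = WhisperForAudioClassification.from_pretrained("./checkpoint")  # loads the weights

print(config.num_labels)     # 84 letter + haraka classes
print(config.id2label[0])    # "01. alif_fathah"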
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e90fde2fa09ae30a7aefb852773ccb2af009a7c378fb1c54a12b6ace02304779
+ size 82984576
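
Note: model.safetensors is tracked with Git LFS, so the diff shows only the pointer file (spec version, sha256 oid, byte size), not the weights themselves. A minimal sketch of verifying a fully downloaded weights file against this pointer; the local filename is an assumption:

# Minimal sketch: check a downloaded model.safetensors against the LFS pointer above.
# "model.safetensors" is assumed to be the resolved file, not the 3-line pointer.
import hashlib
import os

expected_oid = "e90fde2fa09ae30a7aefb852773ccb2af009a7c378fb1c54a12b6ace02304779"
expected_size = 82984576

path = "model.safetensors"
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size
assert digest.hexdigest() == expected_oid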
preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "chunk_length": 30,
+ "feature_extractor_type": "WhisperFeatureExtractor",
+ "feature_size": 80,
+ "hop_length": 160,
+ "n_fft": 400,
+ "n_samples": 480000,
+ "nb_max_frames": 3000,
+ "padding_side": "right",
+ "padding_value": 0.0,
+ "processor_class": "WhisperProcessor",
+ "return_attention_mask": false,
+ "sampling_rate": 16000
+ }
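
Note: this is the standard Whisper feature-extractor setup: 16 kHz audio padded to 30 s chunks (480000 samples), converted to 80-bin log-mel features over 3000 frames. A minimal sketch of producing input features, assuming the same hypothetical "./checkpoint" directory and a dummy one-second waveform:

# Minimal sketch: extract features as described by the preprocessor_config.json above.
import numpy as np
from transformers import WhisperFeatureExtractor

feature_extractor = WhisperFeatureExtractor.from_pretrained("./checkpoint")
waveform = np.zeros(16000, dtype=np.float32)   # 1 s of silence at 16 kHz (placeholder audio)

inputs = feature_extractor(waveform, sampling_rate=16000, return_tensors="np")
print(inputs.input_features.shape)             # (1, 80, 3000): 80 mel bins x 3000 frames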
runs/Dec02_22-50-35_7f2944423b7c/events.out.tfevents.1733179838.7f2944423b7c.321.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc92abf96de98f4bdb0df0074ecccf399016ef160538d1a40c5563c4a93e6e22
+ size 180419
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fde6556d113284f5f39ae01598b4ff7e15b293fb94119b2dc749b62565ed3c19
+ size 5304
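
Note: training_args.bin is the pickled TrainingArguments object that the transformers Trainer saves next to each checkpoint. A minimal sketch of inspecting it after download; weights_only=False is needed because the file is a pickled Python object rather than a tensor, and the installed transformers version must be able to unpickle it:

# Minimal sketch: inspect the saved TrainingArguments.
# Assumes "training_args.bin" has been downloaded locally.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)   # TrainingArguments
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)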