Hawoly18 committed on
Commit
490094d
1 Parent(s): 70dd6ca

End of training

Browse files
README.md CHANGED
@@ -1,7 +1,7 @@
1
  ---
2
  library_name: transformers
3
  license: mit
4
- base_model: microsoft/speecht5_tts
5
  tags:
6
  - generated_from_trainer
7
  model-index:
@@ -14,9 +14,9 @@ should probably proofread and complete it, then remove this comment. -->
14
 
15
  # speecht5_tts_wolof
16
 
17
- This model is a fine-tuned version of [microsoft/speecht5_tts](https://huggingface.co/microsoft/speecht5_tts) on an unknown dataset.
18
  It achieves the following results on the evaluation set:
19
- - Loss: 0.2993
20
 
21
  ## Model description
22
 
@@ -51,86 +51,86 @@ The following hyperparameters were used during training:
51
 
52
  | Training Loss | Epoch | Step | Validation Loss |
53
  |:-------------:|:-------:|:----:|:---------------:|
54
- | 1.1192 | 0.5952 | 50 | 0.4722 |
55
- | 0.9979 | 1.1905 | 100 | 0.4139 |
56
- | 0.8933 | 1.7857 | 150 | 0.3900 |
57
- | 0.8718 | 2.3810 | 200 | 0.3818 |
58
- | 0.8246 | 2.9762 | 250 | 0.3758 |
59
- | 0.8062 | 3.5714 | 300 | 0.3615 |
60
- | 0.7931 | 4.1667 | 350 | 0.3546 |
61
- | 0.756 | 4.7619 | 400 | 0.3469 |
62
- | 0.7462 | 5.3571 | 450 | 0.3393 |
63
- | 0.7311 | 5.9524 | 500 | 0.3358 |
64
- | 0.7298 | 6.5476 | 550 | 0.3315 |
65
- | 0.7234 | 7.1429 | 600 | 0.3300 |
66
- | 0.7199 | 7.7381 | 650 | 0.3287 |
67
- | 0.697 | 8.3333 | 700 | 0.3250 |
68
- | 0.7006 | 8.9286 | 750 | 0.3231 |
69
- | 0.7081 | 9.5238 | 800 | 0.3218 |
70
- | 0.6998 | 10.1190 | 850 | 0.3196 |
71
- | 0.7074 | 10.7143 | 900 | 0.3202 |
72
- | 0.6831 | 11.3095 | 950 | 0.3161 |
73
- | 0.6899 | 11.9048 | 1000 | 0.3169 |
74
- | 0.6935 | 12.5 | 1050 | 0.3160 |
75
- | 0.6778 | 13.0952 | 1100 | 0.3145 |
76
- | 0.6701 | 13.6905 | 1150 | 0.3122 |
77
- | 0.6792 | 14.2857 | 1200 | 0.3121 |
78
- | 0.6668 | 14.8810 | 1250 | 0.3117 |
79
- | 0.6682 | 15.4762 | 1300 | 0.3120 |
80
- | 0.6742 | 16.0714 | 1350 | 0.3103 |
81
- | 0.6759 | 16.6667 | 1400 | 0.3103 |
82
- | 0.6776 | 17.2619 | 1450 | 0.3100 |
83
- | 0.6699 | 17.8571 | 1500 | 0.3099 |
84
- | 0.6744 | 18.4524 | 1550 | 0.3092 |
85
- | 0.6636 | 19.0476 | 1600 | 0.3083 |
86
- | 0.6552 | 19.6429 | 1650 | 0.3067 |
87
- | 0.6618 | 20.2381 | 1700 | 0.3074 |
88
- | 0.6482 | 20.8333 | 1750 | 0.3059 |
89
- | 0.6684 | 21.4286 | 1800 | 0.3063 |
90
- | 0.6726 | 22.0238 | 1850 | 0.3060 |
91
- | 0.648 | 22.6190 | 1900 | 0.3053 |
92
- | 0.6542 | 23.2143 | 1950 | 0.3043 |
93
- | 0.6516 | 23.8095 | 2000 | 0.3050 |
94
- | 0.6654 | 24.4048 | 2050 | 0.3059 |
95
- | 0.6556 | 25.0 | 2100 | 0.3050 |
96
- | 0.6493 | 25.5952 | 2150 | 0.3051 |
97
- | 0.6504 | 26.1905 | 2200 | 0.3033 |
98
- | 0.6463 | 26.7857 | 2250 | 0.3033 |
99
- | 0.655 | 27.3810 | 2300 | 0.3028 |
100
- | 0.6474 | 27.9762 | 2350 | 0.3030 |
101
- | 0.6434 | 28.5714 | 2400 | 0.3022 |
102
- | 0.6427 | 29.1667 | 2450 | 0.3027 |
103
- | 0.6611 | 29.7619 | 2500 | 0.3030 |
104
- | 0.6536 | 30.3571 | 2550 | 0.3026 |
105
- | 0.6478 | 30.9524 | 2600 | 0.3011 |
106
- | 0.6471 | 31.5476 | 2650 | 0.3021 |
107
- | 0.6424 | 32.1429 | 2700 | 0.3014 |
108
- | 0.6424 | 32.7381 | 2750 | 0.3012 |
109
- | 0.645 | 33.3333 | 2800 | 0.3010 |
110
- | 0.6454 | 33.9286 | 2850 | 0.3010 |
111
- | 0.6373 | 34.5238 | 2900 | 0.3006 |
112
- | 0.6409 | 35.1190 | 2950 | 0.3005 |
113
- | 0.6382 | 35.7143 | 3000 | 0.3007 |
114
- | 0.6377 | 36.3095 | 3050 | 0.3005 |
115
- | 0.643 | 36.9048 | 3100 | 0.3007 |
116
- | 0.6383 | 37.5 | 3150 | 0.2999 |
117
- | 0.6396 | 38.0952 | 3200 | 0.2998 |
118
- | 0.6413 | 38.6905 | 3250 | 0.3006 |
119
- | 0.6368 | 39.2857 | 3300 | 0.2998 |
120
- | 0.6452 | 39.8810 | 3350 | 0.3006 |
121
- | 0.6425 | 40.4762 | 3400 | 0.3000 |
122
- | 0.6406 | 41.0714 | 3450 | 0.3001 |
123
- | 0.657 | 41.6667 | 3500 | 0.2996 |
124
- | 0.6353 | 42.2619 | 3550 | 0.2998 |
125
- | 0.6369 | 42.8571 | 3600 | 0.2999 |
126
- | 0.6314 | 43.4524 | 3650 | 0.2997 |
127
- | 0.634 | 44.0476 | 3700 | 0.2992 |
128
- | 0.6506 | 44.6429 | 3750 | 0.3010 |
129
- | 0.63 | 45.2381 | 3800 | 0.2993 |
130
- | 0.6395 | 45.8333 | 3850 | 0.2997 |
131
- | 0.6393 | 46.4286 | 3900 | 0.2983 |
132
- | 0.6344 | 47.0238 | 3950 | 0.2998 |
133
- | 0.6432 | 47.6190 | 4000 | 0.2993 |
134
 
135
 
136
  ### Framework versions
 
1
  ---
2
  library_name: transformers
3
  license: mit
4
+ base_model: Moustapha91/speecht5_finetuned_wo_v1
5
  tags:
6
  - generated_from_trainer
7
  model-index:
 
14
 
15
  # speecht5_tts_wolof
16
 
17
+ This model is a fine-tuned version of [Moustapha91/speecht5_finetuned_wo_v1](https://huggingface.co/Moustapha91/speecht5_finetuned_wo_v1) on an unknown dataset.
18
  It achieves the following results on the evaluation set:
19
+ - Loss: 0.2943
20
 
21
  ## Model description
22
 
 
51
 
52
  | Training Loss | Epoch | Step | Validation Loss |
53
  |:-------------:|:-------:|:----:|:---------------:|
54
+ | 0.9404 | 0.5952 | 50 | 0.4362 |
55
+ | 0.8342 | 1.1905 | 100 | 0.3784 |
56
+ | 0.7869 | 1.7857 | 150 | 0.3627 |
57
+ | 0.7841 | 2.3810 | 200 | 0.3546 |
58
+ | 0.762 | 2.9762 | 250 | 0.3489 |
59
+ | 0.7487 | 3.5714 | 300 | 0.3431 |
60
+ | 0.7423 | 4.1667 | 350 | 0.3392 |
61
+ | 0.7211 | 4.7619 | 400 | 0.3362 |
62
+ | 0.7147 | 5.3571 | 450 | 0.3304 |
63
+ | 0.7097 | 5.9524 | 500 | 0.3266 |
64
+ | 0.7058 | 6.5476 | 550 | 0.3223 |
65
+ | 0.6929 | 7.1429 | 600 | 0.3198 |
66
+ | 0.6887 | 7.7381 | 650 | 0.3152 |
67
+ | 0.664 | 8.3333 | 700 | 0.3131 |
68
+ | 0.6736 | 8.9286 | 750 | 0.3115 |
69
+ | 0.6767 | 9.5238 | 800 | 0.3105 |
70
+ | 0.6722 | 10.1190 | 850 | 0.3095 |
71
+ | 0.6702 | 10.7143 | 900 | 0.3075 |
72
+ | 0.6615 | 11.3095 | 950 | 0.3058 |
73
+ | 0.6654 | 11.9048 | 1000 | 0.3063 |
74
+ | 0.6682 | 12.5 | 1050 | 0.3083 |
75
+ | 0.6607 | 13.0952 | 1100 | 0.3051 |
76
+ | 0.6514 | 13.6905 | 1150 | 0.3042 |
77
+ | 0.6605 | 14.2857 | 1200 | 0.3041 |
78
+ | 0.6509 | 14.8810 | 1250 | 0.3028 |
79
+ | 0.6556 | 15.4762 | 1300 | 0.3025 |
80
+ | 0.6477 | 16.0714 | 1350 | 0.3019 |
81
+ | 0.6489 | 16.6667 | 1400 | 0.3011 |
82
+ | 0.6567 | 17.2619 | 1450 | 0.3007 |
83
+ | 0.6533 | 17.8571 | 1500 | 0.3016 |
84
+ | 0.6489 | 18.4524 | 1550 | 0.3009 |
85
+ | 0.6454 | 19.0476 | 1600 | 0.3002 |
86
+ | 0.6354 | 19.6429 | 1650 | 0.2992 |
87
+ | 0.645 | 20.2381 | 1700 | 0.2996 |
88
+ | 0.6376 | 20.8333 | 1750 | 0.2993 |
89
+ | 0.6472 | 21.4286 | 1800 | 0.2991 |
90
+ | 0.6571 | 22.0238 | 1850 | 0.2995 |
91
+ | 0.6333 | 22.6190 | 1900 | 0.2986 |
92
+ | 0.6323 | 23.2143 | 1950 | 0.2973 |
93
+ | 0.6314 | 23.8095 | 2000 | 0.2980 |
94
+ | 0.6437 | 24.4048 | 2050 | 0.2980 |
95
+ | 0.6383 | 25.0 | 2100 | 0.2977 |
96
+ | 0.6314 | 25.5952 | 2150 | 0.2978 |
97
+ | 0.6309 | 26.1905 | 2200 | 0.2965 |
98
+ | 0.6365 | 26.7857 | 2250 | 0.2965 |
99
+ | 0.6406 | 27.3810 | 2300 | 0.2966 |
100
+ | 0.6286 | 27.9762 | 2350 | 0.2968 |
101
+ | 0.6279 | 28.5714 | 2400 | 0.2963 |
102
+ | 0.6304 | 29.1667 | 2450 | 0.2967 |
103
+ | 0.6457 | 29.7619 | 2500 | 0.2960 |
104
+ | 0.6372 | 30.3571 | 2550 | 0.2958 |
105
+ | 0.6338 | 30.9524 | 2600 | 0.2952 |
106
+ | 0.6325 | 31.5476 | 2650 | 0.2956 |
107
+ | 0.6313 | 32.1429 | 2700 | 0.2951 |
108
+ | 0.6345 | 32.7381 | 2750 | 0.2956 |
109
+ | 0.6289 | 33.3333 | 2800 | 0.2949 |
110
+ | 0.6264 | 33.9286 | 2850 | 0.2947 |
111
+ | 0.6302 | 34.5238 | 2900 | 0.2952 |
112
+ | 0.6248 | 35.1190 | 2950 | 0.2945 |
113
+ | 0.626 | 35.7143 | 3000 | 0.2945 |
114
+ | 0.6248 | 36.3095 | 3050 | 0.2947 |
115
+ | 0.6306 | 36.9048 | 3100 | 0.2943 |
116
+ | 0.6258 | 37.5 | 3150 | 0.2944 |
117
+ | 0.6318 | 38.0952 | 3200 | 0.2947 |
118
+ | 0.6279 | 38.6905 | 3250 | 0.2947 |
119
+ | 0.628 | 39.2857 | 3300 | 0.2940 |
120
+ | 0.632 | 39.8810 | 3350 | 0.2947 |
121
+ | 0.6259 | 40.4762 | 3400 | 0.2939 |
122
+ | 0.6305 | 41.0714 | 3450 | 0.2943 |
123
+ | 0.6381 | 41.6667 | 3500 | 0.2939 |
124
+ | 0.6341 | 42.2619 | 3550 | 0.2942 |
125
+ | 0.6163 | 42.8571 | 3600 | 0.2937 |
126
+ | 0.6256 | 43.4524 | 3650 | 0.2934 |
127
+ | 0.628 | 44.0476 | 3700 | 0.2934 |
128
+ | 0.6371 | 44.6429 | 3750 | 0.2945 |
129
+ | 0.6209 | 45.2381 | 3800 | 0.2930 |
130
+ | 0.6285 | 45.8333 | 3850 | 0.2939 |
131
+ | 0.6309 | 46.4286 | 3900 | 0.2938 |
132
+ | 0.6216 | 47.0238 | 3950 | 0.2935 |
133
+ | 0.6352 | 47.6190 | 4000 | 0.2943 |
134
 
135
 
136
  ### Framework versions
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "microsoft/speecht5_tts",
3
  "activation_dropout": 0.1,
4
  "apply_spec_augment": true,
5
  "architectures": [
 
1
  {
2
+ "_name_or_path": "Moustapha91/speecht5_finetuned_wo_v1",
3
  "activation_dropout": 0.1,
4
  "apply_spec_augment": true,
5
  "architectures": [
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:0ab12d28f1bf0a8310298b67604a638b4ca422315284e42a9019ece275e12e30
3
  size 577789320
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9e6457e6ebda21fba8e5144328b1ebefb6623447e9ac53b4059a90733b6d74f2
3
  size 577789320
runs/Oct25_16-03-50_67f7e6f44b7d/events.out.tfevents.1729872238.67f7e6f44b7d.30.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f349b4e25c5aac240818e9bf541c4663bc4b539726307f6c302a7c8fdbbd1b93
3
+ size 6665
runs/Oct25_16-05-35_67f7e6f44b7d/events.out.tfevents.1729872336.67f7e6f44b7d.30.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:52b280d778a2a6f3212c408d0df4b6479314039780f13d67301291c3c7ad4ae3
3
+ size 62429
special_tokens_map.json CHANGED
@@ -1,6 +1,18 @@
1
  {
2
- "bos_token": "<s>",
3
- "eos_token": "</s>",
 
 
 
 
 
 
 
 
 
 
 
 
4
  "mask_token": {
5
  "content": "<mask>",
6
  "lstrip": true,
@@ -8,6 +20,18 @@
8
  "rstrip": false,
9
  "single_word": false
10
  },
11
- "pad_token": "<pad>",
12
- "unk_token": "<unk>"
 
 
 
 
 
 
 
 
 
 
 
 
13
  }
 
1
  {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "</s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
  "mask_token": {
17
  "content": "<mask>",
18
  "lstrip": true,
 
20
  "rstrip": false,
21
  "single_word": false
22
  },
23
+ "pad_token": {
24
+ "content": "<pad>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "unk_token": {
31
+ "content": "<unk>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false
36
+ }
37
  }
tokenizer_config.json CHANGED
@@ -50,7 +50,7 @@
50
  }
51
  },
52
  "bos_token": "<s>",
53
- "clean_up_tokenization_spaces": false,
54
  "eos_token": "</s>",
55
  "mask_token": "<mask>",
56
  "model_max_length": 600,
 
50
  }
51
  },
52
  "bos_token": "<s>",
53
+ "clean_up_tokenization_spaces": true,
54
  "eos_token": "</s>",
55
  "mask_token": "<mask>",
56
  "model_max_length": 600,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:eac6e4b1088d7c2285789c1414175ef7687b4e17cc077befaaf7f2aa24f8ac8d
3
  size 5432
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6db3115f6704d102ae510ef6826c25e34238aabbf19d8a9142dafabd1057646d
3
  size 5432