Leo97 committed
Commit 7567ae6 · 1 Parent(s): 8799d5f

update model card README.md

Files changed (1)
  1. README.md +56 -105
README.md CHANGED
@@ -1,4 +1,5 @@
  ---
+ license: mit
  tags:
  - generated_from_trainer
  metrics:
@@ -14,11 +15,11 @@ should probably proofread and complete it, then remove this comment. -->

  # KcELECTRA-small-v2022-finetuned-in-vehicle

- This model was trained from scratch on an unknown dataset.
+ This model is a fine-tuned version of [beomi/KcELECTRA-small-v2022](https://huggingface.co/beomi/KcELECTRA-small-v2022) on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.3512
- - Accuracy: 0.9267
- - F1: 0.9181
+ - Loss: 0.5014
+ - Accuracy: 0.92
+ - F1: 0.9010

  ## Model description

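For reference, a minimal inference sketch with Hugging Face Transformers for the checkpoint described above. The repository id and the single-label text-classification head are inferred from this card rather than stated in it, and the example utterance and printed label are purely illustrative.

```python
from transformers import pipeline

# Hypothetical repo id, inferred from the commit author and the model name in this card.
model_id = "Leo97/KcELECTRA-small-v2022-finetuned-in-vehicle"

# The accuracy/F1 metrics above suggest a single-label classification head;
# the label set depends on the (unnamed) in-vehicle dataset.
classifier = pipeline("text-classification", model=model_id)

# Example Korean in-vehicle utterance ("turn down the air conditioner temperature").
print(classifier("에어컨 온도 낮춰 줘"))
# -> e.g. [{'label': 'LABEL_0', 'score': 0.97}]  (actual labels depend on the dataset)
```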
@@ -43,112 +44,62 @@ The following hyperparameters were used during training:
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: linear
- - num_epochs: 100
+ - num_epochs: 50

  ### Training results

  | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 |
  |:-------------:|:-----:|:----:|:---------------:|:--------:|:------:|
- | 2.257 | 1.0 | 38 | 2.1802 | 0.41 | 0.2474 |
- | 2.1748 | 2.0 | 76 | 2.0939 | 0.42 | 0.2779 |
- | 2.1017 | 3.0 | 114 | 2.0072 | 0.42 | 0.2938 |
- | 2.0129 | 4.0 | 152 | 1.9214 | 0.4267 | 0.3036 |
- | 1.9357 | 5.0 | 190 | 1.8354 | 0.44 | 0.3233 |
- | 1.8433 | 6.0 | 228 | 1.7492 | 0.5133 | 0.4003 |
- | 1.7616 | 7.0 | 266 | 1.6635 | 0.5833 | 0.4799 |
- | 1.6779 | 8.0 | 304 | 1.5743 | 0.57 | 0.4758 |
- | 1.602 | 9.0 | 342 | 1.4930 | 0.65 | 0.5778 |
- | 1.5121 | 10.0 | 380 | 1.4169 | 0.7 | 0.6336 |
- | 1.4362 | 11.0 | 418 | 1.3440 | 0.7267 | 0.6620 |
- | 1.3681 | 12.0 | 456 | 1.2720 | 0.73 | 0.6702 |
- | 1.2934 | 13.0 | 494 | 1.2106 | 0.7333 | 0.6734 |
- | 1.2141 | 14.0 | 532 | 1.1430 | 0.7467 | 0.6874 |
- | 1.1599 | 15.0 | 570 | 1.0846 | 0.7533 | 0.6975 |
- | 1.1105 | 16.0 | 608 | 1.0311 | 0.76 | 0.7054 |
- | 1.0367 | 17.0 | 646 | 0.9794 | 0.7633 | 0.7120 |
- | 0.9879 | 18.0 | 684 | 0.9321 | 0.7767 | 0.7247 |
- | 0.9396 | 19.0 | 722 | 0.8882 | 0.7733 | 0.7204 |
- | 0.8891 | 20.0 | 760 | 0.8504 | 0.7967 | 0.7496 |
- | 0.8522 | 21.0 | 798 | 0.8135 | 0.8 | 0.7531 |
- | 0.797 | 22.0 | 836 | 0.7806 | 0.8 | 0.7540 |
- | 0.7532 | 23.0 | 874 | 0.7495 | 0.8233 | 0.7857 |
- | 0.7381 | 24.0 | 912 | 0.7233 | 0.8233 | 0.7895 |
- | 0.6876 | 25.0 | 950 | 0.6939 | 0.8333 | 0.8011 |
- | 0.672 | 26.0 | 988 | 0.6655 | 0.8367 | 0.8028 |
- | 0.6318 | 27.0 | 1026 | 0.6441 | 0.8433 | 0.8005 |
- | 0.6093 | 28.0 | 1064 | 0.6241 | 0.85 | 0.8116 |
- | 0.5908 | 29.0 | 1102 | 0.6047 | 0.8533 | 0.8150 |
- | 0.5509 | 30.0 | 1140 | 0.5900 | 0.86 | 0.8244 |
- | 0.5316 | 31.0 | 1178 | 0.5696 | 0.8633 | 0.8267 |
- | 0.506 | 32.0 | 1216 | 0.5611 | 0.87 | 0.8433 |
- | 0.4912 | 33.0 | 1254 | 0.5352 | 0.8733 | 0.8464 |
- | 0.4707 | 34.0 | 1292 | 0.5234 | 0.8967 | 0.8711 |
- | 0.4527 | 35.0 | 1330 | 0.5121 | 0.8933 | 0.8684 |
- | 0.4348 | 36.0 | 1368 | 0.4920 | 0.9033 | 0.8848 |
- | 0.3974 | 37.0 | 1406 | 0.4881 | 0.9033 | 0.8841 |
- | 0.3817 | 38.0 | 1444 | 0.4744 | 0.91 | 0.8953 |
- | 0.3665 | 39.0 | 1482 | 0.4664 | 0.9167 | 0.9040 |
- | 0.3546 | 40.0 | 1520 | 0.4631 | 0.92 | 0.9074 |
- | 0.3352 | 41.0 | 1558 | 0.4497 | 0.9167 | 0.9040 |
- | 0.3372 | 42.0 | 1596 | 0.4432 | 0.9233 | 0.9113 |
- | 0.3054 | 43.0 | 1634 | 0.4299 | 0.92 | 0.9078 |
- | 0.3032 | 44.0 | 1672 | 0.4217 | 0.9233 | 0.9130 |
- | 0.2973 | 45.0 | 1710 | 0.4195 | 0.9233 | 0.9133 |
- | 0.2805 | 46.0 | 1748 | 0.4140 | 0.92 | 0.9078 |
- | 0.2725 | 47.0 | 1786 | 0.4074 | 0.9233 | 0.9113 |
- | 0.2579 | 48.0 | 1824 | 0.4057 | 0.9267 | 0.9146 |
- | 0.2477 | 49.0 | 1862 | 0.4078 | 0.92 | 0.9082 |
- | 0.2485 | 50.0 | 1900 | 0.3917 | 0.92 | 0.9089 |
- | 0.24 | 51.0 | 1938 | 0.3942 | 0.92 | 0.9082 |
- | 0.2279 | 52.0 | 1976 | 0.3773 | 0.9267 | 0.9169 |
- | 0.2148 | 53.0 | 2014 | 0.3794 | 0.92 | 0.9086 |
- | 0.2077 | 54.0 | 2052 | 0.3789 | 0.92 | 0.9082 |
- | 0.2061 | 55.0 | 2090 | 0.3770 | 0.9233 | 0.9135 |
- | 0.204 | 56.0 | 2128 | 0.3779 | 0.9267 | 0.9165 |
- | 0.191 | 57.0 | 2166 | 0.3713 | 0.92 | 0.9103 |
- | 0.1914 | 58.0 | 2204 | 0.3731 | 0.9233 | 0.9133 |
- | 0.1789 | 59.0 | 2242 | 0.3682 | 0.9233 | 0.9132 |
- | 0.1808 | 60.0 | 2280 | 0.3650 | 0.9267 | 0.9167 |
- | 0.1677 | 61.0 | 2318 | 0.3603 | 0.9233 | 0.9132 |
- | 0.1747 | 62.0 | 2356 | 0.3589 | 0.9233 | 0.9132 |
- | 0.1684 | 63.0 | 2394 | 0.3590 | 0.9167 | 0.9069 |
- | 0.159 | 64.0 | 2432 | 0.3573 | 0.9233 | 0.9135 |
- | 0.1535 | 65.0 | 2470 | 0.3618 | 0.92 | 0.9101 |
- | 0.1563 | 66.0 | 2508 | 0.3632 | 0.92 | 0.9098 |
- | 0.1415 | 67.0 | 2546 | 0.3543 | 0.9233 | 0.9132 |
- | 0.1435 | 68.0 | 2584 | 0.3522 | 0.92 | 0.9103 |
- | 0.1421 | 69.0 | 2622 | 0.3552 | 0.9233 | 0.9135 |
- | 0.1388 | 70.0 | 2660 | 0.3558 | 0.93 | 0.9196 |
- | 0.1382 | 71.0 | 2698 | 0.3536 | 0.9267 | 0.9182 |
- | 0.1326 | 72.0 | 2736 | 0.3429 | 0.9233 | 0.9135 |
- | 0.1303 | 73.0 | 2774 | 0.3466 | 0.9267 | 0.9169 |
- | 0.1262 | 74.0 | 2812 | 0.3477 | 0.9233 | 0.9140 |
- | 0.1247 | 75.0 | 2850 | 0.3458 | 0.9233 | 0.9140 |
- | 0.1198 | 76.0 | 2888 | 0.3518 | 0.9267 | 0.9165 |
- | 0.1175 | 77.0 | 2926 | 0.3517 | 0.9233 | 0.9135 |
- | 0.119 | 78.0 | 2964 | 0.3531 | 0.9267 | 0.9181 |
- | 0.1134 | 79.0 | 3002 | 0.3506 | 0.9267 | 0.9181 |
- | 0.113 | 80.0 | 3040 | 0.3501 | 0.9233 | 0.9135 |
- | 0.1167 | 81.0 | 3078 | 0.3486 | 0.9233 | 0.9135 |
- | 0.1115 | 82.0 | 3116 | 0.3446 | 0.92 | 0.9103 |
- | 0.111 | 83.0 | 3154 | 0.3494 | 0.9233 | 0.9135 |
- | 0.107 | 84.0 | 3192 | 0.3504 | 0.9233 | 0.9135 |
- | 0.1074 | 85.0 | 3230 | 0.3494 | 0.9233 | 0.9135 |
- | 0.1092 | 86.0 | 3268 | 0.3446 | 0.92 | 0.9103 |
- | 0.102 | 87.0 | 3306 | 0.3478 | 0.9233 | 0.9135 |
- | 0.1067 | 88.0 | 3344 | 0.3451 | 0.92 | 0.9108 |
- | 0.1073 | 89.0 | 3382 | 0.3477 | 0.9267 | 0.9181 |
- | 0.1005 | 90.0 | 3420 | 0.3475 | 0.9233 | 0.9135 |
- | 0.0987 | 91.0 | 3458 | 0.3495 | 0.9233 | 0.9135 |
- | 0.1028 | 92.0 | 3496 | 0.3501 | 0.9233 | 0.9135 |
- | 0.1027 | 93.0 | 3534 | 0.3498 | 0.9233 | 0.9135 |
- | 0.0998 | 94.0 | 3572 | 0.3505 | 0.9233 | 0.9135 |
- | 0.1 | 95.0 | 3610 | 0.3511 | 0.9233 | 0.9135 |
- | 0.1013 | 96.0 | 3648 | 0.3509 | 0.9233 | 0.9135 |
- | 0.1014 | 97.0 | 3686 | 0.3506 | 0.9267 | 0.9181 |
- | 0.1034 | 98.0 | 3724 | 0.3509 | 0.9267 | 0.9181 |
- | 0.0958 | 99.0 | 3762 | 0.3512 | 0.9267 | 0.9181 |
- | 0.1029 | 100.0 | 3800 | 0.3512 | 0.9267 | 0.9181 |
+ | 2.6201 | 1.0 | 38 | 2.5909 | 0.18 | 0.0549 |
+ | 2.5788 | 2.0 | 76 | 2.5466 | 0.18 | 0.0549 |
+ | 2.5397 | 3.0 | 114 | 2.4976 | 0.18 | 0.0549 |
+ | 2.4886 | 4.0 | 152 | 2.4178 | 0.3833 | 0.2516 |
+ | 2.4062 | 5.0 | 190 | 2.3038 | 0.4267 | 0.2575 |
+ | 2.3015 | 6.0 | 228 | 2.1798 | 0.4333 | 0.2746 |
+ | 2.1868 | 7.0 | 266 | 2.0589 | 0.52 | 0.4121 |
+ | 2.0713 | 8.0 | 304 | 1.9436 | 0.6133 | 0.5349 |
+ | 1.9763 | 9.0 | 342 | 1.8359 | 0.66 | 0.6048 |
+ | 1.8715 | 10.0 | 380 | 1.7361 | 0.72 | 0.6863 |
+ | 1.7755 | 11.0 | 418 | 1.6402 | 0.7233 | 0.6891 |
+ | 1.6873 | 12.0 | 456 | 1.5496 | 0.81 | 0.7774 |
+ | 1.5828 | 13.0 | 494 | 1.4681 | 0.8433 | 0.8089 |
+ | 1.5222 | 14.0 | 532 | 1.3870 | 0.84 | 0.8038 |
+ | 1.4397 | 15.0 | 570 | 1.3148 | 0.88 | 0.8554 |
+ | 1.3673 | 16.0 | 608 | 1.2461 | 0.89 | 0.8705 |
+ | 1.3047 | 17.0 | 646 | 1.1801 | 0.91 | 0.8903 |
+ | 1.2232 | 18.0 | 684 | 1.1209 | 0.9033 | 0.8844 |
+ | 1.1661 | 19.0 | 722 | 1.0618 | 0.9 | 0.8817 |
+ | 1.1104 | 20.0 | 760 | 1.0207 | 0.89 | 0.8660 |
+ | 1.0572 | 21.0 | 798 | 0.9679 | 0.8933 | 0.8725 |
+ | 1.0191 | 22.0 | 836 | 0.9243 | 0.8933 | 0.8722 |
+ | 0.9548 | 23.0 | 874 | 0.8850 | 0.8967 | 0.8757 |
+ | 0.9364 | 24.0 | 912 | 0.8429 | 0.9 | 0.8790 |
+ | 0.871 | 25.0 | 950 | 0.8094 | 0.8933 | 0.8724 |
+ | 0.8629 | 26.0 | 988 | 0.7773 | 0.8967 | 0.8746 |
+ | 0.7992 | 27.0 | 1026 | 0.7540 | 0.8933 | 0.8735 |
+ | 0.7948 | 28.0 | 1064 | 0.7234 | 0.8933 | 0.8704 |
+ | 0.7455 | 29.0 | 1102 | 0.6967 | 0.8967 | 0.8749 |
+ | 0.7236 | 30.0 | 1140 | 0.6760 | 0.91 | 0.8881 |
+ | 0.6905 | 31.0 | 1178 | 0.6519 | 0.9033 | 0.8832 |
+ | 0.6857 | 32.0 | 1216 | 0.6396 | 0.9133 | 0.8944 |
+ | 0.6526 | 33.0 | 1254 | 0.6155 | 0.9167 | 0.8963 |
+ | 0.6294 | 34.0 | 1292 | 0.6025 | 0.9033 | 0.8835 |
+ | 0.6179 | 35.0 | 1330 | 0.5909 | 0.9167 | 0.8970 |
+ | 0.6022 | 36.0 | 1368 | 0.5757 | 0.9133 | 0.8934 |
+ | 0.5753 | 37.0 | 1406 | 0.5610 | 0.92 | 0.8999 |
+ | 0.561 | 38.0 | 1444 | 0.5536 | 0.9167 | 0.8970 |
+ | 0.553 | 39.0 | 1482 | 0.5417 | 0.92 | 0.8998 |
+ | 0.5395 | 40.0 | 1520 | 0.5367 | 0.92 | 0.9018 |
+ | 0.5402 | 41.0 | 1558 | 0.5276 | 0.92 | 0.9018 |
+ | 0.5266 | 42.0 | 1596 | 0.5238 | 0.92 | 0.9010 |
+ | 0.5178 | 43.0 | 1634 | 0.5182 | 0.92 | 0.9018 |
+ | 0.52 | 44.0 | 1672 | 0.5129 | 0.92 | 0.9010 |
+ | 0.495 | 45.0 | 1710 | 0.5069 | 0.9167 | 0.8981 |
+ | 0.5124 | 46.0 | 1748 | 0.5054 | 0.9167 | 0.8981 |
+ | 0.5034 | 47.0 | 1786 | 0.5038 | 0.92 | 0.9018 |
+ | 0.5108 | 48.0 | 1824 | 0.5020 | 0.92 | 0.9018 |
+ | 0.483 | 49.0 | 1862 | 0.5016 | 0.92 | 0.9010 |
+ | 0.4974 | 50.0 | 1900 | 0.5014 | 0.92 | 0.9010 |


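The hyperparameters above map directly onto Hugging Face `TrainingArguments`. A minimal sketch of a matching `Trainer` setup follows, assuming the base checkpoint named in this card; the label count, dataset splits, F1 averaging, and any hyperparameter not shown in this excerpt (learning rate, batch sizes) are placeholders or assumptions.

```python
import numpy as np
from sklearn.metrics import accuracy_score, f1_score
from transformers import (AutoModelForSequenceClassification, AutoTokenizer,
                          Trainer, TrainingArguments)

base = "beomi/KcELECTRA-small-v2022"   # base checkpoint named in this card
num_labels = 10                        # placeholder: the label count is not stated in the card

tokenizer = AutoTokenizer.from_pretrained(base)
model = AutoModelForSequenceClassification.from_pretrained(base, num_labels=num_labels)

def compute_metrics(eval_pred):
    # Accuracy and F1 as reported in the results table (macro averaging is an assumption).
    logits, labels = eval_pred
    preds = np.argmax(logits, axis=-1)
    return {"accuracy": accuracy_score(labels, preds),
            "f1": f1_score(labels, preds, average="macro")}

args = TrainingArguments(
    output_dir="KcELECTRA-small-v2022-finetuned-in-vehicle",
    num_train_epochs=50,             # from the card (changed from 100 in this commit)
    lr_scheduler_type="linear",      # from the card
    seed=42,                         # from the card
    evaluation_strategy="epoch",     # the table reports validation metrics once per epoch
    # Adam betas=(0.9,0.999) and epsilon=1e-08 from the card match the TrainingArguments defaults.
    # Learning rate and batch sizes are listed earlier in the card and are not shown in this diff.
)

# `tokenized` stands in for your own tokenized DatasetDict with "train"/"validation"
# splits; the card does not name the dataset the model was trained on.
trainer = Trainer(
    model=model,
    args=args,
    train_dataset=tokenized["train"],
    eval_dataset=tokenized["validation"],
    tokenizer=tokenizer,
    compute_metrics=compute_metrics,
)
trainer.train()
```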
 
  ### Framework versions