Initial Commit
- README.md +112 -60
- config.json +1 -1
- eval_result_ner.json +1 -1
- model.safetensors +1 -1
- training_args.bin +1 -1
README.md
CHANGED
@@ -1,14 +1,14 @@
 ---
-base_model: haryoaw/scenario-TCR-NER_data-univner_half
 library_name: transformers
 license: mit
+base_model: haryoaw/scenario-TCR-NER_data-univner_full
+tags:
+- generated_from_trainer
 metrics:
 - precision
 - recall
 - f1
 - accuracy
-tags:
-- generated_from_trainer
 model-index:
 - name: scenario-kd-scr-ner-full_data-univner_full55
   results: []
@@ -19,13 +19,13 @@ should probably proofread and complete it, then remove this comment. -->
 
 # scenario-kd-scr-ner-full_data-univner_full55
 
-This model is a fine-tuned version of [haryoaw/scenario-TCR-NER_data-
+This model is a fine-tuned version of [haryoaw/scenario-TCR-NER_data-univner_full](https://huggingface.co/haryoaw/scenario-TCR-NER_data-univner_full) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 1.
-- Precision: 0.
-- Recall: 0.
-- F1: 0.
-- Accuracy: 0.
+- Loss: 1.1087
+- Precision: 0.6202
+- Recall: 0.5509
+- F1: 0.5835
+- Accuracy: 0.9594
 
 ## Model description
 
@@ -56,57 +56,109 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
 |:-------------:|:-------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
-[51 earlier result rows removed; their values are truncated in this diff view]
+| 2.7888 | 0.2910 | 500 | 2.4429 | 0.2941 | 0.0087 | 0.0168 | 0.9242 |
+| 2.1116 | 0.5821 | 1000 | 2.1355 | 0.2870 | 0.1215 | 0.1707 | 0.9277 |
+| 1.9109 | 0.8731 | 1500 | 2.0239 | 0.2850 | 0.1580 | 0.2033 | 0.9306 |
+| 1.771 | 1.1641 | 2000 | 1.9268 | 0.4072 | 0.1535 | 0.2230 | 0.9319 |
+| 1.633 | 1.4552 | 2500 | 1.8866 | 0.2862 | 0.2658 | 0.2756 | 0.9336 |
+| 1.5872 | 1.7462 | 3000 | 1.7527 | 0.3273 | 0.2818 | 0.3028 | 0.9372 |
+| 1.4833 | 2.0373 | 3500 | 1.7643 | 0.3558 | 0.2464 | 0.2912 | 0.9377 |
+| 1.3965 | 2.3283 | 4000 | 1.6664 | 0.3675 | 0.3353 | 0.3507 | 0.9394 |
+| 1.3237 | 2.6193 | 4500 | 1.6537 | 0.3445 | 0.3481 | 0.3463 | 0.9376 |
+| 1.2791 | 2.9104 | 5000 | 1.5552 | 0.3960 | 0.3758 | 0.3857 | 0.9431 |
+| 1.2023 | 3.2014 | 5500 | 1.5571 | 0.4338 | 0.3855 | 0.4083 | 0.9442 |
+| 1.1456 | 3.4924 | 6000 | 1.4999 | 0.4258 | 0.3998 | 0.4124 | 0.9457 |
+| 1.1315 | 3.7835 | 6500 | 1.4824 | 0.4244 | 0.3741 | 0.3977 | 0.9458 |
+| 1.0693 | 4.0745 | 7000 | 1.4836 | 0.4407 | 0.3842 | 0.4105 | 0.9461 |
+| 1.0052 | 4.3655 | 7500 | 1.4413 | 0.4322 | 0.4275 | 0.4298 | 0.9472 |
+| 0.9737 | 4.6566 | 8000 | 1.4101 | 0.4634 | 0.4161 | 0.4385 | 0.9491 |
+| 0.9521 | 4.9476 | 8500 | 1.3865 | 0.4476 | 0.4214 | 0.4341 | 0.9491 |
+| 0.8818 | 5.2386 | 9000 | 1.4115 | 0.4612 | 0.4232 | 0.4414 | 0.9494 |
+| 0.8396 | 5.5297 | 9500 | 1.3702 | 0.4645 | 0.4470 | 0.4556 | 0.9501 |
+| 0.8456 | 5.8207 | 10000 | 1.3441 | 0.5076 | 0.4252 | 0.4627 | 0.9508 |
+| 0.829 | 6.1118 | 10500 | 1.3357 | 0.4922 | 0.4718 | 0.4818 | 0.9518 |
+| 0.7611 | 6.4028 | 11000 | 1.3320 | 0.5100 | 0.4548 | 0.4808 | 0.9522 |
+| 0.7475 | 6.6938 | 11500 | 1.3570 | 0.4852 | 0.4953 | 0.4902 | 0.9531 |
+| 0.7362 | 6.9849 | 12000 | 1.3154 | 0.5039 | 0.4929 | 0.4983 | 0.9529 |
+| 0.6776 | 7.2759 | 12500 | 1.3044 | 0.5099 | 0.4884 | 0.4989 | 0.9534 |
+| 0.6701 | 7.5669 | 13000 | 1.2921 | 0.5229 | 0.4675 | 0.4936 | 0.9541 |
+| 0.6586 | 7.8580 | 13500 | 1.2670 | 0.5185 | 0.5067 | 0.5126 | 0.9548 |
+| 0.6284 | 8.1490 | 14000 | 1.2752 | 0.5346 | 0.4979 | 0.5156 | 0.9548 |
+| 0.6025 | 8.4400 | 14500 | 1.2738 | 0.5270 | 0.4884 | 0.5070 | 0.9545 |
+| 0.5955 | 8.7311 | 15000 | 1.2564 | 0.5340 | 0.4895 | 0.5108 | 0.9552 |
+| 0.5784 | 9.0221 | 15500 | 1.2502 | 0.5406 | 0.5035 | 0.5214 | 0.9546 |
+| 0.5479 | 9.3132 | 16000 | 1.2339 | 0.5418 | 0.5203 | 0.5308 | 0.9566 |
+| 0.54 | 9.6042 | 16500 | 1.2380 | 0.5473 | 0.5175 | 0.5320 | 0.9564 |
+| 0.5368 | 9.8952 | 17000 | 1.2403 | 0.5726 | 0.5044 | 0.5363 | 0.9568 |
+| 0.5151 | 10.1863 | 17500 | 1.2152 | 0.5516 | 0.5445 | 0.5480 | 0.9571 |
+| 0.4959 | 10.4773 | 18000 | 1.2323 | 0.5657 | 0.5359 | 0.5504 | 0.9570 |
+| 0.4946 | 10.7683 | 18500 | 1.2150 | 0.5679 | 0.5236 | 0.5449 | 0.9575 |
+| 0.499 | 11.0594 | 19000 | 1.2119 | 0.5637 | 0.5372 | 0.5501 | 0.9576 |
+| 0.462 | 11.3504 | 19500 | 1.2289 | 0.5736 | 0.5294 | 0.5506 | 0.9578 |
+| 0.4631 | 11.6414 | 20000 | 1.2106 | 0.5661 | 0.5435 | 0.5546 | 0.9576 |
+| 0.464 | 11.9325 | 20500 | 1.2292 | 0.5886 | 0.5087 | 0.5458 | 0.9576 |
+| 0.4463 | 12.2235 | 21000 | 1.2135 | 0.5823 | 0.5465 | 0.5639 | 0.9578 |
+| 0.4339 | 12.5146 | 21500 | 1.2098 | 0.5890 | 0.5208 | 0.5528 | 0.9578 |
+| 0.4386 | 12.8056 | 22000 | 1.1906 | 0.5754 | 0.5387 | 0.5565 | 0.9573 |
+| 0.4249 | 13.0966 | 22500 | 1.1972 | 0.5873 | 0.5379 | 0.5615 | 0.9580 |
+| 0.4076 | 13.3877 | 23000 | 1.1994 | 0.5680 | 0.5585 | 0.5632 | 0.9576 |
+| 0.4122 | 13.6787 | 23500 | 1.2129 | 0.5894 | 0.5331 | 0.5598 | 0.9580 |
+| 0.4156 | 13.9697 | 24000 | 1.1865 | 0.5779 | 0.5485 | 0.5628 | 0.9580 |
+| 0.3926 | 14.2608 | 24500 | 1.1828 | 0.5974 | 0.5397 | 0.5671 | 0.9589 |
+| 0.3966 | 14.5518 | 25000 | 1.1764 | 0.5959 | 0.5390 | 0.5660 | 0.9586 |
+| 0.3861 | 14.8428 | 25500 | 1.1769 | 0.5869 | 0.5307 | 0.5574 | 0.9581 |
+| 0.3847 | 15.1339 | 26000 | 1.1997 | 0.5829 | 0.5406 | 0.5610 | 0.9581 |
+| 0.3703 | 15.4249 | 26500 | 1.1809 | 0.5736 | 0.5543 | 0.5638 | 0.9582 |
+| 0.3747 | 15.7159 | 27000 | 1.1896 | 0.5871 | 0.5320 | 0.5582 | 0.9577 |
+| 0.3713 | 16.0070 | 27500 | 1.1700 | 0.5965 | 0.5422 | 0.5681 | 0.9589 |
+| 0.3558 | 16.2980 | 28000 | 1.1922 | 0.5970 | 0.5416 | 0.5680 | 0.9586 |
+| 0.3582 | 16.5891 | 28500 | 1.1507 | 0.5831 | 0.5470 | 0.5644 | 0.9586 |
+| 0.3571 | 16.8801 | 29000 | 1.1405 | 0.5899 | 0.5418 | 0.5648 | 0.9584 |
+| 0.3522 | 17.1711 | 29500 | 1.1610 | 0.6046 | 0.5517 | 0.5769 | 0.9588 |
+| 0.3414 | 17.4622 | 30000 | 1.1670 | 0.6042 | 0.5485 | 0.5750 | 0.9590 |
+| 0.3488 | 17.7532 | 30500 | 1.1502 | 0.5904 | 0.5624 | 0.5761 | 0.9586 |
+| 0.34 | 18.0442 | 31000 | 1.1595 | 0.6091 | 0.5304 | 0.5670 | 0.9585 |
+| 0.3336 | 18.3353 | 31500 | 1.1553 | 0.6025 | 0.5439 | 0.5717 | 0.9589 |
+| 0.3295 | 18.6263 | 32000 | 1.1683 | 0.5916 | 0.5337 | 0.5611 | 0.9580 |
+| 0.3345 | 18.9173 | 32500 | 1.1478 | 0.5825 | 0.5536 | 0.5677 | 0.9585 |
+| 0.3263 | 19.2084 | 33000 | 1.1415 | 0.6093 | 0.5369 | 0.5708 | 0.9589 |
+| 0.3206 | 19.4994 | 33500 | 1.1410 | 0.5888 | 0.5637 | 0.5760 | 0.9593 |
+| 0.3234 | 19.7905 | 34000 | 1.1371 | 0.6072 | 0.5490 | 0.5766 | 0.9591 |
+| 0.3212 | 20.0815 | 34500 | 1.1401 | 0.6006 | 0.5478 | 0.5730 | 0.9587 |
+| 0.3154 | 20.3725 | 35000 | 1.1505 | 0.6165 | 0.5400 | 0.5758 | 0.9591 |
+| 0.3081 | 20.6636 | 35500 | 1.1512 | 0.5977 | 0.5393 | 0.5670 | 0.9591 |
+| 0.3137 | 20.9546 | 36000 | 1.1477 | 0.6185 | 0.5357 | 0.5741 | 0.9590 |
+| 0.3048 | 21.2456 | 36500 | 1.1344 | 0.6070 | 0.5416 | 0.5724 | 0.9593 |
+| 0.3028 | 21.5367 | 37000 | 1.1308 | 0.6192 | 0.5481 | 0.5815 | 0.9594 |
+| 0.3039 | 21.8277 | 37500 | 1.1492 | 0.6167 | 0.5318 | 0.5711 | 0.9591 |
+| 0.3013 | 22.1187 | 38000 | 1.1340 | 0.6139 | 0.5393 | 0.5742 | 0.9592 |
+| 0.2966 | 22.4098 | 38500 | 1.1176 | 0.6073 | 0.5561 | 0.5806 | 0.9594 |
+| 0.2956 | 22.7008 | 39000 | 1.1156 | 0.6100 | 0.5627 | 0.5854 | 0.9593 |
+| 0.2982 | 22.9919 | 39500 | 1.1282 | 0.6162 | 0.5553 | 0.5842 | 0.9596 |
+| 0.2915 | 23.2829 | 40000 | 1.1359 | 0.6048 | 0.5510 | 0.5766 | 0.9593 |
+| 0.2882 | 23.5739 | 40500 | 1.1194 | 0.6075 | 0.5517 | 0.5783 | 0.9592 |
+| 0.2906 | 23.8650 | 41000 | 1.1256 | 0.6058 | 0.5442 | 0.5734 | 0.9590 |
+| 0.2852 | 24.1560 | 41500 | 1.1115 | 0.6143 | 0.5465 | 0.5785 | 0.9596 |
+| 0.2864 | 24.4470 | 42000 | 1.1214 | 0.6103 | 0.5441 | 0.5753 | 0.9594 |
+| 0.2829 | 24.7381 | 42500 | 1.1333 | 0.6267 | 0.5346 | 0.5770 | 0.9592 |
+| 0.2836 | 25.0291 | 43000 | 1.1195 | 0.6067 | 0.5550 | 0.5797 | 0.9591 |
+| 0.2795 | 25.3201 | 43500 | 1.1260 | 0.6332 | 0.5315 | 0.5779 | 0.9593 |
+| 0.2779 | 25.6112 | 44000 | 1.1119 | 0.6164 | 0.5457 | 0.5789 | 0.9597 |
+| 0.2787 | 25.9022 | 44500 | 1.1094 | 0.6103 | 0.5640 | 0.5862 | 0.9600 |
+| 0.2765 | 26.1932 | 45000 | 1.1104 | 0.6166 | 0.5474 | 0.5799 | 0.9596 |
+| 0.2743 | 26.4843 | 45500 | 1.1164 | 0.6172 | 0.5553 | 0.5846 | 0.9596 |
+| 0.2731 | 26.7753 | 46000 | 1.1246 | 0.6158 | 0.5578 | 0.5854 | 0.9594 |
+| 0.2705 | 27.0664 | 46500 | 1.1110 | 0.6153 | 0.5468 | 0.5790 | 0.9593 |
+| 0.2707 | 27.3574 | 47000 | 1.1101 | 0.6207 | 0.5586 | 0.5880 | 0.9602 |
+| 0.2713 | 27.6484 | 47500 | 1.1131 | 0.6203 | 0.5455 | 0.5805 | 0.9596 |
+| 0.2704 | 27.9395 | 48000 | 1.1122 | 0.6193 | 0.5494 | 0.5823 | 0.9596 |
+| 0.2669 | 28.2305 | 48500 | 1.1127 | 0.6139 | 0.5519 | 0.5812 | 0.9596 |
+| 0.2696 | 28.5215 | 49000 | 1.1148 | 0.6233 | 0.5449 | 0.5815 | 0.9597 |
+| 0.2658 | 28.8126 | 49500 | 1.1130 | 0.6182 | 0.5451 | 0.5794 | 0.9597 |
+| 0.2663 | 29.1036 | 50000 | 1.1070 | 0.6170 | 0.5475 | 0.5802 | 0.9593 |
+| 0.2625 | 29.3946 | 50500 | 1.1055 | 0.6172 | 0.5498 | 0.5816 | 0.9599 |
+| 0.2652 | 29.6857 | 51000 | 1.1010 | 0.6332 | 0.5516 | 0.5896 | 0.9603 |
+| 0.2662 | 29.9767 | 51500 | 1.1087 | 0.6202 | 0.5509 | 0.5835 | 0.9594 |
 
 
 ### Framework versions
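The card above reports seqeval-style NER metrics for the fine-tuned checkpoint. Below is a minimal inference sketch; it assumes the checkpoint is published under the hypothetical Hub id `haryoaw/scenario-kd-scr-ner-full_data-univner_full55` (the owner is not stated in this commit) and that the custom `XLMRobertaForTokenClassificationKD` head loads through the standard `AutoModelForTokenClassification` path; if it does not, loading the plain XLM-R token-classification class with these weights is the usual fallback.

```python
# Minimal inference sketch. Assumptions: the hypothetical repo id below, and
# that the checkpoint loads through the standard AutoModelForTokenClassification path.
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

model_id = "haryoaw/scenario-kd-scr-ner-full_data-univner_full55"  # hypothetical id

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# Merge sub-word predictions back into word-level entity spans.
ner = pipeline("token-classification", model=model, tokenizer=tokenizer,
               aggregation_strategy="simple")
print(ner("Barack Obama visited Jakarta in 2010."))
```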
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "haryoaw/scenario-TCR-NER_data-
+  "_name_or_path": "haryoaw/scenario-TCR-NER_data-univner_full",
   "architectures": [
     "XLMRobertaForTokenClassificationKD"
   ],
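The config change only rewrites `_name_or_path` to point at the univner_full checkpoint; the architecture stays `XLMRobertaForTokenClassificationKD`, a custom knowledge-distillation head rather than a stock transformers class. As a sketch (reusing the hypothetical repo id from above), the config can be inspected without downloading the weights, since `AutoConfig` only fetches config.json:

```python
# Sketch: inspect the updated config without downloading the ~940 MB checkpoint.
# Assumes the same hypothetical repo id as in the README example.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("haryoaw/scenario-kd-scr-ner-full_data-univner_full55")
print(config.architectures)  # ['XLMRobertaForTokenClassificationKD']
print(config.model_type)     # 'xlm-roberta'
print(config.num_labels)     # number of NER tag classes the head predicts
```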
eval_result_ner.json
CHANGED
@@ -1 +1 @@
-{"ceb_gja": {"precision": 0.
+{"ceb_gja": {"precision": 0.5490196078431373, "recall": 0.5714285714285714, "f1": 0.5599999999999999, "accuracy": 0.9637065637065637}, "en_pud": {"precision": 0.5394736842105263, "recall": 0.3813953488372093, "f1": 0.446866485013624, "accuracy": 0.9480544012089157}, "de_pud": {"precision": 0.21278825995807127, "recall": 0.19538017324350337, "f1": 0.2037129954841947, "accuracy": 0.9054427828043692}, "pt_pud": {"precision": 0.5997818974918212, "recall": 0.5004549590536852, "f1": 0.5456349206349207, "accuracy": 0.9587302943563891}, "ru_pud": {"precision": 0.037037037037037035, "recall": 0.011583011583011582, "f1": 0.01764705882352941, "accuracy": 0.8990958408679928}, "sv_pud": {"precision": 0.5989304812834224, "recall": 0.32653061224489793, "f1": 0.4226415094339623, "accuracy": 0.9466345145732858}, "tl_trg": {"precision": 0.5, "recall": 0.5217391304347826, "f1": 0.5106382978723404, "accuracy": 0.9700272479564033}, "tl_ugnayan": {"precision": 0.08333333333333333, "recall": 0.06060606060606061, "f1": 0.07017543859649122, "accuracy": 0.9471285323609845}, "zh_gsd": {"precision": 0.5624123422159888, "recall": 0.5228161668839635, "f1": 0.5418918918918918, "accuracy": 0.9383949383949384}, "zh_gsdsimp": {"precision": 0.5539772727272727, "recall": 0.5111402359108781, "f1": 0.5316973415132925, "accuracy": 0.9368964368964369}, "hr_set": {"precision": 0.7628865979381443, "recall": 0.7384176764076978, "f1": 0.7504527345164794, "accuracy": 0.9711046990931574}, "da_ddt": {"precision": 0.6574585635359116, "recall": 0.5324384787472036, "f1": 0.588380716934487, "accuracy": 0.9711663174698194}, "en_ewt": {"precision": 0.6628787878787878, "recall": 0.48253676470588236, "f1": 0.5585106382978723, "accuracy": 0.9603538271506554}, "pt_bosque": {"precision": 0.6170774647887324, "recall": 0.5769547325102881, "f1": 0.5963419821352616, "accuracy": 0.9644978988552384}, "sr_set": {"precision": 0.8199753390875463, "recall": 0.7851239669421488, "f1": 0.802171290711701, "accuracy": 0.9699676035373435}, "sk_snk": {"precision": 0.42857142857142855, "recall": 0.2459016393442623, "f1": 0.3125, "accuracy": 0.915358040201005}, "sv_talbanken": {"precision": 0.7730061349693251, "recall": 0.6428571428571429, "f1": 0.701949860724234, "accuracy": 0.9945526819453305}}
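eval_result_ner.json stores one JSON object per evaluation treebank (ceb_gja, en_pud, de_pud, ...), each with precision, recall, F1 and accuracy. A small sketch for loading it and summarising the scores; it assumes the file has been downloaded locally, and checks that each F1 is the harmonic mean of its precision and recall:

```python
# Sketch: summarise the per-treebank scores in eval_result_ner.json.
# Assumes the file was downloaded locally from this repository.
import json

with open("eval_result_ner.json") as f:
    results = json.load(f)

for name, m in sorted(results.items()):
    p, r = m["precision"], m["recall"]
    f1 = 2 * p * r / (p + r) if (p + r) > 0 else 0.0
    assert abs(f1 - m["f1"]) < 1e-6  # F1 is the harmonic mean of precision and recall
    print(f"{name:14s}  P={p:.3f}  R={r:.3f}  F1={m['f1']:.3f}  acc={m['accuracy']:.3f}")

macro_f1 = sum(m["f1"] for m in results.values()) / len(results)
print(f"macro-F1 over {len(results)} test sets: {macro_f1:.3f}")
```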
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d1e068ce2a9d3256455ba93d8c9f0aaf7602e56d690f67dfac65fc45ce9af55b
 size 939737140
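model.safetensors is tracked with Git LFS, so the repository only stores this pointer file: the spec version, the SHA-256 of the actual blob, and its size in bytes (about 940 MB here). A sketch for verifying a downloaded copy against the pointer:

```python
# Sketch: verify a downloaded model.safetensors against its Git LFS pointer.
# The pointer records the blob's SHA-256 and its size in bytes.
import hashlib
import os

EXPECTED_OID = "d1e068ce2a9d3256455ba93d8c9f0aaf7602e56d690f67dfac65fc45ce9af55b"
EXPECTED_SIZE = 939737140
path = "model.safetensors"

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        sha256.update(chunk)

assert sha256.hexdigest() == EXPECTED_OID, "hash mismatch"
print("model.safetensors matches the LFS pointer")
```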
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:251b7d8b74b4c08d44b8abdc3bd3f4e499caa1b3a6b4c04561c2ee812befd4bb
 size 5304
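training_args.bin is likewise an LFS pointer; the 5.3 kB object behind it is the `TrainingArguments` instance that the transformers `Trainer` saves with `torch.save`. As a sketch, it can be unpickled to recover the training hyperparameters (recent PyTorch versions need `weights_only=False` because the file is a pickled Python object rather than a tensor state dict):

```python
# Sketch: inspect the hyperparameters stored in training_args.bin.
# The Trainer saves its TrainingArguments with torch.save; recent PyTorch
# needs weights_only=False to unpickle a non-tensor object like this.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # e.g. TrainingArguments
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```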