MF21377197 committed
Commit 62825fd
1 Parent(s): d6f66d6

End of training
README.md CHANGED
@@ -2,8 +2,6 @@
  license: other
  base_model: nvidia/mit-b0
  tags:
- - vision
- - image-segmentation
  - generated_from_trainer
  model-index:
  - name: segformer-b0-finetuned
@@ -15,14 +13,14 @@ should probably proofread and complete it, then remove this comment. -->

  # segformer-b0-finetuned

- This model is a fine-tuned version of [nvidia/mit-b0](https://huggingface.co/nvidia/mit-b0) on the segments/sidewalk-semantic dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.5754
- - Mean Iou: 0.2781
- - Mean Accuracy: 0.3329
- - Overall Accuracy: 0.8463
- - Per Category Iou: [0.0, 0.7432161129630078, 0.854265404236928, 0.4606401052721709, 0.6557337899613191, 0.4079867997829282, nan, 0.37471812939221005, 0.2905341043386837, 0.0, 0.7537587486511262, 0.0, 0.0, nan, 0.0, 0.019848656872972055, 0.0, 0.0, 0.7115931639469374, nan, 0.3661808713379434, 0.13378413732653244, 0.0, nan, 0.0, 0.23570903658727577, 0.0, 0.0, 0.8461792428096935, 0.7553019453875489, 0.9045825383881589, 0.0, 0.0, 0.10651182264386322, 0.0]
- - Per Category Accuracy: [0.0, 0.8511274737458464, 0.9523527728262475, 0.7305783824446481, 0.7179823443918317, 0.5112934364530293, nan, 0.4671955914617317, 0.39620749876026823, 0.0, 0.9325380267720194, 0.0, 0.0, nan, 0.0, 0.019920987025907694, 0.0, 0.0, 0.9114075726560573, nan, 0.4767221960460328, 0.14080931640440494, 0.0, nan, 0.0, 0.2902864462270403, 0.0, 0.0, 0.9417630123717813, 0.8946072183599384, 0.9626510283976625, 0.0, 0.0, 0.12104456389804058, 0.0]

  ## Model description

@@ -42,27 +40,25 @@ More information needed

  The following hyperparameters were used during training:
  - learning_rate: 5e-05
- - train_batch_size: 2
- - eval_batch_size: 2
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: linear
- - num_epochs: 5

  ### Training results

- | Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Per Category Iou | Per Category Accuracy |
- |:-------------:|:-----:|:----:|:---------------:|:--------:|:-------------:|:----------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|
- | 0.6702 | 1.0 | 400 | 0.7027 | 0.2195 | 0.2629 | 0.8084 | [0.0, 0.6792133271879363, 0.7849474176188894, 0.058328120930117175, 0.6300690246185523, 0.25673461142351706, nan, 0.3004378389548008, 0.0, 0.0, 0.6990982425959871, 0.0, 0.0, nan, 0.0, 0.0, 0.0, 0.0, 0.6732565754528083, nan, 0.2872208279956378, 0.00018886917594771138, 0.0, nan, 0.0, 0.04832964515878562, 0.0, 0.0, 0.8103176781510323, 0.6868793807107686, 0.8837386972387465, 0.0, 0.0, 0.005328297121957592, 0.0] | [0.0, 0.766565968404771, 0.9690642801889813, 0.05881921997258986, 0.6774284161746986, 0.2997799346472589, nan, 0.3604784648706302, 0.0, 0.0, 0.9238506174053699, 0.0, 0.0, nan, 0.0, 0.0, 0.0, 0.0, 0.879793151515724, nan, 0.3814629549547224, 0.0001889329509116015, 0.0, nan, 0.0, 0.049252727470549255, 0.0, 0.0, 0.92204060605898, 0.9165746690826654, 0.9399161153753854, 0.0, 0.0, 0.005432287151031732, 0.0] |
- | 0.3787 | 2.0 | 800 | 0.6242 | 0.2529 | 0.3048 | 0.8336 | [0.0, 0.7200155470846057, 0.8500725277201905, 0.4283923004744409, 0.6393695507210657, 0.35502977816991826, nan, 0.35184539253673836, 0.007092160389449536, 0.0, 0.7043921336269122, 0.0, 0.0, nan, 0.0, 0.0, 0.0, 0.0, 0.6907551328541987, nan, 0.30912243068319983, 0.08139332433133045, 0.0, nan, 0.0, 0.188100947571913, 0.0, 0.0, 0.8385996121617959, 0.7447284921504436, 0.8944303097872178, 0.0, 0.0, 0.03573598106370054, 0.0] | [0.0, 0.8883665924421738, 0.9304929976181545, 0.6379432034836245, 0.684408431327974, 0.47546406343261166, nan, 0.46904807570859863, 0.007098812013540027, 0.0, 0.9393400531924911, 0.0, 0.0, nan, 0.0, 0.0, 0.0, 0.0, 0.9030339520321862, nan, 0.38990531390988364, 0.08456558485802895, 0.0, nan, 0.0, 0.22015360876747014, 0.0, 0.0, 0.9441960488646456, 0.8731757915405423, 0.9605664489132197, 0.0, 0.0, 0.0402316629096584, 0.0] |
- | 0.5272 | 3.0 | 1200 | 0.5910 | 0.2640 | 0.3116 | 0.8409 | [0.0, 0.7417372358583919, 0.8491040334276788, 0.5026923409983705, 0.6531274799797995, 0.39671746797276214, nan, 0.3489393985838212, 0.10917691003765771, 0.0, 0.7340253134142348, 0.0, 0.0, nan, 0.0, 0.0015906711475390872, 0.0, 0.0, 0.6888929372303201, nan, 0.3000933215998536, 0.09430463167198322, 0.0, nan, 0.0, 0.22079460109263602, 0.0, 0.0, 0.8375430585634192, 0.7432674869295846, 0.8979452744557967, 0.0, 0.0, 0.06465850622764936, 0.0] | [0.0, 0.8742236634719254, 0.9469015516965127, 0.6845976124235456, 0.7153136135405865, 0.4838119970613863, nan, 0.426013369696913, 0.11595265302602359, 0.0, 0.9307602006332041, 0.0, 0.0, nan, 0.0, 0.0015906711475390872, 0.0, 0.0, 0.9211760279107508, nan, 0.3640489760089845, 0.09822101537391639, 0.0, nan, 0.0, 0.27522724799952525, 0.0, 0.0, 0.9529837430876597, 0.8328291532204077, 0.9626684254773068, 0.0, 0.0, 0.07271163516559737, 0.0] |
- | 1.0028 | 4.0 | 1600 | 0.5819 | 0.2749 | 0.3265 | 0.8451 | [0.0, 0.7442319582171808, 0.8549546101758252, 0.4558465282708946, 0.6592549345415454, 0.40147520263994, nan, 0.3560786579426865, 0.2724675418610539, 0.0, 0.7615078761694535, 0.0, 0.0, nan, 0.0, 0.01480792989181263, 0.0, 0.0, 0.6971675525618446, nan, 0.3289306001269004, 0.1400376526683254, 0.0, nan, 0.0, 0.2330975509072671, 0.0, 0.0, 0.8412274001878343, 0.7610379911113287, 0.9042555512089849, 0.0, 0.0, 0.09514392306437187, 0.0] | [0.0, 0.8831147123698461, 0.9454704608007805, 0.7031575260834061, 0.7194367374187804, 0.4940337653992012, nan, 0.4313144685568867, 0.3654837110023501, 0.0, 0.911745245214873, 0.0, 0.0, nan, 0.0, 0.01483662361250094, 0.0, 0.0, 0.9249937012782616, nan, 0.39416005628239303, 0.1487585698177601, 0.0, nan, 0.0, 0.28903522220353906, 0.0, 0.0, 0.9446123320713813, 0.8885032163816529, 0.9544933330809051, 0.0, 0.0, 0.10758314548292006, 0.0] |
- | 1.3105 | 5.0 | 2000 | 0.5754 | 0.2781 | 0.3329 | 0.8463 | [0.0, 0.7432161129630078, 0.854265404236928, 0.4606401052721709, 0.6557337899613191, 0.4079867997829282, nan, 0.37471812939221005, 0.2905341043386837, 0.0, 0.7537587486511262, 0.0, 0.0, nan, 0.0, 0.019848656872972055, 0.0, 0.0, 0.7115931639469374, nan, 0.3661808713379434, 0.13378413732653244, 0.0, nan, 0.0, 0.23570903658727577, 0.0, 0.0, 0.8461792428096935, 0.7553019453875489, 0.9045825383881589, 0.0, 0.0, 0.10651182264386322, 0.0] | [0.0, 0.8511274737458464, 0.9523527728262475, 0.7305783824446481, 0.7179823443918317, 0.5112934364530293, nan, 0.4671955914617317, 0.39620749876026823, 0.0, 0.9325380267720194, 0.0, 0.0, nan, 0.0, 0.019920987025907694, 0.0, 0.0, 0.9114075726560573, nan, 0.4767221960460328, 0.14080931640440494, 0.0, nan, 0.0, 0.2902864462270403, 0.0, 0.0, 0.9417630123717813, 0.8946072183599384, 0.9626510283976625, 0.0, 0.0, 0.12104456389804058, 0.0] |


  ### Framework versions

- - Transformers 4.38.2
  - Pytorch 2.2.1+cu121
  - Datasets 2.18.0
  - Tokenizers 0.15.2

  license: other
  base_model: nvidia/mit-b0
  tags:
  - generated_from_trainer
  model-index:
  - name: segformer-b0-finetuned

  # segformer-b0-finetuned

+ This model is a fine-tuned version of [nvidia/mit-b0](https://huggingface.co/nvidia/mit-b0) on an unknown dataset.
  It achieves the following results on the evaluation set:
+ - Loss: 2.9804
+ - Mean Iou: 0.0270
+ - Mean Accuracy: 0.0832
+ - Overall Accuracy: 0.5068
+ - Per Category Iou: [0.8021452931267776, 0.0, nan, 0.0, 0.0, 0.0, nan, nan, 0.0, 0.0, 0.00757909387963703, nan, nan, 0.0, 0.0, nan, nan, nan, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, nan, nan, 0.0, 0.0, nan, nan, 0.0, nan, nan, nan, 0.0, nan, 0.000748031223377361, 0.0, 0.15502171352863242, nan, 0.0, nan, 0.056221004367996964, nan, 0.0, nan, 0.0, nan, 0.040391599317767694, 0.0, nan, nan, 0.0, nan, nan, 0.0, 0.07047672462142457, 0.0, nan, nan, 0.05062781753814023, nan, 0.0, 0.03166986564299424, nan, nan, 0.0, 0.0, nan, nan, 0.0, nan, 0.0, nan, 0.08493484197676761, 0.0, nan, 0.04267983360952349, nan, 0.0003925481280361144, 0.0, nan, nan, 0.00012044806680852773, 0.0, 0.0, 0.002362410370862814, nan, nan, nan, nan, 0.0022562095679258846, 0.0, 0.0]
+ - Per Category Accuracy: [0.8694985349631393, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, 0.0, 0.008096131396611705, nan, nan, nan, 0.0, nan, nan, nan, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, 0.0, nan, nan, nan, 0.0, nan, 0.0010361203050798676, nan, 0.8231816690745898, nan, 0.0, nan, 0.11201351351351352, nan, 0.0, nan, nan, nan, 0.07139561448697561, nan, nan, nan, nan, nan, nan, nan, 0.10334561484308179, 0.0, nan, nan, 0.12090652522991582, nan, 0.0, 0.037541019470575365, nan, nan, nan, nan, nan, nan, 0.0, nan, 0.0, nan, 0.8687054097111588, 0.0, nan, 0.05824436171194732, nan, 0.00040998308819761186, nan, nan, nan, 0.00012146732528949713, 0.0, 0.0, 0.0023685118842166654, nan, nan, nan, nan, 0.0027728020023065785, nan, 0.0]

  ## Model description


  The following hyperparameters were used during training:
  - learning_rate: 5e-05
+ - train_batch_size: 1
+ - eval_batch_size: 1
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: linear
+ - num_epochs: 3

  ### Training results

+ | Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Per Category Iou | Per Category Accuracy |
+ |:-------------:|:-----:|:----:|:---------------:|:--------:|:-------------:|:----------------:|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|
+ | 4.0562 | 1.0 | 80 | 3.4450 | 0.0153 | 0.0686 | 0.4812 | [0.7793520968900188, 0.0, 0.0, 0.0, 0.0, 0.0, nan, nan, 0.0, 0.0, 0.02432875250844616, nan, 0.0, 0.0, 0.0, 0.0, 0.0, nan, nan, nan, nan, nan, 0.0, nan, 0.0, nan, 0.0, nan, 0.0, 0.0, 0.0032801739760782893, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, nan, 0.0, 0.0, 0.0, 0.0, nan, 0.0, nan, 0.0, 0.0, 0.12812692281805033, nan, 0.0, 0.0, 0.05380656279838841, nan, 0.0, nan, nan, nan, 0.0003018650007218511, 0.0, nan, 0.0, 0.0, nan, 0.0, 0.0, 0.008481858011825258, 0.0003758691975192633, nan, 0.0, 0.010826130083604932, 0.0, 0.0, 0.005665119052689867, 0.0, nan, 0.0, 0.0, nan, nan, 0.0, nan, 0.0, nan, 0.07572116691493873, 0.0009061488673139158, nan, 0.00028572287888357543, 0.0, 0.0005691934965994337, 0.0, nan, nan, 0.0, 0.005145141665218147, 0.0, 0.0, nan, nan, 0.0, 0.0, 0.0016498912362201822, 0.0, 0.0] | [0.8320357969624665, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, 0.0, 0.044499683472624736, nan, nan, nan, 0.0, nan, nan, nan, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, nan, 0.0, 0.0, 0.0033331798720132593, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, 0.0, nan, nan, nan, 0.0, nan, 0.0, nan, 0.843680423236109, nan, 0.0, nan, 0.1561081081081081, nan, 0.0, nan, nan, nan, 0.00030909131632584497, nan, nan, nan, nan, nan, nan, nan, 0.016399105204289756, 0.0004157427937915743, nan, nan, 0.01858790326504793, nan, 0.0, 0.005819295558958652, nan, nan, nan, nan, nan, nan, 0.0, nan, 0.0, nan, 0.6029646048347995, 0.0009201324990798675, nan, 0.00029229869850158456, nan, 0.0006662225183211193, nan, nan, nan, 0.0, 0.0084153067606755, 0.0, 0.0, nan, nan, nan, nan, 0.002196157338110078, nan, 0.0] |
+ | 3.4584 | 2.0 | 160 | 3.1139 | 0.0241 | 0.0797 | 0.4821 | [0.7782469073708838, nan, nan, 0.0, 0.0, 0.0, nan, nan, 0.0, 0.0, 0.010029985991945368, nan, nan, nan, 0.0, 0.0, nan, nan, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, nan, nan, 0.0, 0.0, nan, nan, 0.0, nan, nan, nan, 0.0, nan, 0.0017241143388842517, 0.0, 0.1557832833948993, nan, 0.0, nan, 0.07580358123695485, nan, 0.0, nan, 0.0, nan, 0.028521816696650947, 0.0, nan, nan, 0.0, nan, nan, 0.0, 0.021143006947175254, 0.0, nan, nan, 0.018894200915169017, nan, 0.0, 0.0064420428056791695, nan, nan, 0.0, 0.0, nan, nan, 0.0, nan, 0.0, nan, 0.06671080655150016, 0.0, nan, 0.016629320979883684, nan, 0.0010633250993799996, 0.0, nan, nan, 0.0, 0.0, 0.0, 0.0, nan, nan, nan, nan, 0.000702695105998854, 0.0, 0.0] | [0.8244354154654462, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, 0.0, 0.01064577392395212, nan, nan, nan, 0.0, nan, nan, nan, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, 0.0, nan, nan, nan, 0.0, nan, 0.002417614045186358, nan, 0.8166523587177286, nan, 0.0, nan, 0.23312162162162162, nan, 0.0, nan, nan, nan, 0.050991108025175265, nan, nan, nan, nan, nan, nan, nan, 0.026531350746759656, 0.0, nan, nan, 0.03812466546640066, nan, 0.0, 0.0066506234959527455, nan, nan, nan, nan, nan, nan, 0.0, nan, 0.0, nan, 0.9177460691279352, 0.0, nan, 0.02032245161687333, nan, 0.0011103708638685321, nan, nan, nan, 0.0, 0.0, 0.0, 0.0, nan, nan, nan, nan, 0.0007974873015483523, nan, 0.0] |
+ | 3.1748 | 3.0 | 240 | 2.9804 | 0.0270 | 0.0832 | 0.5068 | [0.8021452931267776, 0.0, nan, 0.0, 0.0, 0.0, nan, nan, 0.0, 0.0, 0.00757909387963703, nan, nan, 0.0, 0.0, nan, nan, nan, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, nan, nan, 0.0, 0.0, nan, nan, 0.0, nan, nan, nan, 0.0, nan, 0.000748031223377361, 0.0, 0.15502171352863242, nan, 0.0, nan, 0.056221004367996964, nan, 0.0, nan, 0.0, nan, 0.040391599317767694, 0.0, nan, nan, 0.0, nan, nan, 0.0, 0.07047672462142457, 0.0, nan, nan, 0.05062781753814023, nan, 0.0, 0.03166986564299424, nan, nan, 0.0, 0.0, nan, nan, 0.0, nan, 0.0, nan, 0.08493484197676761, 0.0, nan, 0.04267983360952349, nan, 0.0003925481280361144, 0.0, nan, nan, 0.00012044806680852773, 0.0, 0.0, 0.002362410370862814, nan, nan, nan, nan, 0.0022562095679258846, 0.0, 0.0] | [0.8694985349631393, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, 0.0, 0.008096131396611705, nan, nan, nan, 0.0, nan, nan, nan, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, nan, 0.0, 0.0, 0.0, nan, nan, nan, nan, nan, nan, 0.0, nan, nan, 0.0, nan, nan, nan, 0.0, nan, 0.0010361203050798676, nan, 0.8231816690745898, nan, 0.0, nan, 0.11201351351351352, nan, 0.0, nan, nan, nan, 0.07139561448697561, nan, nan, nan, nan, nan, nan, nan, 0.10334561484308179, 0.0, nan, nan, 0.12090652522991582, nan, 0.0, 0.037541019470575365, nan, nan, nan, nan, nan, nan, 0.0, nan, 0.0, nan, 0.8687054097111588, 0.0, nan, 0.05824436171194732, nan, 0.00040998308819761186, nan, nan, nan, 0.00012146732528949713, 0.0, 0.0, 0.0023685118842166654, nan, nan, nan, nan, 0.0027728020023065785, nan, 0.0] |
 
 
  ### Framework versions

+ - Transformers 4.39.3
  - Pytorch 2.2.1+cu121
  - Datasets 2.18.0
  - Tokenizers 0.15.2
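The updated card keeps the auto-generated skeleton and does not include a usage example. As a minimal sketch, assuming the checkpoint is pushed under a Hub repo id such as `MF21377197/segformer-b0-finetuned` (the id is not shown in this diff) and that the image processor config was uploaded alongside the weights, inference follows the standard SegFormer pattern:

```python
import torch
from PIL import Image
from transformers import SegformerForSemanticSegmentation, SegformerImageProcessor

# Hypothetical repo id -- substitute the actual Hub path of this checkpoint.
repo_id = "MF21377197/segformer-b0-finetuned"

processor = SegformerImageProcessor.from_pretrained(repo_id)
model = SegformerForSemanticSegmentation.from_pretrained(repo_id)

image = Image.open("food.jpg").convert("RGB")  # any RGB image
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # (1, num_labels, H/4, W/4)

# Upsample the logits to the input resolution and take the per-pixel argmax.
upsampled = torch.nn.functional.interpolate(
    logits, size=image.size[::-1], mode="bilinear", align_corners=False
)
pred = upsampled.argmax(dim=1)[0]  # (H, W) tensor of class indices

# Look up the class name for one pixel via the label mapping in the config.
print(model.config.id2label[int(pred[0, 0])])
```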
config.json CHANGED
@@ -28,80 +28,218 @@
  256
  ],
  "id2label": {
- "0": "unlabeled",
- "1": "flat-road",
- "2": "flat-sidewalk",
- "3": "flat-crosswalk",
- "4": "flat-cyclinglane",
- "5": "flat-parkingdriveway",
- "6": "flat-railtrack",
- "7": "flat-curb",
- "8": "human-person",
- "9": "human-rider",
- "10": "vehicle-car",
- "11": "vehicle-truck",
- "12": "vehicle-bus",
- "13": "vehicle-tramtrain",
- "14": "vehicle-motorcycle",
- "15": "vehicle-bicycle",
- "16": "vehicle-caravan",
- "17": "vehicle-cartrailer",
- "18": "construction-building",
- "19": "construction-door",
- "20": "construction-wall",
- "21": "construction-fenceguardrail",
- "22": "construction-bridge",
- "23": "construction-tunnel",
- "24": "construction-stairs",
- "25": "object-pole",
- "26": "object-trafficsign",
- "27": "object-trafficlight",
- "28": "nature-vegetation",
- "29": "nature-terrain",
- "30": "sky",
- "31": "void-ground",
- "32": "void-dynamic",
- "33": "void-static",
- "34": "void-unclear"
  },
  "image_size": 224,
  "initializer_range": 0.02,
  "label2id": {
- "construction-bridge": 22,
- "construction-building": 18,
- "construction-door": 19,
- "construction-fenceguardrail": 21,
- "construction-stairs": 24,
- "construction-tunnel": 23,
- "construction-wall": 20,
- "flat-crosswalk": 3,
- "flat-curb": 7,
- "flat-cyclinglane": 4,
- "flat-parkingdriveway": 5,
- "flat-railtrack": 6,
- "flat-road": 1,
- "flat-sidewalk": 2,
- "human-person": 8,
- "human-rider": 9,
- "nature-terrain": 29,
- "nature-vegetation": 28,
- "object-pole": 25,
- "object-trafficlight": 27,
- "object-trafficsign": 26,
- "sky": 30,
- "unlabeled": 0,
- "vehicle-bicycle": 15,
- "vehicle-bus": 12,
- "vehicle-car": 10,
- "vehicle-caravan": 16,
- "vehicle-cartrailer": 17,
- "vehicle-motorcycle": 14,
- "vehicle-tramtrain": 13,
- "vehicle-truck": 11,
- "void-dynamic": 32,
- "void-ground": 31,
- "void-static": 33,
- "void-unclear": 34
  },
  "layer_norm_eps": 1e-06,
  "mlp_ratios": [
@@ -140,5 +278,5 @@
  2
  ],
  "torch_dtype": "float32",
- "transformers_version": "4.38.2"
  }
 
  256
  ],
  "id2label": {
+ "0": "background",
+ "1": "candy",
+ "10": "cake",
+ "100": "oyster mushroom",
+ "101": "white button mushroom",
+ "102": "salad",
+ "103": "other ingredients",
+ "11": "wine",
+ "12": "milkshake",
+ "13": "coffee",
+ "14": "juice",
+ "15": "milk",
+ "16": "tea",
+ "17": "almond",
+ "18": "red beans",
+ "19": "cashew",
+ "2": "egg tart",
+ "20": "dried cranberries",
+ "21": "soy",
+ "22": "walnut",
+ "23": "peanut",
+ "24": "egg",
+ "25": "apple",
+ "26": "date",
+ "27": "apricot",
+ "28": "avocado",
+ "29": "banana",
+ "3": "french fries",
+ "30": "strawberry",
+ "31": "cherry",
+ "32": "blueberry",
+ "33": "raspberry",
+ "34": "mango",
+ "35": "olives",
+ "36": "peach",
+ "37": "lemon",
+ "38": "pear",
+ "39": "fig",
+ "4": "chocolate",
+ "40": "pineapple",
+ "41": "grape",
+ "42": "kiwi",
+ "43": "melon",
+ "44": "orange",
+ "45": "watermelon",
+ "46": "steak",
+ "47": "pork",
+ "48": "chicken duck",
+ "49": "sausage",
+ "5": "biscuit",
+ "50": "fried meat",
+ "51": "lamb",
+ "52": "sauce",
+ "53": "crab",
+ "54": "fish",
+ "55": "shellfish",
+ "56": "shrimp",
+ "57": "soup",
+ "58": "bread",
+ "59": "corn",
+ "6": "popcorn",
+ "60": "hamburg",
+ "61": "pizza",
+ "62": " hanamaki baozi",
+ "63": "wonton dumplings",
+ "64": "pasta",
+ "65": "noodles",
+ "66": "rice",
+ "67": "pie",
+ "68": "tofu",
+ "69": "eggplant",
+ "7": "pudding",
+ "70": "potato",
+ "71": "garlic",
+ "72": "cauliflower",
+ "73": "tomato",
+ "74": "kelp",
+ "75": "seaweed",
+ "76": "spring onion",
+ "77": "rape",
+ "78": "ginger",
+ "79": "okra",
+ "8": "ice cream",
+ "80": "lettuce",
+ "81": "pumpkin",
+ "82": "cucumber",
+ "83": "white radish",
+ "84": "carrot",
+ "85": "asparagus",
+ "86": "bamboo shoots",
+ "87": "broccoli",
+ "88": "celery stick",
+ "89": "cilantro mint",
+ "9": "cheese butter",
+ "90": "snow peas",
+ "91": " cabbage",
+ "92": "bean sprouts",
+ "93": "onion",
+ "94": "pepper",
+ "95": "green beans",
+ "96": "French beans",
+ "97": "king oyster mushroom",
+ "98": "shiitake",
+ "99": "enoki mushroom"
  },
  "image_size": 224,
  "initializer_range": 0.02,
  "label2id": {
+ " cabbage": "91",
+ " hanamaki baozi": "62",
+ "French beans": "96",
+ "almond": "17",
+ "apple": "25",
+ "apricot": "27",
+ "asparagus": "85",
+ "avocado": "28",
+ "background": "0",
+ "bamboo shoots": "86",
+ "banana": "29",
+ "bean sprouts": "92",
+ "biscuit": "5",
+ "blueberry": "32",
+ "bread": "58",
+ "broccoli": "87",
+ "cake": "10",
+ "candy": "1",
+ "carrot": "84",
+ "cashew": "19",
+ "cauliflower": "72",
+ "celery stick": "88",
+ "cheese butter": "9",
+ "cherry": "31",
+ "chicken duck": "48",
+ "chocolate": "4",
+ "cilantro mint": "89",
+ "coffee": "13",
+ "corn": "59",
+ "crab": "53",
+ "cucumber": "82",
+ "date": "26",
+ "dried cranberries": "20",
+ "egg": "24",
+ "egg tart": "2",
+ "eggplant": "69",
+ "enoki mushroom": "99",
+ "fig": "39",
+ "fish": "54",
+ "french fries": "3",
+ "fried meat": "50",
+ "garlic": "71",
+ "ginger": "78",
+ "grape": "41",
+ "green beans": "95",
+ "hamburg": "60",
+ "ice cream": "8",
+ "juice": "14",
+ "kelp": "74",
+ "king oyster mushroom": "97",
+ "kiwi": "42",
+ "lamb": "51",
+ "lemon": "37",
+ "lettuce": "80",
+ "mango": "34",
+ "melon": "43",
+ "milk": "15",
+ "milkshake": "12",
+ "noodles": "65",
+ "okra": "79",
+ "olives": "35",
+ "onion": "93",
+ "orange": "44",
+ "other ingredients": "103",
+ "oyster mushroom": "100",
+ "pasta": "64",
+ "peach": "36",
+ "peanut": "23",
+ "pear": "38",
+ "pepper": "94",
+ "pie": "67",
+ "pineapple": "40",
+ "pizza": "61",
+ "popcorn": "6",
+ "pork": "47",
+ "potato": "70",
+ "pudding": "7",
+ "pumpkin": "81",
+ "rape": "77",
+ "raspberry": "33",
+ "red beans": "18",
+ "rice": "66",
+ "salad": "102",
+ "sauce": "52",
+ "sausage": "49",
+ "seaweed": "75",
+ "shellfish": "55",
+ "shiitake": "98",
+ "shrimp": "56",
+ "snow peas": "90",
+ "soup": "57",
+ "soy": "21",
+ "spring onion": "76",
+ "steak": "46",
+ "strawberry": "30",
+ "tea": "16",
+ "tofu": "68",
+ "tomato": "73",
+ "walnut": "22",
+ "watermelon": "45",
+ "white button mushroom": "101",
+ "white radish": "83",
+ "wine": "11",
+ "wonton dumplings": "63"
  },
  "layer_norm_eps": 1e-06,
  "mlp_ratios": [

  2
  ],
  "torch_dtype": "float32",
+ "transformers_version": "4.39.3"
  }
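The rewritten id2label/label2id pair above is consumed like any other transformers label mapping: the JSON keys are strings, but the loaded config exposes integer class ids. A small sketch, again with a hypothetical repo id:

```python
from transformers import AutoConfig

# Hypothetical repo id -- substitute the actual Hub path of this checkpoint.
config = AutoConfig.from_pretrained("MF21377197/segformer-b0-finetuned")

print(config.num_labels)         # 104 classes, including "background"
print(config.id2label[61])       # e.g. "pizza"
print(config.label2id["pizza"])  # maps the name back to its class index
```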
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b47a41f2af4b878220cb3b6da94caa7acbe2ef0916608224c941ee6ae51b391f
- size 14918708
+ oid sha256:0fa79fe674d5d4eb6bf68351e9c64d82ad6dfaa1518d87fd9920d54dc5527ae5
+ size 14989656
runs/Apr13_17-54-21_b1fdc3b2e8ea/events.out.tfevents.1713030898.b1fdc3b2e8ea.3243.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8e354c8a41c1b1c2bcb59b1057aed0323a0c6262d4339f013357f9d3372a174
+ size 61263
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b9ad9a4acdb55bddb2f2428318b07daa202268063ea95bc9b9e4761ea79f8282
+ oid sha256:5d804272fd4a0307298bef737f7a1d6c1e36a5f7ba235fe73ba429e600f12a19
  size 4920