anniew666 committed
Commit
71b6e11
Parent: 42e8896

Training in progress, step 675

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ cf.png filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
adapter_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+   "base_model_name_or_path": "roberta-large",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "lora_alpha": 16,
+   "lora_dropout": 0.05,
+   "modules_to_save": [
+     "classifier"
+   ],
+   "peft_type": "LORA",
+   "r": 8,
+   "target_modules": [
+     "query",
+     "value"
+   ],
+   "task_type": null
+ }
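
A configuration file like this is what peft writes out when a LoRA adapter is created. A minimal sketch of how it could be reproduced; num_labels=7 is inferred from class_report.txt below, not stated in this config:

```python
# Sketch: recreating this adapter setup with peft.
# num_labels=7 is an inference from class_report.txt below, not from this file.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForSequenceClassification

base = AutoModelForSequenceClassification.from_pretrained("roberta-large", num_labels=7)

lora_config = LoraConfig(
    r=8,                                # low-rank update dimension
    lora_alpha=16,                      # scaling factor for the LoRA updates
    lora_dropout=0.05,
    bias="none",
    target_modules=["query", "value"],  # attention projections to adapt
    modules_to_save=["classifier"],     # train the classification head in full
)

model = get_peft_model(base, lora_config)
model.print_trainable_parameters()
# model.save_pretrained("out") would write adapter_config.json and adapter_model.bin
```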
adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc55517cb4a6a385a273b3f0a2e4fc78a25eb939b7935e9740b97a94d13c5bef
+ size 7409629
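
A sketch of how an adapter stored this way is typically loaded for inference; the path below is a placeholder for the actual Hub id or local directory:

```python
# Sketch: loading a LoRA adapter saved as adapter_config.json + adapter_model.bin.
from peft import PeftConfig, PeftModel
from transformers import AutoModelForSequenceClassification

adapter_path = "path/to/this-repo"  # placeholder: substitute the real Hub id or local dir
peft_config = PeftConfig.from_pretrained(adapter_path)
base = AutoModelForSequenceClassification.from_pretrained(
    peft_config.base_model_name_or_path,  # "roberta-large" per adapter_config.json
    num_labels=7,
)
model = PeftModel.from_pretrained(base, adapter_path)
model.eval()
```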
cf.png ADDED

Git LFS Details

  • SHA256: 8b07adc1933623aea98cb75e46ce14e1df85d999a031b536a6677d468af1a457
  • Pointer size: 130 Bytes
  • Size of remote file: 57.8 kB
cf.txt ADDED
@@ -0,0 +1,7 @@
+ 4.300727566693613446e-01 5.497170573969280272e-02 3.799514955537591082e-02 8.488278092158448029e-02 2.473726758286176330e-01 1.228779304769603842e-01 2.182700080840743739e-02
+ 1.565495207667731703e-01 4.089456869009584494e-01 5.431309904153354406e-02 5.111821086261980618e-02 1.469648562300319428e-01 1.693290734824281218e-01 1.277955271565495154e-02
+ 5.942622950819671790e-02 1.844262295081967179e-02 6.311475409836065920e-01 5.737704918032787010e-02 1.209016393442622905e-01 8.811475409836065642e-02 2.459016393442622905e-02
+ 1.095713825330325505e-02 1.933612632935868565e-03 1.127940702545923365e-02 8.105059619722848607e-01 1.089268449887205864e-01 4.382855301321302022e-02 1.256848211408314459e-02
+ 4.579630895420368913e-02 5.126452494873547615e-03 4.101161995898838092e-03 1.715652768284347263e-01 6.623376623376623362e-01 8.509911141490088260e-02 2.597402597402597574e-02
+ 3.431667670078265842e-02 1.565322095123419799e-02 4.093919325707405521e-02 5.659241420830824626e-02 1.595424443106562284e-01 6.821192052980132869e-01 1.083684527393136664e-02
+ 3.250270855904658501e-02 2.166847237269772363e-03 2.708559046587215649e-02 1.462621885157096291e-01 4.366197183098591283e-01 8.992416034669556413e-02 2.654387865655471468e-01
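
These rows form a 7x7 row-normalized confusion matrix over the seven emotion classes: each row sums to 1, and the diagonal entries match the per-class recall values in class_report.txt below. A plausible sketch of how cf.png could be rendered from it (an assumption, not confirmed by the repo), using matplotlib and scikit-learn:

```python
# Sketch: plotting the row-normalized confusion matrix stored in cf.txt.
# The label order is assumed to follow class_report.txt below.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import ConfusionMatrixDisplay

labels = ["anger", "disgust", "fear", "joy", "neutral", "sadness", "surprise"]
cm = np.loadtxt("cf.txt")  # 7x7 matrix; each row sums to 1
disp = ConfusionMatrixDisplay(confusion_matrix=cm, display_labels=labels)
disp.plot(values_format=".2f", xticks_rotation=45)
plt.tight_layout()
plt.savefig("cf.png")
```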
class_report.txt ADDED
@@ -0,0 +1,13 @@
+               precision    recall  f1-score   support
+
+       0anger       0.62      0.43      0.51      1237
+     1disgust       0.50      0.41      0.45       313
+        2fear       0.60      0.63      0.62       488
+         3joy       0.74      0.81      0.77      3103
+     4neutral       0.58      0.66      0.62      2926
+     5sadness       0.61      0.68      0.65      1661
+    6surprise       0.58      0.27      0.36       923
+
+     accuracy                           0.64     10651
+    macro avg       0.60      0.56      0.57     10651
+ weighted avg       0.63      0.64      0.63     10651
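
This is the output format of scikit-learn's classification_report; a minimal sketch with placeholder data (the random arrays stand in for the real test labels and predictions):

```python
# Sketch: producing a report in this format with scikit-learn.
# y_true / y_pred are random placeholders, not the model's actual predictions.
import numpy as np
from sklearn.metrics import classification_report

names = ["0anger", "1disgust", "2fear", "3joy", "4neutral", "5sadness", "6surprise"]
rng = np.random.default_rng(0)
y_true = rng.integers(0, 7, size=200)
y_pred = rng.integers(0, 7, size=200)
print(classification_report(y_true, y_pred, labels=list(range(7)), target_names=names))
```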
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "mask_token": {
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "unk_token": "<unk>"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "add_prefix_space": false,
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "errors": "replace",
+   "mask_token": "<mask>",
+   "model_max_length": 512,
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "tokenizer_class": "RobertaTokenizer",
+   "trim_offsets": true,
+   "unk_token": "<unk>"
+ }
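
These tokenizer files (merges.txt, vocab.json, tokenizer.json, and the two configs above) correspond to the stock roberta-large tokenizer; a minimal usage sketch:

```python
# Sketch: the tokenizer these files describe is the standard roberta-large one.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("roberta-large")
enc = tok("I can't believe it!", truncation=True, max_length=512, return_tensors="pt")
print(enc["input_ids"].shape)
```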
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6d3f8ac52d7d0434bde19b92a4f087003b00432a21a51c10dcccc680b1b45b1
+ size 4027
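
training_args.bin is the pickled transformers.TrainingArguments that Trainer saves next to its checkpoints. A sketch for inspecting it; note that recent torch versions default torch.load to weights_only=True, which must be disabled for this pickle (do this only for files you trust):

```python
# Sketch: inspecting the pickled TrainingArguments saved by transformers.Trainer.
import torch

args = torch.load("training_args.bin", weights_only=False)  # trusted file only
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```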
vocab.json ADDED
The diff for this file is too large to render. See raw diff