Naying0206 committed
Commit 8a67a55
1 Parent(s): afdbca4

Training in progress, step 500

adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "patrickvonplaten/bert2bert_cnn_daily_mail",
+  "base_model_name_or_path": "facebook/bart-base",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -16,12 +16,12 @@
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 32,
+  "r": 4,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "query",
-    "value"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "SEQ_2_SEQ_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:97c646b4eb28beb7d2273e49e0d34f710ae868cb8dc2f5690336347ff08e31d3
-size 14177360
+oid sha256:8008c348445a22c11fde48a1480ae6493253328384138bd241a040b990c2c69a
+size 894624
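
The adapter changes above swap the base model to facebook/bart-base, drop the LoRA rank from 32 to 4, and retarget the q_proj/v_proj attention projections, which is consistent with the adapter weights shrinking from about 14 MB to about 0.9 MB. Below is a minimal sketch of a PEFT setup that would serialize to files like these; lora_alpha, lora_dropout, and the output directory are illustrative assumptions, not values taken from this commit.

```python
# Sketch only: a LoRA configuration matching the adapter_config.json above.
# lora_alpha, lora_dropout, and "adapter_output" are assumptions; r,
# target_modules, bias, task_type, and the base model come from the diff.
from peft import LoraConfig, TaskType, get_peft_model
from transformers import AutoModelForSeq2SeqLM

base_model = AutoModelForSeq2SeqLM.from_pretrained("facebook/bart-base")

lora_config = LoraConfig(
    r=4,                                  # "r": 4
    target_modules=["v_proj", "q_proj"],  # BART attention projections
    bias="none",                          # "bias": "none"
    task_type=TaskType.SEQ_2_SEQ_LM,      # "task_type": "SEQ_2_SEQ_LM"
    lora_alpha=16,                        # assumption, not in the diff
    lora_dropout=0.1,                     # assumption, not in the diff
)

model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()

# Saving the adapter writes adapter_config.json and adapter_model.safetensors.
model.save_pretrained("adapter_output")
```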
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
runs/Apr13_17-04-33_ef033c8b66bb/events.out.tfevents.1713027875.ef033c8b66bb.375.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0e44e1fa2dd121808898268e4c1c6d080a37ce411adef4800748d5acdd46b0ca
-size 11439
+oid sha256:ad53066bc7b2768851e1ee004fee54d6cd9a507966180565bb370df1acffce19
+size 11793
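
As with the model weights, the event-log entries in this commit are Git LFS pointer files (version, oid, size) rather than the binaries themselves. A minimal sketch of reading such a pointer from a local checkout follows; the path is hypothetical and assumes the LFS object has not been pulled.

```python
# Sketch only: parse a Git LFS pointer of the three-line form shown above.
# The path is hypothetical.
from pathlib import Path

def read_lfs_pointer(path: str) -> dict:
    """Return the key/value fields of a Git LFS pointer file."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        if line.strip():
            key, _, value = line.partition(" ")
            fields[key] = value
    return fields

pointer = read_lfs_pointer("adapter_model.safetensors")
print(pointer["oid"], int(pointer["size"]))
```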
runs/Apr13_17-04-33_ef033c8b66bb/events.out.tfevents.1713031044.ef033c8b66bb.375.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:714017987bc1098f99aa8fdae36c86b0581d531aecf1078a74c7ccc16248e71a
+size 359
runs/Apr13_18-07-35_ef033c8b66bb/events.out.tfevents.1713031666.ef033c8b66bb.375.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ab565a7cda2aea03b7deeb6ef9fe5ff6f11cb3b4855c3ec6d2161d9ff51ed21
+size 6336
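
The runs/.../events.out.tfevents.* files above are TensorBoard event logs from this training run. Below is a minimal sketch of reading their scalars locally, assuming the tensorboard package and a checkout with the LFS objects pulled; the 'train/loss' tag is an assumption about what the Trainer logged.

```python
# Sketch only: read logged scalars from the TensorBoard event files above.
# Assumes a local checkout with LFS objects pulled; the tag name is an assumption.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Apr13_18-07-35_ef033c8b66bb")
acc.Reload()
print(acc.Tags()["scalars"])            # e.g. ['train/loss', 'train/learning_rate', ...]
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)
```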
special_tokens_map.json CHANGED
@@ -1,9 +1,15 @@
 {
-  "bos_token": "[CLS]",
-  "cls_token": "[CLS]",
-  "eos_token": "[SEP]",
-  "mask_token": "[MASK]",
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "unk_token": "[UNK]"
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
 }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,60 +1,58 @@
 {
+  "add_prefix_space": false,
   "added_tokens_decoder": {
     "0": {
-      "content": "[PAD]",
+      "content": "<s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "100": {
-      "content": "[UNK]",
+    "1": {
+      "content": "<pad>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "101": {
-      "content": "[CLS]",
+    "2": {
+      "content": "</s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "102": {
-      "content": "[SEP]",
+    "3": {
+      "content": "<unk>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "103": {
-      "content": "[MASK]",
-      "lstrip": false,
-      "normalized": false,
+    "50264": {
+      "content": "<mask>",
+      "lstrip": true,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     }
   },
-  "bos_token": "[CLS]",
+  "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
-  "cls_token": "[CLS]",
+  "cls_token": "<s>",
   "device_map": "auto",
-  "do_basic_tokenize": true,
-  "do_lower_case": true,
-  "eos_token": "[SEP]",
-  "mask_token": "[MASK]",
-  "model_max_length": 512,
-  "never_split": null,
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "strip_accents": null,
-  "tokenize_chinese_chars": true,
-  "tokenizer_class": "BertTokenizer",
-  "unk_token": "[UNK]"
+  "eos_token": "</s>",
+  "errors": "replace",
+  "mask_token": "<mask>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "tokenizer_class": "BartTokenizer",
+  "trim_offsets": true,
+  "unk_token": "<unk>"
 }
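
The special_tokens_map.json and tokenizer_config.json changes above, together with the added vocab.json and merges.txt, are what transformers writes when the tokenizer is switched from a BERT checkpoint to facebook/bart-base. A minimal sketch, assuming only the transformers library; the output directory name is illustrative.

```python
# Sketch only: loading the BART tokenizer and saving it produces the tokenizer
# files updated in this commit (tokenizer_config.json, special_tokens_map.json,
# vocab.json, merges.txt, tokenizer.json). "adapter_output" is an illustrative path.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("facebook/bart-base")
print(tokenizer.special_tokens_map)
# -> {'bos_token': '<s>', 'eos_token': '</s>', 'unk_token': '<unk>',
#     'sep_token': '</s>', 'pad_token': '<pad>', 'cls_token': '<s>',
#     'mask_token': '<mask>'}

tokenizer.save_pretrained("adapter_output")
```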
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:73f1cb5bdd996ea2f49275203b73e73be2eb736ca9a553bd8faa7f8dad7261f5
+oid sha256:8643b4d962f39729fe1c7aff20bb36fc0615286f9c620f70f132d6b383c4b10e
 size 5112
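
training_args.bin holds the serialized TrainingArguments for the run; only its content hash changes here, and the size stays at 5112 bytes. A minimal, hedged sketch of inspecting it from a local checkout follows; weights_only=False is needed on recent PyTorch versions because the file is a pickled Python object rather than plain tensors.

```python
# Sketch only: inspect the serialized TrainingArguments in training_args.bin.
# Assumes a local checkout with LFS objects pulled and a recent PyTorch.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # typically TrainingArguments or Seq2SeqTrainingArguments
print(args.output_dir, args.num_train_epochs)
```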
vocab.json ADDED
The diff for this file is too large to render. See raw diff