robinsmits committed on
Commit 169ae42
1 Parent(s): ad7ce9e

Training in progress, epoch 1

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
adapter_config.json ADDED
@@ -0,0 +1,20 @@
+ {
+ "auto_mapping": null,
+ "base_model_name_or_path": "DAMO-NLP-MT/polylm-13b",
+ "bias": "none",
+ "fan_in_fan_out": true,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.1,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "c_attn"
+ ],
+ "task_type": "CAUSAL_LM"
+ }
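
For reference, the values above are a standard PEFT LoRA configuration targeting the fused `c_attn` projection of DAMO-NLP-MT/polylm-13b. The sketch below is a non-authoritative illustration of how this configuration could be recreated or the trained adapter loaded; the local repo path and the `trust_remote_code` flag are assumptions, not taken from this commit.

```python
# Minimal sketch: rebuild the LoRA setup from adapter_config.json with peft.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, TaskType, get_peft_model, PeftModel

base_model = AutoModelForCausalLM.from_pretrained(
    "DAMO-NLP-MT/polylm-13b",  # base_model_name_or_path above
    trust_remote_code=True,    # assumption: PolyLM may ship custom modeling code
)

# Equivalent of the committed adapter_config.json (used when training from scratch).
lora_config = LoraConfig(
    task_type=TaskType.CAUSAL_LM,
    r=8,
    lora_alpha=16,
    lora_dropout=0.1,
    bias="none",
    fan_in_fan_out=True,        # c_attn is a fused, Conv1D-style QKV projection
    target_modules=["c_attn"],
)
peft_model = get_peft_model(base_model, lora_config)
peft_model.print_trainable_parameters()

# Alternatively, load the trained adapter weights (adapter_model.bin) committed here
# from a local clone of this repository (path is a placeholder).
# model = PeftModel.from_pretrained(base_model, "path/to/this/adapter/repo")
```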
adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92d453daae5636e8e353d6c11e6720f8c551335799f8fce8f66634bdbdca34bf
+ size 26242657
runs/Jul25_13-57-41_33d894fe09a6/events.out.tfevents.1690293466.33d894fe09a6.2605.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d94bd4004ecd9bedf6a657b9b8ae7bb7d2438f7120ae6ac4de9294af4c76c9e3
+ size 5259
runs/Jul25_14-22-46_33d894fe09a6/events.out.tfevents.1690294969.33d894fe09a6.20833.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:729cc890ea5a2c0c00a9b4451decb23603c1817ebb521c21af4c1f6aff9011d1
+ size 4644
runs/Jul25_14-29-39_33d894fe09a6/events.out.tfevents.1690295383.33d894fe09a6.22651.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:334ea248171bc021834e77083b2050f09b8fc1cb2a52d8c2db045ed8ac1dd776
+ size 13784
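
The three `runs/` files above are TensorBoard event logs written during training. A minimal sketch for inspecting them programmatically, assuming `tensorboard` is installed; the chosen run directory and the tag names in the comment are examples, not confirmed by this commit:

```python
# Read scalar metrics from one of the committed TensorBoard event files.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

run_dir = "runs/Jul25_14-29-39_33d894fe09a6"  # any of the run directories above
acc = EventAccumulator(run_dir)
acc.Reload()  # parses the events.out.tfevents.* file in the directory

for tag in acc.Tags()["scalars"]:  # e.g. "train/loss", "train/learning_rate"
    points = acc.Scalars(tag)
    print(tag, [(p.step, p.value) for p in points[:5]])
```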
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0cf7dde8f9ce245a67bc98a88fe398422c7270d5521ad1a9ca1a74141761d71b
+ size 4027
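
`training_args.bin` is the serialized `TrainingArguments` object that the transformers `Trainer` stores alongside its outputs. A minimal sketch for inspecting it; the local path is an assumption, and `transformers` must be importable so the pickled class can be resolved:

```python
# Inspect the pickled TrainingArguments saved by the Trainer.
import torch

args = torch.load("training_args.bin", weights_only=False)  # weights_only kwarg needs torch >= 1.13
print(args.num_train_epochs)
print(args.learning_rate)
print(args.per_device_train_batch_size)
```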