tiagoblima committed
Commit a9179bc
1 Parent(s): 2c96f9f

Training in progress, epoch 1

adapter_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "unicamp-dl/ptt5-small-t5-vocab",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 32,
+   "lora_dropout": 0.01,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": [
+     "lm_head"
+   ],
+   "peft_type": "LORA",
+   "r": 8,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "q",
+     "v"
+   ],
+   "task_type": "SEQ_2_SEQ_LM"
+ }
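
For context, a config like the one added above is what peft's LoraConfig serializes when an adapter checkpoint is saved. A minimal sketch of producing it; the variable names and output directory are illustrative, not part of this commit:

from peft import LoraConfig, TaskType

# Sketch of the LoRA setup that would serialize to the adapter_config.json
# above; peft marks the saved copy with "inference_mode": true.
config = LoraConfig(
    task_type=TaskType.SEQ_2_SEQ_LM,
    base_model_name_or_path="unicamp-dl/ptt5-small-t5-vocab",
    r=8,                            # LoRA rank
    lora_alpha=32,                  # scaling factor (alpha / r = 4.0)
    lora_dropout=0.01,
    target_modules=["q", "v"],      # T5 attention query/value projections
    modules_to_save=["lm_head"],    # trained fully and saved with the adapter
)
config.save_pretrained("./adapter")  # illustrative path; writes adapter_config.json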
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4a95c6da58c2d2532c5b8df559c3a25a7e121caf93f5b3680ff22d4c5b273d9
+ size 66987784
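
This safetensors file holds only the LoRA adapter weights (about 67 MB), not the full base model. A minimal loading sketch, assuming the files from this commit sit in a local directory; the path is illustrative:

from transformers import AutoModelForSeq2SeqLM
from peft import PeftModel

# Load the frozen base model, then attach the LoRA adapter on top.
base = AutoModelForSeq2SeqLM.from_pretrained("unicamp-dl/ptt5-small-t5-vocab")
model = PeftModel.from_pretrained(base, "./adapter")  # reads adapter_model.safetensors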
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
  {
    "version": "1.0",
-   "truncation": {
-     "direction": "Right",
-     "max_length": 30,
-     "strategy": "LongestFirst",
-     "stride": 0
-   },
-   "padding": {
-     "strategy": {
-       "Fixed": 30
-     },
-     "direction": "Right",
-     "pad_to_multiple_of": null,
-     "pad_id": 0,
-     "pad_type_id": 0,
-     "pad_token": "<pad>"
-   },
+   "truncation": null,
+   "padding": null,
    "added_tokens": [
      {
        "id": 0,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5ef7f9f298708ba4ea500bf7b01d036c84000697e790d4413d81e4aa76df5098
- size 4728
+ oid sha256:d32c88196888bdc31f3c5ec824c3f43bc509650f396eb8b48526f3bbbf8a5e3b
+ size 4792