Jayveersinh-Raj committed
Commit 71d43e1
1 Parent(s): cad1c5f

base 500k samples 4 epochs total (checkpoint trained)
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
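
Since tokenizer.json is now tracked by Git LFS, the repository stores only a pointer and the real file is resolved at download time. A minimal sketch of fetching it with huggingface_hub; the repo id is an assumption taken from "_name_or_path" in config.json below:

```python
# Sketch only: download the LFS-backed tokenizer.json from the Hub.
# repo_id is an assumption based on "_name_or_path" in config.json.
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="Jayveersinh-Raj/guj-grammar-base", filename="tokenizer.json")
print(path)  # local cache path of the resolved (non-pointer) file
```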
config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "_name_or_path": "Jayveersinh-Raj/guj-grammar-base",
+   "architectures": [
+     "MT5ForConditionalGeneration"
+   ],
+   "classifier_dropout": 0.0,
+   "d_ff": 2048,
+   "d_kv": 64,
+   "d_model": 768,
+   "decoder_start_token_id": 0,
+   "dense_act_fn": "gelu_new",
+   "dropout_rate": 0.1,
+   "eos_token_id": 1,
+   "feed_forward_proj": "gated-gelu",
+   "initializer_factor": 1.0,
+   "is_encoder_decoder": true,
+   "is_gated_act": true,
+   "layer_norm_epsilon": 1e-06,
+   "model_type": "mt5",
+   "num_decoder_layers": 12,
+   "num_heads": 12,
+   "num_layers": 12,
+   "output_past": true,
+   "pad_token_id": 0,
+   "relative_attention_max_distance": 128,
+   "relative_attention_num_buckets": 32,
+   "tie_word_embeddings": false,
+   "tokenizer_class": "T5Tokenizer",
+   "torch_dtype": "float32",
+   "transformers_version": "4.33.2",
+   "use_cache": true,
+   "vocab_size": 250112
+ }
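
The config describes an mT5-base encoder-decoder (12 encoder and 12 decoder layers, d_model 768, vocab size 250112). A minimal loading sketch with transformers, assuming the Hub repo id matches "_name_or_path" above; a local checkpoint directory would work the same way:

```python
# Minimal sketch: load the checkpoint described by config.json.
# repo_id is an assumption taken from "_name_or_path".
from transformers import MT5ForConditionalGeneration, T5Tokenizer

repo_id = "Jayveersinh-Raj/guj-grammar-base"
tokenizer = T5Tokenizer.from_pretrained(repo_id)          # "tokenizer_class": "T5Tokenizer"
model = MT5ForConditionalGeneration.from_pretrained(repo_id)
```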
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "decoder_start_token_id": 0,
+   "eos_token_id": 1,
+   "pad_token_id": 0,
+   "transformers_version": "4.33.2"
+ }
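
These defaults (decoder_start_token_id 0, eos_token_id 1, pad_token_id 0) are picked up automatically by generate(). Continuing the loading sketch above, with a placeholder input sentence:

```python
# Continues the loading sketch after config.json; the input text is a placeholder.
text = "<Gujarati sentence to correct>"
inputs = tokenizer(text, return_tensors="pt")
output_ids = model.generate(**inputs, max_length=128)  # token ids come from generation_config.json
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```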
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:114407f6d2c32aa02f7d46ee665f0948ce1f11842409f8ddbb95ad5d38e1c95d
+ size 4659447661
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d1be8566ca513b07a80ccc844c0dc9a3d3299f1893125592a2b5d35ee434e65
+ size 2329702581
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e1ebc939c431e0d0a873844d29c6d4306286ace18a383582b3d0aded278cd7f
+ size 14575
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c1e2f83fc1737955ca5dbbf89ec1dec84e89ca11ede4c6b0789e0988e8afa22
+ size 627
special_tokens_map.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "eos_token": "</s>",
+   "pad_token": "<pad>",
+   "unk_token": "<unk>"
+ }
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef78f86560d809067d12bac6c09f19a462cb3af3f54d2b8acbba26e1433125d6
+ size 4309802
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6502d07619068a98aa2d3bb531332a694ffe108ca6c6fe62a467ccfe98d666b9
+ size 16315219
tokenizer_config.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "additional_special_tokens": null,
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "</s>",
+   "extra_ids": 0,
+   "legacy": true,
+   "max_length": 128,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "<pad>",
+   "sp_model_kwargs": {},
+   "stride": 0,
+   "tokenizer_class": "T5Tokenizer",
+   "truncation_side": "right",
+   "truncation_strategy": "longest_first",
+   "unk_token": "<unk>"
+ }
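
tokenizer_config.json records a max_length of 128 with right-side truncation; applying the same limit at inference time keeps inputs consistent with training. A sketch, continuing from the tokenizer loaded above (the input strings are placeholders):

```python
# Sketch: tokenize with the length/truncation settings recorded in tokenizer_config.json.
batch = tokenizer(
    ["<input sentence 1>", "<input sentence 2>"],  # placeholder inputs
    max_length=128,       # matches "max_length" above
    truncation=True,      # truncates on the right per "truncation_side"
    padding=True,
    return_tensors="pt",
)
```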
trainer_state.json ADDED
@@ -0,0 +1,104 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 1.9715578539107952,
+   "eval_steps": 500,
+   "global_step": 6100,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.16,
+       "learning_rate": 1.8383968972204266e-05,
+       "loss": 0.144,
+       "step": 500
+     },
+     {
+       "epoch": 0.32,
+       "learning_rate": 1.6767937944408533e-05,
+       "loss": 0.137,
+       "step": 1000
+     },
+     {
+       "epoch": 0.48,
+       "learning_rate": 1.51519069166128e-05,
+       "loss": 0.1307,
+       "step": 1500
+     },
+     {
+       "epoch": 0.65,
+       "learning_rate": 1.3535875888817068e-05,
+       "loss": 0.1263,
+       "step": 2000
+     },
+     {
+       "epoch": 0.81,
+       "learning_rate": 1.1919844861021333e-05,
+       "loss": 0.1241,
+       "step": 2500
+     },
+     {
+       "epoch": 0.97,
+       "learning_rate": 1.0303813833225597e-05,
+       "loss": 0.12,
+       "step": 3000
+     },
+     {
+       "epoch": 1.0,
+       "eval_gen_len": 18.2165,
+       "eval_loss": 0.06346726417541504,
+       "eval_rouge1": 0.1191,
+       "eval_rouge2": 0.0373,
+       "eval_rougeL": 0.1197,
+       "eval_rougeLsum": 0.1193,
+       "eval_runtime": 34.2167,
+       "eval_samples_per_second": 58.451,
+       "eval_steps_per_second": 0.935,
+       "step": 3094
+     },
+     {
+       "epoch": 1.13,
+       "learning_rate": 8.687782805429864e-06,
+       "loss": 0.1159,
+       "step": 3500
+     },
+     {
+       "epoch": 1.29,
+       "learning_rate": 7.071751777634132e-06,
+       "loss": 0.1147,
+       "step": 4000
+     },
+     {
+       "epoch": 1.45,
+       "learning_rate": 5.4557207498383965e-06,
+       "loss": 0.1122,
+       "step": 4500
+     },
+     {
+       "epoch": 1.62,
+       "learning_rate": 3.839689722042664e-06,
+       "loss": 0.111,
+       "step": 5000
+     },
+     {
+       "epoch": 1.78,
+       "learning_rate": 2.2236586942469294e-06,
+       "loss": 0.1104,
+       "step": 5500
+     },
+     {
+       "epoch": 1.94,
+       "learning_rate": 6.076276664511959e-07,
+       "loss": 0.1089,
+       "step": 6000
+     }
+   ],
+   "logging_steps": 500,
+   "max_steps": 6188,
+   "num_train_epochs": 2,
+   "save_steps": 100,
+   "total_flos": 1.1413435354310246e+17,
+   "trial_name": null,
+   "trial_params": null
+ }
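
trainer_state.json captures the run at global step 6100 (epoch ~1.97 of the 2 epochs in this run), with the training loss logged every 500 steps and one end-of-epoch evaluation at step 3094. A small sketch for inspecting that log from a local checkout (the file path is an assumption):

```python
import json

# Sketch: print the training curve and eval metrics stored in trainer_state.json.
with open("trainer_state.json") as f:  # assumes the checkpoint files are checked out locally
    state = json.load(f)

for entry in state["log_history"]:
    if "eval_loss" in entry:           # end-of-epoch evaluation record
        print(f"step {entry['step']}: eval_loss={entry['eval_loss']}, rougeL={entry['eval_rougeL']}")
    elif "loss" in entry:              # periodic training log (every 500 steps)
        print(f"step {entry['step']}: loss={entry['loss']}, lr={entry['learning_rate']}")
```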
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a7665e49347f091fdd3ec5c4781be8ca50d62bc8ab272e8cc0e27b3ca1b1631
+ size 4155